Merge branch 'release/v4.0.1'
(mirror of https://github.com/platformio/platformio-core.git, synced 2025-07-30 01:57:13 +02:00)

@@ -1,3 +1,3 @@
 [settings]
 line_length=79
-known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial,twisted,autobahn,jsonrpc
+known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial,twisted,autobahn,jsonrpc,tabulate

@@ -4,18 +4,18 @@ Contributing
 To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.
 
 1. Fork the repository on GitHub.
-2. Make a branch off of ``develop``
-3. Run ``pip install tox``
-4. Go to the root of project where is located ``tox.ini`` and run ``tox -e develop``
+2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
+3. Run `pip install tox`
+4. Go to the root of project where is located `tox.ini` and run `tox -e py27`
 5. Activate current development environment:
 
-   * Windows: ``.tox\develop\Scripts\activate``
-   * Bash/ZSH: ``source .tox/develop/bin/activate``
-   * Fish: ``source .tox/bin/activate.fish``
+   * Windows: `.tox\py27\Scripts\activate`
+   * Bash/ZSH: `source .tox/py27/bin/activate`
+   * Fish: `source .tox/py27/bin/activate.fish`
 
 6. Make changes to code, documentation, etc.
-7. Lint source code ``tox -e lint``
-8. Run the tests ``tox -e py27``
-9. Build documentation ``tox -e docs`` (creates a directory _build under docs where you can find the html)
+7. Lint source code `make lint`
+8. Run the tests `make test`
+9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
 10. Commit changes to your forked repository
 11. Submit a Pull Request on GitHub.

HISTORY.rst
@@ -6,6 +6,21 @@ Release Notes
 PlatformIO 4.0
 --------------
 
+4.0.1 (2019-08-22)
+~~~~~~~~~~~~~~~~~~
+
+* Print `debug tool <http://docs.platformio.org/page/plus/debugging.html#tools-debug-probes>`__ name for the active debugging session
+* Do not shutdown PIO Home Server for "upgrade" operations (`issue #2784 <https://github.com/platformio/platformio-core/issues/2784>`_)
+* Improved computing of project check sum (structure, configuration) and avoid unnecessary rebuilding
+* Improved printing of tabulated results
+* Automatically normalize file system paths to UNIX-style for Project Generator (`issue #2857 <https://github.com/platformio/platformio-core/issues/2857>`_)
+* Ability to set "databaseFilename" for VSCode and C/C++ extension (`issue #2825 <https://github.com/platformio/platformio-core/issues/2825>`_)
+* Renamed "enable_ssl" setting to `strict_ssl <http://docs.platformio.org/page/userguide/cmd_settings.html#strict-ssl>`__
+* Fixed an issue with incorrect escaping of Windows slashes when using `PIO Unified Debugger <http://docs.platformio.org/page/plus/debugging.html>`__ and "piped" openOCD
+* Fixed an issue when "debug", "home", "run", and "test" commands were not shown in "platformio --help" CLI
+* Fixed an issue with PIO Home's "No JSON object could be decoded" (`issue #2823 <https://github.com/platformio/platformio-core/issues/2823>`_)
+* Fixed an issue when `library.json <http://docs.platformio.org/page/librarymanager/config.html>`__ had priority over project configuration for `LDF <http://docs.platformio.org/page/librarymanager/ldf.html>`__ (`issue #2867 <https://github.com/platformio/platformio-core/issues/2867>`_)
+
 4.0.0 (2019-07-10)
 ~~~~~~~~~~~~~~~~~~
 

Makefile
@@ -28,3 +28,6 @@ profile:
 	# Usage $ > make PIOARGS="boards" profile
 	python -m cProfile -o .tox/.tmp/cprofile.prof $(shell which platformio) ${PIOARGS}
 	snakeviz .tox/.tmp/cprofile.prof
+
+publish:
+	python setup.py sdist upload

README.rst
@@ -10,20 +10,16 @@ PlatformIO
 .. image:: https://img.shields.io/pypi/v/platformio.svg
     :target: https://pypi.python.org/pypi/platformio/
     :alt: Latest Version
-.. image:: https://img.shields.io/pypi/l/platformio.svg
+.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
     :target: https://pypi.python.org/pypi/platformio/
     :alt: License
-.. image:: https://img.shields.io/PlatformIO/Community.png
+.. image:: https://img.shields.io/badge/PlatformIO-Community-orange.svg
     :alt: Community Forums
     :target: https://community.platformio.org?utm_source=github&utm_medium=core
-.. image:: https://img.shields.io/PIO/Plus.png?color=orange
-    :alt: PIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
-    :target: https://platformio.org/pricing?utm_source=github&utm_medium=core
 
 **Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
-`PIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
 `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
-`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
+`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
 `Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
 `Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
 `Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
@@ -53,7 +49,7 @@ Open Source
 * `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
 * `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
 * `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
-* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`_
+* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
 * `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
 * `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
 * `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
@@ -132,6 +128,15 @@ Contributing
 
 See `contributing guidelines <https://github.com/platformio/platformio/blob/develop/CONTRIBUTING.md>`_.
 
+Telemetry / Privacy Policy
+--------------------------
+
+Share minimal diagnostics and usage information to help us make PlatformIO better.
+It is enabled by default. For more information see:
+
+* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
+* `SSL Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
+
 License
 -------
 

Submodule docs updated: ae7deefa58...29f80d45f2
Submodule examples updated: 70f28968f2...a71564ab46

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-VERSION = (4, 0, 0)
+VERSION = (4, 0, 1)
 __version__ = ".".join([str(s) for s in VERSION])
 
 __title__ = "platformio"

@@ -22,7 +22,7 @@ from time import time
 
 import requests
 
-from platformio import exception, lockfile, util
+from platformio import exception, fs, lockfile
 from platformio.compat import (WINDOWS, dump_json_to_unicode,
                                hashlib_encode_data)
 from platformio.proc import is_ci
@@ -73,16 +73,14 @@ DEFAULT_SETTINGS = {
         "description": "Enable caching for API requests and Library Manager",
         "value": True
     },
-    "enable_ssl": {
-        "description": "Enable SSL for PlatformIO Services",
+    "strict_ssl": {
+        "description": "Strict SSL for PlatformIO Services",
         "value": False
     },
     "enable_telemetry": {
         "description":
-        ("Telemetry service <https://docs.platformio.org/page/"
-         "userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
-        "value":
-        True
+        ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
+        "value": True
     },
     "force_verbose": {
         "description": "Force verbose output when processing environments",
@@ -113,7 +111,7 @@ class State(object):
         try:
             self._lock_state_file()
             if isfile(self.path):
-                self._storage = util.load_json(self.path)
+                self._storage = fs.load_json(self.path)
             assert isinstance(self._storage, dict)
         except (AssertionError, ValueError, UnicodeDecodeError,
                 exception.InvalidJSONFile):
@@ -157,6 +155,9 @@ class State(object):
         self.modified = True
         return self._storage.update(*args, **kwargs)
 
+    def clear(self):
+        return self._storage.clear()
+
     def __getitem__(self, key):
         return self._storage[key]
 
@@ -287,7 +288,7 @@ class ContentCache(object):
         try:
             remove(path)
             if not listdir(dirname(path)):
-                util.rmtree_(dirname(path))
+                fs.rmtree(dirname(path))
         except OSError:
             pass
 
@@ -301,7 +302,7 @@ class ContentCache(object):
     def clean(self):
         if not self.cache_dir or not isdir(self.cache_dir):
             return
-        util.rmtree_(self.cache_dir)
+        fs.rmtree(self.cache_dir)
 
 
 def clean_cache():

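The State class touched above is a lock-protected dictionary persisted to a JSON file (now loaded via fs.load_json and exposing clear(), which PIO Home uses further below to rewrite homestate.json). A minimal standalone sketch of that pattern, with the file lock omitted and the class/file names (JsonState, /tmp/state.json) purely illustrative:

import json
import os

class JsonState(object):
    # Simplified stand-in for State: a dict persisted to a JSON file and
    # used as a context manager (the real class also takes a lock file).
    def __init__(self, path):
        self.path = path
        self._storage = {}

    def __enter__(self):
        if os.path.isfile(self.path):
            with open(self.path) as fp:
                self._storage = json.load(fp)
        return self._storage

    def __exit__(self, *exc):
        with open(self.path, "w") as fp:
            json.dump(self._storage, fp)

with JsonState("/tmp/state.json") as state:
    state["last_check"] = "2019-08-22"
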
@@ -27,7 +27,7 @@ from SCons.Script import DefaultEnvironment  # pylint: disable=import-error
 from SCons.Script import Import  # pylint: disable=import-error
 from SCons.Script import Variables  # pylint: disable=import-error
 
-from platformio import util
+from platformio import fs
 from platformio.compat import PY2, dump_json_to_unicode
 from platformio.managers.platform import PlatformBase
 from platformio.proc import get_pythonexe_path
@@ -51,7 +51,7 @@ DEFAULT_ENV_OPTIONS = dict(
         "ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
         "pioproject", "piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
     ],
-    toolpath=[join(util.get_source_dir(), "builder", "tools")],
+    toolpath=[join(fs.get_source_dir(), "builder", "tools")],
     variables=clivars,
 
     # Propagating External Environment
@@ -145,10 +145,10 @@ if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
     Default("checkprogsize")
 
 # Print configured protocols
-env.AddPreAction(
-    ["upload", "program"],
-    env.VerboseAction(lambda source, target, env: env.PrintUploadInfo(),
-                      "Configuring upload protocol..."))
+env.AddPreAction(["upload", "program"],
+                 env.VerboseAction(
+                     lambda source, target, env: env.PrintUploadInfo(),
+                     "Configuring upload protocol..."))
 
 AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
 AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))

@@ -31,7 +31,7 @@ from SCons.Script import ARGUMENTS  # pylint: disable=import-error
 from SCons.Script import COMMAND_LINE_TARGETS  # pylint: disable=import-error
 from SCons.Script import DefaultEnvironment  # pylint: disable=import-error
 
-from platformio import exception, util
+from platformio import exception, fs, util
 from platformio.builder.tools import platformio as piotool
 from platformio.compat import (WINDOWS, get_file_contents, hashlib_encode_data,
                                string_types)
@@ -78,7 +78,7 @@ class LibBuilderFactory(object):
         if "mbed_lib.json" in files:
             return ["mbed"]
         for fname in files:
-            if not env.IsFileWithExt(
+            if not fs.path_endswith_ext(
                     fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
                 continue
             content = get_file_contents(join(root, fname))
@@ -200,21 +200,6 @@ class LibBuilderBase(object):
     def extra_script(self):
         return None
 
-    @property
-    def lib_archive(self):
-        return self.env.GetProjectOption("lib_archive", True)
-
-    @property
-    def lib_ldf_mode(self):
-        return self.validate_ldf_mode(
-            self.env.GetProjectOption("lib_ldf_mode", self.LDF_MODE_DEFAULT))
-
-    @property
-    def lib_compat_mode(self):
-        return self.validate_compat_mode(
-            self.env.GetProjectOption("lib_compat_mode",
-                                      self.COMPAT_MODE_DEFAULT))
-
     @property
     def depbuilders(self):
         return self._depbuilders
@@ -227,6 +212,14 @@ class LibBuilderBase(object):
     def is_built(self):
         return self._is_built
 
+    @property
+    def lib_archive(self):
+        return self.env.GetProjectOption("lib_archive", True)
+
+    @property
+    def lib_ldf_mode(self):
+        return self.env.GetProjectOption("lib_ldf_mode", self.LDF_MODE_DEFAULT)
+
     @staticmethod
     def validate_ldf_mode(mode):
         if isinstance(mode, string_types):
@@ -239,6 +232,11 @@ class LibBuilderBase(object):
             pass
         return LibBuilderBase.LDF_MODE_DEFAULT
 
+    @property
+    def lib_compat_mode(self):
+        return self.env.GetProjectOption("lib_compat_mode",
+                                         self.COMPAT_MODE_DEFAULT)
+
     @staticmethod
     def validate_compat_mode(mode):
         if isinstance(mode, string_types):
@@ -261,7 +259,7 @@ class LibBuilderBase(object):
         return {}
 
     def process_extra_options(self):
-        with util.cd(self.path):
+        with fs.cd(self.path):
             self.env.ProcessFlags(self.build_flags)
             if self.extra_script:
                 self.env.SConscriptChdir(1)

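Several hunks in this commit swap util.cd, util.load_json and util.rmtree_ for a new platformio.fs module whose source is not part of this diff. A rough sketch of what a cd() directory-switching context manager like the one used in process_extra_options() typically looks like (hypothetical, not the actual fs.py):

import os
from contextlib import contextmanager

@contextmanager
def cd(new_dir):
    # Temporarily switch the working directory, restoring the previous one
    # even if the body raises.
    prev_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield
    finally:
        os.chdir(prev_dir)
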
@@ -351,7 +349,7 @@ class LibBuilderBase(object):
             if not self.PARSE_SRC_BY_H_NAME:
                 continue
             _h_path = item.get_abspath()
-            if not self.env.IsFileWithExt(_h_path, piotool.SRC_HEADER_EXT):
+            if not fs.path_endswith_ext(_h_path, piotool.SRC_HEADER_EXT):
                 continue
             _f_part = _h_path[:_h_path.rindex(".")]
             for ext in piotool.SRC_C_EXT:
@@ -533,7 +531,7 @@ class MbedLibBuilder(LibBuilderBase):
     def load_manifest(self):
         if not isfile(join(self.path, "module.json")):
             return {}
-        return util.load_json(join(self.path, "module.json"))
+        return fs.load_json(join(self.path, "module.json"))
 
     @property
     def include_dir(self):
@@ -611,7 +609,7 @@ class MbedLibBuilder(LibBuilderBase):
     def _mbed_lib_conf_parse_macros(self, mbed_lib_path):
         macros = {}
         cppdefines = str(self.env.Flatten(self.env.subst("$CPPDEFINES")))
-        manifest = util.load_json(mbed_lib_path)
+        manifest = fs.load_json(mbed_lib_path)
 
         # default macros
         for macro in manifest.get("macros", []):
@@ -682,7 +680,7 @@ class PlatformIOLibBuilder(LibBuilderBase):
 
     def load_manifest(self):
         assert isfile(join(self.path, "library.json"))
-        manifest = util.load_json(join(self.path, "library.json"))
+        manifest = fs.load_json(join(self.path, "library.json"))
         assert "name" in manifest
 
         # replace "espressif" old name dev/platform with ESP8266
@@ -700,14 +698,14 @@ class PlatformIOLibBuilder(LibBuilderBase):
     @property
     def include_dir(self):
         if "includeDir" in self._manifest.get("build", {}):
-            with util.cd(self.path):
+            with fs.cd(self.path):
                 return realpath(self._manifest.get("build").get("includeDir"))
         return LibBuilderBase.include_dir.fget(self)
 
     @property
     def src_dir(self):
         if "srcDir" in self._manifest.get("build", {}):
-            with util.cd(self.path):
+            with fs.cd(self.path):
                 return realpath(self._manifest.get("build").get("srcDir"))
         return LibBuilderBase.src_dir.fget(self)
 
@@ -741,23 +739,28 @@ class PlatformIOLibBuilder(LibBuilderBase):
 
     @property
     def lib_archive(self):
-        if "libArchive" in self._manifest.get("build", {}):
-            return self._manifest.get("build").get("libArchive")
-        return LibBuilderBase.lib_archive.fget(self)
+        global_value = self.env.GetProjectOption("lib_archive")
+        if global_value is not None:
+            return global_value
+        return self._manifest.get("build", {}).get(
+            "libArchive", LibBuilderBase.lib_archive.fget(self))
 
     @property
     def lib_ldf_mode(self):
-        if "libLDFMode" in self._manifest.get("build", {}):
-            return self.validate_ldf_mode(
-                self._manifest.get("build").get("libLDFMode"))
-        return LibBuilderBase.lib_ldf_mode.fget(self)
+        return self.validate_ldf_mode(
+            self.env.GetProjectOption(
+                "lib_ldf_mode",
+                self._manifest.get("build", {}).get(
+                    "libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self))))
 
     @property
     def lib_compat_mode(self):
-        if "libCompatMode" in self._manifest.get("build", {}):
-            return self.validate_compat_mode(
-                self._manifest.get("build").get("libCompatMode"))
-        return LibBuilderBase.lib_compat_mode.fget(self)
+        return self.validate_ldf_mode(
+            self.env.GetProjectOption(
+                "lib_compat_mode",
+                self._manifest.get("build", {}).get(
+                    "libCompatMode",
+                    LibBuilderBase.lib_compat_mode.fget(self))))
 
     def is_platforms_compatible(self, platforms):
         items = self._manifest.get("platforms")

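The three rewritten properties above implement the fix for issue #2867: an option set in the project configuration (platformio.ini) now wins over the library's own library.json manifest, which in turn wins over the built-in default. A standalone sketch of that precedence chain (resolve_option is a hypothetical helper, not part of the code base):

def resolve_option(project_options, manifest_build, key, manifest_key, default):
    # 1. project configuration (platformio.ini) has the highest priority
    if project_options.get(key) is not None:
        return project_options[key]
    # 2. then the library's own manifest ("build" section of library.json)
    if manifest_key in manifest_build:
        return manifest_build[manifest_key]
    # 3. finally the built-in default
    return default

# The project forces archiving off even though the library enables it:
print(resolve_option({"lib_archive": False}, {"libArchive": True},
                     "lib_archive", "libArchive", True))  # -> False
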
@@ -1000,7 +1003,7 @@ def ConfigureProjectLibBuilder(env):
 
     def _get_vcs_info(lb):
         path = LibraryManager.get_src_manifest_path(lb.path)
-        return util.load_json(path) if path else None
+        return fs.load_json(path) if path else None
 
     def _correct_found_libs(lib_builders):
         # build full dependency graph

@@ -24,7 +24,7 @@ from tempfile import mkstemp
 from SCons.Action import Action  # pylint: disable=import-error
 from SCons.Script import ARGUMENTS  # pylint: disable=import-error
 
-from platformio import util
+from platformio import fs, util
 from platformio.compat import get_file_contents, glob_escape
 from platformio.managers.core import get_core_package_dir
 from platformio.proc import exec_command
@@ -295,7 +295,7 @@ def PioClean(env, clean_dir):
             print("Removed %s" %
                   (dst if clean_rel_path.startswith(".") else relpath(dst)))
     print("Done cleaning")
-    util.rmtree_(clean_dir)
+    fs.rmtree(clean_dir)
     env.Exit(0)
 
 
@@ -333,7 +333,7 @@ def GetExtraScripts(env, scope):
             items.append(item[len(scope) + 1:])
     if not items:
         return items
-    with util.cd(env.subst("$PROJECT_DIR")):
+    with fs.cd(env.subst("$PROJECT_DIR")):
         return [realpath(item) for item in items]
 
 

@@ -20,7 +20,7 @@ from os.path import isdir, isfile, join
 from SCons.Script import ARGUMENTS  # pylint: disable=import-error
 from SCons.Script import COMMAND_LINE_TARGETS  # pylint: disable=import-error
 
-from platformio import exception, util
+from platformio import exception, fs, util
 from platformio.compat import WINDOWS
 from platformio.managers.platform import PlatformFactory
 from platformio.project.config import ProjectOptions
@@ -129,7 +129,7 @@ def PrintConfiguration(env):  # pylint: disable=too-many-statements
         src_manifest_path = platform.pm.get_src_manifest_path(
             platform.get_dir())
         if src_manifest_path:
-            src_manifest = util.load_json(src_manifest_path)
+            src_manifest = fs.load_json(src_manifest_path)
             if "version" in src_manifest:
                 data.append("#" + src_manifest['version'])
             if int(ARGUMENTS.get("PIOVERBOSE", 0)):
@@ -152,7 +152,7 @@ def PrintConfiguration(env):  # pylint: disable=too-many-statements
         ram = board_config.get("upload", {}).get("maximum_ram_size")
         flash = board_config.get("upload", {}).get("maximum_size")
         data.append("%s RAM, %s Flash" %
-                    (util.format_filesize(ram), util.format_filesize(flash)))
+                    (fs.format_filesize(ram), fs.format_filesize(flash)))
         return data
 
     def _get_debug_data():

@@ -25,7 +25,7 @@ from time import sleep
 from SCons.Script import ARGUMENTS  # pylint: disable=import-error
 from serial import Serial, SerialException
 
-from platformio import exception, util
+from platformio import exception, fs, util
 from platformio.compat import WINDOWS
 from platformio.proc import exec_command
 
@@ -156,7 +156,7 @@ def AutodetectUploadPort(*args, **kwargs):
         env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
     else:
         try:
-            util.ensure_udev_rules()
+            fs.ensure_udev_rules()
         except exception.InvalidUdevRules as e:
             sys.stderr.write("\n%s\n\n" % e)
         env.Replace(UPLOAD_PORT=_look_for_serial_port())

@@ -14,10 +14,8 @@
 
 from __future__ import absolute_import
 
-import re
+import os
 import sys
-from glob import glob
-from os import sep, walk
 from os.path import basename, dirname, isdir, join, realpath
 
 from SCons import Builder, Util  # pylint: disable=import-error
@@ -27,14 +25,14 @@ from SCons.Script import DefaultEnvironment  # pylint: disable=import-error
 from SCons.Script import Export  # pylint: disable=import-error
 from SCons.Script import SConscript  # pylint: disable=import-error
 
-from platformio.compat import glob_escape, string_types
+from platformio import fs
+from platformio.compat import string_types
 from platformio.util import pioversion_to_intstr
 
 SRC_HEADER_EXT = ["h", "hpp"]
 SRC_C_EXT = ["c", "cc", "cpp"]
 SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
-SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
-SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
+SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % os.sep, "-<.svn%s>" % os.sep]
 
 
 def scons_patched_match_splitext(path, suffixes=None):
@@ -230,44 +228,11 @@ def ProcessUnFlags(env, flags):
             env[key].remove(current)
 
 
-def IsFileWithExt(env, file_, ext):  # pylint: disable=W0613
-    if basename(file_).startswith("."):
-        return False
-    for e in ext:
-        if file_.endswith(".%s" % e):
-            return True
-    return False
-
-
 def MatchSourceFiles(env, src_dir, src_filter=None):
-
-    def _append_build_item(items, item, src_dir):
-        if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
-            items.add(item.replace(src_dir + sep, ""))
-
-    src_dir = env.subst(src_dir)
     src_filter = env.subst(src_filter) if src_filter else None
     src_filter = src_filter or SRC_FILTER_DEFAULT
-    if isinstance(src_filter, (list, tuple)):
-        src_filter = " ".join(src_filter)
-
-    matches = set()
-    # correct fs directory separator
-    src_filter = src_filter.replace("/", sep).replace("\\", sep)
-    for (action, pattern) in SRC_FILTER_PATTERNS_RE.findall(src_filter):
-        items = set()
-        for item in glob(join(glob_escape(src_dir), pattern)):
-            if isdir(item):
-                for root, _, files in walk(item, followlinks=True):
-                    for f in files:
-                        _append_build_item(items, join(root, f), src_dir)
-            else:
-                _append_build_item(items, item, src_dir)
-        if action == "+":
-            matches |= items
-        else:
-            matches -= items
-    return sorted(list(matches))
+    return fs.match_src_files(env.subst(src_dir), src_filter,
+                              SRC_BUILD_EXT + SRC_HEADER_EXT)
 
 
 def CollectBuildFiles(env,
@@ -279,7 +244,7 @@ def CollectBuildFiles(env,
     variants = []
 
     src_dir = env.subst(src_dir)
-    if src_dir.endswith(sep):
+    if src_dir.endswith(os.sep):
         src_dir = src_dir[:-1]
 
     for item in env.MatchSourceFiles(src_dir, src_filter):
@@ -291,7 +256,7 @@ def CollectBuildFiles(env,
             variants.append(_var_dir)
             env.VariantDir(_var_dir, _src_dir, duplicate)
 
-        if env.IsFileWithExt(item, SRC_BUILD_EXT):
+        if fs.path_endswith_ext(item, SRC_BUILD_EXT):
             sources.append(env.File(join(_var_dir, basename(item))))
 
     return sources
@@ -316,7 +281,7 @@ def BuildFrameworks(env, frameworks):
         env.Exit(1)
 
     for f in frameworks:
-        if f in ("arduino", "energia"):
+        if f == "arduino":
             # Arduino IDE appends .o the end of filename
             Builder.match_splitext = scons_patched_match_splitext
         if "nobuild" not in COMMAND_LINE_TARGETS:
@@ -352,7 +317,6 @@ def generate(env):
     env.AddMethod(ParseFlagsExtended)
    env.AddMethod(ProcessFlags)
     env.AddMethod(ProcessUnFlags)
-    env.AddMethod(IsFileWithExt)
     env.AddMethod(MatchSourceFiles)
     env.AddMethod(CollectBuildFiles)
     env.AddMethod(BuildFrameworks)

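MatchSourceFiles now delegates the +<pattern>/-<pattern> source-filter matching to fs.match_src_files, whose implementation is not shown in this diff. A simplified standalone sketch of the same semantics, based on the code removed above (extension filtering and path normalization omitted):

import os
import re
from glob import glob

FILTER_RE = re.compile(r"(\+|\-)<([^>]+)>")

def match_src_files(src_dir, src_filter):
    # "+<pattern>" adds matching files, "-<pattern>" removes them,
    # evaluated left to right.
    if isinstance(src_filter, (list, tuple)):
        src_filter = " ".join(src_filter)
    matches = set()
    for action, pattern in FILTER_RE.findall(src_filter):
        items = set()
        for item in glob(os.path.join(src_dir, pattern)):
            if os.path.isdir(item):
                for root, _, files in os.walk(item, followlinks=True):
                    items.update(os.path.join(root, f) for f in files)
            else:
                items.add(item)
        matches = matches | items if action == "+" else matches - items
    return sorted(matches)

print(match_src_files("src", ["+<*>", "-<.git%s>" % os.sep]))
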
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 import os
-from os.path import dirname
+from os.path import dirname, isfile, join
 
 import click
 
@@ -38,11 +38,14 @@ class PlatformioCLI(click.MultiCommand):
 
     def list_commands(self, ctx):
         cmds = []
-        for filename in os.listdir(dirname(__file__)):
-            if filename.startswith("__init__"):
+        cmds_dir = dirname(__file__)
+        for name in os.listdir(cmds_dir):
+            if name.startswith("__init__"):
                 continue
-            if filename.endswith(".py"):
-                cmds.append(filename[:-3])
+            if isfile(join(cmds_dir, name, "command.py")):
+                cmds.append(name)
+            elif name.endswith(".py"):
+                cmds.append(name[:-3])
         cmds.sort()
         return cmds
 

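The list_commands() change above is what restores "debug", "home", "run" and "test" in "platformio --help": the affected commands are presumably implemented as packages exposing a command.py module rather than as single .py files, and the old loop only recognized the latter. A small runnable illustration of both layouts being discovered (throw-away directory, hypothetical command names):

import os
import tempfile

root = tempfile.mkdtemp()
# module-style command: boards.py
open(os.path.join(root, "boards.py"), "w").close()
# package-style command: debug/command.py
os.makedirs(os.path.join(root, "debug"))
open(os.path.join(root, "debug", "command.py"), "w").close()

cmds = []
for name in os.listdir(root):
    if os.path.isfile(os.path.join(root, name, "command.py")):
        cmds.append(name)            # package-style
    elif name.endswith(".py"):
        cmds.append(name[:-3])       # module-style
print(sorted(cmds))  # -> ['boards', 'debug']
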
@@ -15,8 +15,9 @@
 import json
 
 import click
+from tabulate import tabulate
 
-from platformio import util
+from platformio import fs
 from platformio.compat import dump_json_to_unicode
 from platformio.managers.platform import PlatformManager
 
@@ -42,32 +43,18 @@ def cli(query, installed, json_output):  # pylint: disable=R0912
             click.echo("")
             click.echo("Platform: ", nl=False)
             click.secho(platform, bold=True)
-            click.echo("-" * terminal_width)
+            click.echo("=" * terminal_width)
             print_boards(boards)
     return True
 
 
 def print_boards(boards):
-    terminal_width, _ = click.get_terminal_size()
-    BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
-                     " {flash:<7} {ram:<6} {name}")
     click.echo(
-        BOARDLIST_TPL.format(type=click.style("ID", fg="cyan"),
-                             mcu="MCU",
-                             frequency="Frequency",
-                             flash="Flash",
-                             ram="RAM",
-                             name="Name"))
-    click.echo("-" * terminal_width)
-
-    for board in boards:
-        click.echo(
-            BOARDLIST_TPL.format(type=click.style(board['id'], fg="cyan"),
-                                 mcu=board['mcu'],
-                                 frequency="%dMHz" % (board['fcpu'] / 1000000),
-                                 flash=util.format_filesize(board['rom']),
-                                 ram=util.format_filesize(board['ram']),
-                                 name=board['name']))
+        tabulate([(click.style(b['id'], fg="cyan"), b['mcu'], "%dMHz" %
+                   (b['fcpu'] / 1000000), fs.format_filesize(
+                       b['rom']), fs.format_filesize(b['ram']), b['name'])
+                  for b in boards],
+                 headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))
 
 
 def _get_boards(installed=False):

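print_boards() now hands column sizing to the tabulate package (also added to known_third_party in the isort settings at the top of this commit) instead of a hand-rolled format string. A minimal usage sketch with an illustrative board record:

from tabulate import tabulate

boards = [
    {"id": "uno", "mcu": "ATMEGA328P", "fcpu": 16000000,
     "rom": 32256, "ram": 2048, "name": "Arduino Uno"},  # illustrative data
]
rows = [(b["id"], b["mcu"], "%dMHz" % (b["fcpu"] / 1000000),
         "%.1fkB" % (b["rom"] / 1024.0), "%.1fkB" % (b["ram"] / 1024.0),
         b["name"]) for b in boards]
print(tabulate(rows, headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))
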
@@ -20,7 +20,7 @@ from tempfile import mkdtemp
 
 import click
 
-from platformio import app, util
+from platformio import app, fs
 from platformio.commands.init import cli as cmd_init
 from platformio.commands.init import validate_boards
 from platformio.commands.run import cli as cmd_run
@@ -89,7 +89,7 @@ def cli(  # pylint: disable=too-many-arguments, too-many-branches
         app.set_session_var("force_option", True)
 
         if not keep_build_dir and isdir(build_dir):
-            util.rmtree_(build_dir)
+            fs.rmtree(build_dir)
         if not isdir(build_dir):
             makedirs(build_dir)
 
@@ -119,7 +119,7 @@ def cli(  # pylint: disable=too-many-arguments, too-many-branches
         ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
     finally:
         if not keep_build_dir:
-            util.rmtree_(build_dir)
+            fs.rmtree(build_dir)
 
 
 def _copy_contents(dst_dir, contents):
@@ -161,7 +161,7 @@ def _exclude_contents(dst_dir, patterns):
     for path in contents:
         path = abspath(path)
         if isdir(path):
-            util.rmtree_(path)
+            fs.rmtree(path)
         elif isfile(path):
             remove(path)
 

@@ -26,7 +26,7 @@ from twisted.internet import reactor  # pylint: disable=import-error
 from twisted.internet import stdio  # pylint: disable=import-error
 from twisted.internet import task  # pylint: disable=import-error
 
-from platformio import app, exception, util
+from platformio import app, exception, fs, proc, util
 from platformio.commands.debug import helpers, initcfgs
 from platformio.commands.debug.process import BaseProcess
 from platformio.commands.debug.server import DebugServer
@@ -66,9 +66,9 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
         self._kill_previous_session()
 
         patterns = {
-            "PROJECT_DIR": helpers.escape_path(self.project_dir),
-            "PROG_PATH": helpers.escape_path(prog_path),
-            "PROG_DIR": helpers.escape_path(dirname(prog_path)),
+            "PROJECT_DIR": self.project_dir,
+            "PROG_PATH": prog_path,
+            "PROG_DIR": dirname(prog_path),
             "PROG_NAME": basename(splitext(prog_path)[0]),
             "DEBUG_PORT": self.debug_options['port'],
             "UPLOAD_PROTOCOL": self.debug_options['upload_protocol'],
@@ -157,6 +157,7 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
 
         banner = [
             "echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
+            "echo PlatformIO: debug_tool = %s\\n" % self.debug_options['tool'],
            "echo PlatformIO: Initializing remote target...\\n"
         ]
         footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
@@ -197,7 +198,7 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
     def processEnded(self, reason):  # pylint: disable=unused-argument
         self._unlock_session()
         if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
-            util.rmtree_(self._gdbsrc_dir)
+            fs.rmtree(self._gdbsrc_dir)
         if self._debug_server:
             self._debug_server.terminate()
 
@@ -252,8 +253,9 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
             return
         configuration = {"debug": self.debug_options, "env": self.env_options}
         exd = re.sub(r'\\(?!")', "/", json.dumps(configuration))
-        exd = re.sub(r'"(?:[a-z]\:)?((/[^"/]+)+)"', lambda m: '"%s"' % join(
-            *m.group(1).split("/")[-2:]), exd, re.I | re.M)
+        exd = re.sub(r'"(?:[a-z]\:)?((/[^"/]+)+)"',
+                     lambda m: '"%s"' % join(*m.group(1).split("/")[-2:]), exd,
+                     re.I | re.M)
         mp = MeasurementProtocol()
         mp['exd'] = "DebugGDBPioInitError: %s" % exd
         mp['exf'] = 1
@@ -273,7 +275,7 @@ class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes
         else:
             kill = ["kill", pid]
         try:
-            util.exec_command(kill)
+            proc.exec_command(kill)
         except:  # pylint: disable=bare-except
             pass
 

@@ -21,7 +21,7 @@ from os.path import isfile, join
 
 import click
 
-from platformio import exception, util
+from platformio import exception, fs, proc, util
 from platformio.commands.debug import helpers
 from platformio.managers.core import inject_contrib_pysite
 from platformio.project.config import ProjectConfig
@@ -61,7 +61,7 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface,
     if os.getenv(sysenv):
         project_dir = os.getenv(sysenv)
 
-    with util.cd(project_dir):
+    with fs.cd(project_dir):
         config = ProjectConfig.get_instance(
             project_conf or join(project_dir, "platformio.ini"))
         config.validate(envs=[environment] if environment else None)
@@ -83,16 +83,14 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface,
             "Could not load debug configuration")
 
     if "--version" in __unprocessed:
-        result = util.exec_command([configuration['gdb_path'], "--version"])
+        result = proc.exec_command([configuration['gdb_path'], "--version"])
         if result['returncode'] == 0:
             return click.echo(result['out'])
         raise exception.PlatformioException("\n".join(
             [result['out'], result['err']]))
 
     try:
-        util.ensure_udev_rules()
-    except NameError:
-        pass
+        fs.ensure_udev_rules()
     except exception.InvalidUdevRules as e:
         for line in str(e).split("\n") + [""]:
             click.echo(

@@ -44,10 +44,6 @@ def is_mi_mode(args):
     return "--interpreter" in " ".join(args)
 
 
-def escape_path(path):
-    return path.replace("\\", "/")
-
-
 def get_default_debug_env(config):
     default_envs = config.default_envs()
     all_envs = config.envs()
@@ -121,7 +117,7 @@ def validate_debug_options(cmd_ctx, env_options):
             cwd=server_package_dir if server_package else None,
             executable=tool_settings['server'].get("executable"),
             arguments=[
-                a.replace("$PACKAGE_DIR", escape_path(server_package_dir))
+                a.replace("$PACKAGE_DIR", server_package_dir)
                 if server_package_dir else a
                 for a in tool_settings['server'].get("arguments", [])
             ])
@@ -169,11 +165,11 @@ def configure_esp32_load_cmds(debug_options, configuration):
 
     mon_cmds = [
         'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
-            path=escape_path(item['path']), offset=item['offset'])
+            path=item['path'], offset=item['offset'])
         for item in configuration.get("flash_extra_images")
     ]
     mon_cmds.append('monitor program_esp32 "{%s.bin}" 0x10000 verify' %
-                    escape_path(configuration['prog_path'][:-4]))
+                    configuration['prog_path'][:-4])
     return mon_cmds
 
 

@@ -17,7 +17,6 @@ import signal
 import click
 from twisted.internet import protocol  # pylint: disable=import-error
 
-from platformio.commands.debug import helpers
 from platformio.compat import string_types
 from platformio.proc import get_pythonexe_path
 from platformio.project.helpers import get_project_core_dir
@@ -30,8 +29,8 @@ class BaseProcess(protocol.ProcessProtocol, object):
     STDOUT_CHUNK_SIZE = 2048
 
     COMMON_PATTERNS = {
-        "PLATFORMIO_HOME_DIR": helpers.escape_path(get_project_core_dir()),
-        "PLATFORMIO_CORE_DIR": helpers.escape_path(get_project_core_dir()),
+        "PLATFORMIO_HOME_DIR": get_project_core_dir(),
+        "PLATFORMIO_CORE_DIR": get_project_core_dir(),
         "PYTHONEXE": get_pythonexe_path()
     }
 

@@ -19,7 +19,6 @@ from twisted.internet import error  # pylint: disable=import-error
 from twisted.internet import reactor  # pylint: disable=import-error
 
 from platformio import exception, util
-from platformio.commands.debug import helpers
 from platformio.commands.debug.process import BaseProcess
 from platformio.proc import where_is_program
 
@@ -67,15 +66,15 @@ class DebugServer(BaseProcess):
         if openocd_pipe_allowed:
             args = []
             if server['cwd']:
-                args.extend(["-s", helpers.escape_path(server['cwd'])])
+                args.extend(["-s", server['cwd']])
             args.extend([
                 "-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"
             ])
             args.extend(server['arguments'])
             str_args = " ".join(
                 [arg if arg.startswith("-") else '"%s"' % arg for arg in args])
-            self._debug_port = '| "%s" %s' % (
-                helpers.escape_path(server_executable), str_args)
+            self._debug_port = '| "%s" %s' % (server_executable, str_args)
+            self._debug_port = self._debug_port.replace("\\", "\\\\")
         else:
             env = os.environ.copy()
             # prepend server "lib" folder to LD path

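This hunk, together with the removal of helpers.escape_path() above, is the fix for the piped-openOCD escaping issue from the release notes: paths are no longer force-converted to forward slashes everywhere; instead, the backslashes in the "| <executable> <args>" pipe command handed to GDB are doubled once, at the point where GDB actually interprets escapes. A small illustration with a hypothetical Windows install path:

# Hypothetical openOCD location; GDB parses "\" escapes inside the
# "target remote | ..." command string, so literal backslashes must be doubled.
server_executable = "C:\\tools\\openocd\\bin\\openocd.exe"
args = ["-s", "C:\\tools\\openocd\\scripts",
        "-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]

str_args = " ".join(a if a.startswith("-") else '"%s"' % a for a in args)
debug_port = '| "%s" %s' % (server_executable, str_args)
debug_port = debug_port.replace("\\", "\\\\")  # same step as the new code above
print(debug_port)
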
@@ -25,6 +25,11 @@ class AppRPC(object):
 
     APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
 
+    IGNORE_STORAGE_KEYS = [
+        "cid", "coreVersion", "coreSystype", "coreCaller", "coreSettings",
+        "homeDir", "projectsDir"
+    ]
+
     @staticmethod
     def load_state():
         with app.State(AppRPC.APPSTATE_PATH, lock=True) as state:
@@ -57,6 +62,7 @@ class AppRPC(object):
         ]
 
         state['storage'] = storage
+        state.modified = False  # skip saving extra fields
         return state.as_dict()
 
     @staticmethod
@@ -66,6 +72,10 @@ class AppRPC(object):
     @staticmethod
     def save_state(state):
         with app.State(AppRPC.APPSTATE_PATH, lock=True) as s:
-            # s.clear()
+            s.clear()
             s.update(state)
+            storage = s.get("storage", {})
+            for k in AppRPC.IGNORE_STORAGE_KEYS:
+                if k in storage:
+                    del storage[k]
         return True

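save_state() now replaces the persisted PIO Home state wholesale (s.clear() before s.update(), using the State.clear() method added earlier in this commit) and strips transient keys that should never be written back to homestate.json. A standalone sketch of the same idea with a plain dict standing in for the State object:

IGNORE_STORAGE_KEYS = ["cid", "coreVersion", "coreSystype", "coreCaller",
                       "coreSettings", "homeDir", "projectsDir"]

def save_state(persisted, incoming):
    # Replace the stored dict entirely, then drop keys that are derived at
    # runtime and must not be persisted.
    persisted.clear()
    persisted.update(incoming)
    storage = persisted.get("storage", {})
    for key in IGNORE_STORAGE_KEYS:
        storage.pop(key, None)
    return True

state = {}
save_state(state, {"storage": {"cid": "abc", "theme": "dark"}})
print(state)  # -> {'storage': {'theme': 'dark'}}
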
@@ -21,22 +21,24 @@ from twisted.internet import defer  # pylint: disable=import-error
 class IDERPC(object):
 
     def __init__(self):
-        self._queue = []
+        self._queue = {}
 
-    def send_command(self, command, params):
-        if not self._queue:
+    def send_command(self, command, params, sid=0):
+        if not self._queue.get(sid):
             raise jsonrpc.exceptions.JSONRPCDispatchException(
                 code=4005, message="PIO Home IDE agent is not started")
-        while self._queue:
-            self._queue.pop().callback({
+        while self._queue[sid]:
+            self._queue[sid].pop().callback({
                 "id": time.time(),
                 "method": command,
                 "params": params
             })
 
-    def listen_commands(self):
-        self._queue.append(defer.Deferred())
-        return self._queue[-1]
+    def listen_commands(self, sid=0):
+        if sid not in self._queue:
+            self._queue[sid] = []
+        self._queue[sid].append(defer.Deferred())
+        return self._queue[sid][-1]
 
-    def open_project(self, project_dir):
-        return self.send_command("open_project", project_dir)
+    def open_project(self, project_dir, sid=0):
+        return self.send_command("open_project", project_dir, sid)

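IDERPC's queue of pending listeners is now keyed by a session id, so several IDE agents can wait for commands independently instead of sharing one list. A minimal sketch of the same pattern with plain callbacks standing in for Twisted Deferreds (CommandBus is an illustrative name, not part of the code base):

class CommandBus(object):
    def __init__(self):
        self._queue = {}  # sid -> list of waiting callbacks

    def listen(self, callback, sid=0):
        self._queue.setdefault(sid, []).append(callback)

    def send(self, command, params, sid=0):
        if not self._queue.get(sid):
            raise RuntimeError("no agent is listening on session %r" % sid)
        while self._queue[sid]:
            self._queue[sid].pop()(dict(method=command, params=params))

bus = CommandBus()
bus.listen(print, sid=1)
bus.send("open_project", "/tmp/project", sid=1)
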
@@ -21,10 +21,11 @@ from io import BytesIO, StringIO

 import click
 import jsonrpc  # pylint: disable=import-error
+from twisted.internet import defer  # pylint: disable=import-error
 from twisted.internet import threads  # pylint: disable=import-error
 from twisted.internet import utils  # pylint: disable=import-error

-from platformio import __main__, __version__, util
+from platformio import __main__, __version__, fs
 from platformio.commands.home import helpers
 from platformio.compat import (PY2, get_filesystem_encoding, is_bytes,
                                string_types)
@@ -68,6 +69,10 @@ class MultiThreadingStdStream(object):

 class PIOCoreRPC(object):

+    @staticmethod
+    def version():
+        return __version__
+
     @staticmethod
     def setup_multithreading_std_streams():
         if isinstance(sys.stdout, MultiThreadingStdStream):
@@ -79,41 +84,67 @@ class PIOCoreRPC(object):

     @staticmethod
     def call(args, options=None):
-        PIOCoreRPC.setup_multithreading_std_streams()
-        cwd = (options or {}).get("cwd") or os.getcwd()
+        return defer.maybeDeferred(PIOCoreRPC._call_generator, args, options)
+
+    @staticmethod
+    @defer.inlineCallbacks
+    def _call_generator(args, options=None):
         for i, arg in enumerate(args):
             if isinstance(arg, string_types):
                 args[i] = arg.encode(get_filesystem_encoding()) if PY2 else arg
             else:
                 args[i] = str(arg)

-        def _call_inline():
-            with util.cd(cwd):
+        to_json = "--json-output" in args
+
+        try:
+            if args and args[0] in ("account", "remote"):
+                result = yield PIOCoreRPC._call_subprocess(args, options)
+                defer.returnValue(PIOCoreRPC._process_result(result, to_json))
+            else:
+                result = yield PIOCoreRPC._call_inline(args, options)
+                try:
+                    defer.returnValue(
+                        PIOCoreRPC._process_result(result, to_json))
+                except ValueError:
+                    # fall-back to subprocess method
+                    result = yield PIOCoreRPC._call_subprocess(args, options)
+                    defer.returnValue(
+                        PIOCoreRPC._process_result(result, to_json))
+        except Exception as e:  # pylint: disable=bare-except
+            raise jsonrpc.exceptions.JSONRPCDispatchException(
+                code=4003, message="PIO Core Call Error", data=str(e))

+    @staticmethod
+    def _call_inline(args, options):
+        PIOCoreRPC.setup_multithreading_std_streams()
+        cwd = (options or {}).get("cwd") or os.getcwd()

+        def _thread_task():
+            with fs.cd(cwd):
                 exit_code = __main__.main(["-c"] + args)
             return (PIOCoreRPC.thread_stdout.get_value_and_reset(),
                     PIOCoreRPC.thread_stderr.get_value_and_reset(), exit_code)

-        if args and args[0] in ("account", "remote"):
-            d = utils.getProcessOutputAndValue(
-                helpers.get_core_fullpath(),
-                args,
-                path=cwd,
-                env={k: v
-                     for k, v in os.environ.items() if "%" not in k})
-        else:
-            d = threads.deferToThread(_call_inline)
-
-        d.addCallback(PIOCoreRPC._call_callback, "--json-output" in args)
-        d.addErrback(PIOCoreRPC._call_errback)
-        return d
+        return threads.deferToThread(_thread_task)

     @staticmethod
-    def _call_callback(result, json_output=False):
+    def _call_subprocess(args, options):
+        cwd = (options or {}).get("cwd") or os.getcwd()
+        return utils.getProcessOutputAndValue(
+            helpers.get_core_fullpath(),
+            args,
+            path=cwd,
+            env={k: v
+                 for k, v in os.environ.items() if "%" not in k})
+
+    @staticmethod
+    def _process_result(result, to_json=False):
         out, err, code = result
         text = ("%s\n\n%s" % (out, err)).strip()
         if code != 0:
             raise Exception(text)
-        if not json_output:
+        if not to_json:
             return text
         try:
             return json.loads(out)
@@ -129,14 +160,3 @@ class PIOCoreRPC(object):
                 except ValueError:
                     pass
             raise e

-    @staticmethod
-    def _call_errback(failure):
-        raise jsonrpc.exceptions.JSONRPCDispatchException(
-            code=4003,
-            message="PIO Core Call Error",
-            data=failure.getErrorMessage())
-
-    @staticmethod
-    def version():
-        return __version__
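The refactored `call()` above dispatches "account" and "remote" commands to a subprocess and runs everything else in-process first, falling back to a subprocess when the inline result cannot be parsed. A rough synchronous illustration of that decision logic (the `run_inline`/`run_subprocess` helpers below are made-up stand-ins, not real PlatformIO APIs):

```python
# Rough sketch of the dispatch logic in PIOCoreRPC._call_generator above,
# written synchronously for clarity; helper names are illustrative only.
import json


def run_inline(args):
    # pretend in-process run; returns (stdout, stderr, exit_code)
    return ('{"ok": true}', "", 0)


def run_subprocess(args):
    return ('{"ok": true, "via": "subprocess"}', "", 0)


def process_result(result, to_json):
    out, err, code = result
    text = ("%s\n\n%s" % (out, err)).strip()
    if code != 0:
        raise Exception(text)
    return json.loads(out) if to_json else text


def call(args):
    to_json = "--json-output" in args
    if args and args[0] in ("account", "remote"):
        return process_result(run_subprocess(args), to_json)
    try:
        return process_result(run_inline(args), to_json)
    except ValueError:
        # fall back to a subprocess when inline output is not valid JSON
        return process_result(run_subprocess(args), to_json)


print(call(["lib", "stats", "--json-output"]))
```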
@@ -22,7 +22,7 @@ from os.path import (basename, expanduser, getmtime, isdir, isfile, join,

 import jsonrpc  # pylint: disable=import-error

-from platformio import exception, util
+from platformio import exception, fs
 from platformio.commands.home.rpc.handlers.app import AppRPC
 from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
 from platformio.compat import PY2, get_filesystem_encoding
@@ -77,7 +77,7 @@ class ProjectRPC(object):
             data = {}
             boards = []
             try:
-                with util.cd(project_dir):
+                with fs.cd(project_dir):
                     data = _get_project_data(project_dir)
             except exception.PlatformIOProjectException:
                 continue
@@ -86,7 +86,7 @@ class ProjectRPC(object):
                 name = board_id
                 try:
                     name = pm.board_config(board_id)['name']
-                except (exception.UnknownBoard, exception.UnknownPlatform):
+                except exception.PlatformioException:
                     pass
                 boards.append({"id": board_id, "name": name})

@@ -196,7 +196,7 @@ class ProjectRPC(object):
         ])  # yapf: disable
         if not main_content:
             return project_dir
-        with util.cd(project_dir):
+        with fs.cd(project_dir):
             src_dir = get_project_src_dir()
             main_path = join(src_dir, "main.cpp")
             if isfile(main_path):
@@ -249,10 +249,10 @@ class ProjectRPC(object):

     @staticmethod
     def _finalize_arduino_import(_, project_dir, arduino_project_dir):
-        with util.cd(project_dir):
+        with fs.cd(project_dir):
             src_dir = get_project_src_dir()
             if isdir(src_dir):
-                util.rmtree_(src_dir)
+                fs.rmtree(src_dir)
             shutil.copytree(arduino_project_dir, src_dir)
         return project_dir

@@ -19,7 +19,7 @@ from os.path import isdir, isfile, join

 import click

-from platformio import exception, util
+from platformio import exception, fs
 from platformio.commands.platform import \
     platform_install as cli_platform_install
 from platformio.ide.projectgenerator import ProjectGenerator
@@ -102,8 +102,7 @@ def cli(
         ide is not None)

     if ide:
-        pg = ProjectGenerator(project_dir, ide,
-                              get_best_envname(project_dir, board))
+        pg = ProjectGenerator(project_dir, ide, board)
         pg.generate()

     if is_new_project:
@@ -131,32 +130,9 @@ def cli(
             fg="green")


-def get_best_envname(project_dir, boards=None):
-    config = ProjectConfig.get_instance(join(project_dir, "platformio.ini"))
-    config.validate()
-
-    envname = None
-    default_envs = config.default_envs()
-    if default_envs:
-        envname = default_envs[0]
-        if not boards:
-            return envname
-
-    for env in config.envs():
-        if not boards:
-            return env
-        if not envname:
-            envname = env
-        items = config.items(env=env, as_dict=True)
-        if "board" in items and items.get("board") in boards:
-            return env
-
-    return envname
-
-
 def init_base_project(project_dir):
     ProjectConfig(join(project_dir, "platformio.ini")).save()
-    with util.cd(project_dir):
+    with fs.cd(project_dir):
         dir_to_readme = [
             (get_project_src_dir(), None),
             (get_project_include_dir(), init_include_readme),
@@ -19,8 +19,9 @@ from os.path import isdir, join

 import click
 import semantic_version
+from tabulate import tabulate

-from platformio import exception, util
+from platformio import exception, fs, util
 from platformio.commands import PlatformioCLI
 from platformio.compat import dump_json_to_unicode
 from platformio.managers.lib import (LibraryManager, get_builtin_libs,
@@ -99,7 +100,7 @@ def cli(ctx, **options):
         if not is_platformio_project(storage_dir):
             ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
             continue
-        with util.cd(storage_dir):
+        with fs.cd(storage_dir):
             libdeps_dir = get_project_libdeps_dir()
         config = ProjectConfig.get_instance(join(storage_dir,
                                                  "platformio.ini"))
@@ -486,66 +487,48 @@ def lib_stats(json_output):
     if json_output:
         return click.echo(dump_json_to_unicode(result))

-    printitem_tpl = "{name:<33} {url}"
-    printitemdate_tpl = "{name:<33} {date:23} {url}"
-
-    def _print_title(title):
-        click.secho(title.upper(), bold=True)
-        click.echo("*" * len(title))
-
-    def _print_header(with_date=False):
-        click.echo((printitemdate_tpl if with_date else printitem_tpl).format(
-            name=click.style("Name", fg="cyan"),
-            date="Date",
-            url=click.style("Url", fg="blue")))
-
-        terminal_width, _ = click.get_terminal_size()
-        click.echo("-" * terminal_width)
-
-    def _print_lib_item(item):
-        date = str(
-            time.strftime("%c", util.parse_date(item['date'])) if "date" in
-            item else "")
-        url = click.style("https://platformio.org/lib/show/%s/%s" %
-                          (item['id'], quote(item['name'])),
-                          fg="blue")
-        click.echo(
-            (printitemdate_tpl if "date" in item else printitem_tpl).format(
-                name=click.style(item['name'], fg="cyan"), date=date, url=url))
-
-    def _print_tag_item(name):
-        click.echo(
-            printitem_tpl.format(
-                name=click.style(name, fg="cyan"),
-                url=click.style("https://platformio.org/lib/search?query=" +
-                                quote("keyword:%s" % name),
-                                fg="blue")))
-
     for key in ("updated", "added"):
-        _print_title("Recently " + key)
-        _print_header(with_date=True)
-        for item in result.get(key, []):
-            _print_lib_item(item)
+        tabular_data = [(click.style(item['name'], fg="cyan"),
+                         time.strftime("%c", util.parse_date(item['date'])),
+                         "https://platformio.org/lib/show/%s/%s" %
+                         (item['id'], quote(item['name'])))
+                        for item in result.get(key, [])]
+        table = tabulate(tabular_data,
+                         headers=[
+                             click.style("RECENTLY " + key.upper(), bold=True),
+                             "Date", "URL"
+                         ])
+        click.echo(table)
         click.echo()

-    _print_title("Recent keywords")
-    _print_header(with_date=False)
-    for item in result.get("lastkeywords"):
-        _print_tag_item(item)
-    click.echo()
-
-    _print_title("Popular keywords")
-    _print_header(with_date=False)
-    for item in result.get("topkeywords"):
-        _print_tag_item(item)
-    click.echo()
+    for key in ("lastkeywords", "topkeywords"):
+        tabular_data = [(click.style(name, fg="cyan"),
+                         "https://platformio.org/lib/search?query=" +
+                         quote("keyword:%s" % name))
+                        for name in result.get(key, [])]
+        table = tabulate(
+            tabular_data,
+            headers=[
+                click.style(
+                    ("RECENT" if key == "lastkeywords" else "POPULAR") +
+                    " KEYWORDS",
+                    bold=True), "URL"
+            ])
+        click.echo(table)
+        click.echo()

     for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
                                                                 "Month")):
-        _print_title("Featured: " + title)
-        _print_header(with_date=False)
-        for item in result.get(key, []):
-            _print_lib_item(item)
+        tabular_data = [(click.style(item['name'], fg="cyan"),
+                         "https://platformio.org/lib/show/%s/%s" %
+                         (item['id'], quote(item['name'])))
+                        for item in result.get(key, [])]
+        table = tabulate(tabular_data,
+                         headers=[
+                             click.style("FEATURED: " + title.upper(),
+                                         bold=True), "URL"
+                         ])
+        click.echo(table)
         click.echo()

     return True
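Several commands in this release (the "Improved printing of tabulated results" item in the release notes) switch from hand-rolled format strings to the `tabulate` package, which is also added to `known_third_party`. A small example of the call pattern used above, with made-up sample rows:

```python
# Example of the tabulate() call pattern used by `pio lib stats` above.
# The rows are illustrative sample data, not real registry output.
from tabulate import tabulate

rows = [
    ("ArduinoJson", "2019-08-20",
     "https://platformio.org/lib/show/64/ArduinoJson"),
    ("PubSubClient", "2019-08-18",
     "https://platformio.org/lib/show/89/PubSubClient"),
]
print(tabulate(rows, headers=["RECENTLY UPDATED", "Date", "URL"]))
```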
@@ -21,7 +21,7 @@ from time import sleep

 import click

-from platformio import exception, util
+from platformio import exception, fs
 from platformio.commands.device import device_monitor as cmd_device_monitor
 from platformio.compat import get_file_contents
 from platformio.managers.core import pioplus_call
@@ -202,4 +202,4 @@ def device_monitor(ctx, **kwargs):
             ctx.invoke(cmd_device_monitor, **kwargs)
             t.join(2)
     finally:
-        util.rmtree_(sock_dir)
+        fs.rmtree(sock_dir)
@@ -13,4 +13,3 @@
 # limitations under the License.

 from platformio.commands.run.command import cli
-from platformio.commands.run.helpers import print_header
@@ -18,13 +18,14 @@ from os.path import isfile, join
 from time import time

 import click
+from tabulate import tabulate

-from platformio import exception, util
+from platformio import exception, fs, util
 from platformio.commands.device import device_monitor as cmd_device_monitor
 from platformio.commands.run.helpers import (clean_build_dir,
-                                             handle_legacy_libdeps,
-                                             print_summary)
+                                             handle_legacy_libdeps)
 from platformio.commands.run.processor import EnvironmentProcessor
+from platformio.commands.test.processor import CTX_META_TEST_IS_RUNNING
 from platformio.project.config import ProjectConfig
 from platformio.project.helpers import (find_project_dir_above,
                                         get_project_build_dir)
@@ -73,11 +74,17 @@ def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
     if isfile(project_dir):
         project_dir = find_project_dir_above(project_dir)

-    with util.cd(project_dir):
+    is_test_running = CTX_META_TEST_IS_RUNNING in ctx.meta
+
+    with fs.cd(project_dir):
+        config = ProjectConfig.get_instance(
+            project_conf or join(project_dir, "platformio.ini"))
+        config.validate(environment)
+
         # clean obsolete build dir
         if not disable_auto_clean:
             try:
-                clean_build_dir(get_project_build_dir())
+                clean_build_dir(get_project_build_dir(), config)
             except:  # pylint: disable=bare-except
                 click.secho(
                     "Can not remove temporary directory `%s`. Please remove "
@@ -85,44 +92,114 @@ def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
                     get_project_build_dir(force=True),
                     fg="yellow")

-        config = ProjectConfig.get_instance(
-            project_conf or join(project_dir, "platformio.ini"))
-        config.validate(environment)
-
         handle_legacy_libdeps(project_dir, config)

-        results = []
-        start_time = time()
         default_envs = config.default_envs()
-        for envname in config.envs():
+        results = []
+        for env in config.envs():
             skipenv = any([
-                environment and envname not in environment, not environment
-                and default_envs and envname not in default_envs
+                environment and env not in environment, not environment
+                and default_envs and env not in default_envs
             ])
             if skipenv:
-                results.append((envname, None))
+                results.append({"env": env})
                 continue

-            if not silent and any(status is not None
-                                  for (_, status) in results):
+            # print empty line between multi environment project
+            if not silent and any(
+                    r.get("succeeded") is not None for r in results):
                 click.echo()

-            ep = EnvironmentProcessor(ctx, envname, config, target,
-                                      upload_port, silent, verbose, jobs)
-            result = (envname, ep.process())
-            results.append(result)
+            results.append(
+                process_env(ctx, env, config, environment, target, upload_port,
+                            silent, verbose, jobs, is_test_running))

-            if result[1] and "monitor" in ep.get_build_targets() and \
-                    "nobuild" not in ep.get_build_targets():
-                ctx.invoke(cmd_device_monitor,
-                           environment=environment[0] if environment else None)
+        command_failed = any(r.get("succeeded") is False for r in results)

-        found_error = any(status is False for (_, status) in results)
+        if (not is_test_running and (command_failed or not silent)
+                and len(results) > 1):
+            print_processing_summary(results)

-        if (found_error or not silent) and len(results) > 1:
-            click.echo()
-            print_summary(results, start_time)
-
-        if found_error:
+        if command_failed:
             raise exception.ReturnErrorCode(1)
         return True

+
+def process_env(ctx, name, config, environments, targets, upload_port, silent,
+                verbose, jobs, is_test_running):
+    if not is_test_running and not silent:
+        print_processing_header(name, config, verbose)
+
+    ep = EnvironmentProcessor(ctx, name, config, targets, upload_port, silent,
+                              verbose, jobs)
+    result = {"env": name, "duration": time(), "succeeded": ep.process()}
+    result['duration'] = time() - result['duration']
+
+    # print footer on error or when is not unit testing
+    if not is_test_running and (not silent or not result['succeeded']):
+        print_processing_footer(result)
+
+    if (result['succeeded'] and "monitor" in ep.get_build_targets()
+            and "nobuild" not in ep.get_build_targets()):
+        ctx.invoke(cmd_device_monitor,
+                   environment=environments[0] if environments else None)
+
+    return result
+
+
+def print_processing_header(env, config, verbose=False):
+    env_dump = []
+    for k, v in config.items(env=env):
+        if verbose or k in ("platform", "framework", "board"):
+            env_dump.append("%s: %s" %
+                            (k, ", ".join(v) if isinstance(v, list) else v))
+    click.echo("Processing %s (%s)" %
+               (click.style(env, fg="cyan", bold=True), "; ".join(env_dump)))
+    terminal_width, _ = click.get_terminal_size()
+    click.secho("-" * terminal_width, bold=True)
+
+
+def print_processing_footer(result):
+    is_failed = not result.get("succeeded")
+    util.print_labeled_bar(
+        "[%s] Took %.2f seconds" %
+        ((click.style("FAILED", fg="red", bold=True) if is_failed else
+          click.style("SUCCESS", fg="green", bold=True)), result['duration']),
+        is_error=is_failed)
+
+
+def print_processing_summary(results):
+    tabular_data = []
+    succeeded_nums = 0
+    failed_nums = 0
+    duration = 0
+
+    for result in results:
+        duration += result.get("duration", 0)
+        if result.get("succeeded") is False:
+            failed_nums += 1
+            status_str = click.style("FAILED", fg="red")
+        elif result.get("succeeded") is None:
+            status_str = "IGNORED"
+        else:
+            succeeded_nums += 1
+            status_str = click.style("SUCCESS", fg="green")
+
+        tabular_data.append(
+            (click.style(result['env'], fg="cyan"), status_str,
+             util.humanize_duration_time(result.get("duration"))))
+
+    click.echo()
+    click.echo(tabulate(tabular_data,
+                        headers=[
+                            click.style(s, bold=True)
+                            for s in ("Environment", "Status", "Duration")
+                        ]),
+               err=failed_nums)
+
+    util.print_labeled_bar(
+        "%s%d succeeded in %s" %
+        ("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
+         util.humanize_duration_time(duration)),
+        is_error=failed_nums,
+        fg="red" if failed_nums else "green")
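`process_env()` above now returns a dict per environment instead of a `(name, status)` tuple, and the summary helpers key off `"succeeded"` being `True`, `False`, or missing/`None` for skipped environments. A minimal sketch of consuming that structure, with sample data only:

```python
# Sketch of the per-environment result dicts produced by process_env() above
# and how print_processing_summary() classifies them. Data is illustrative.
results = [
    {"env": "uno", "succeeded": True, "duration": 3.2},
    {"env": "esp32dev", "succeeded": False, "duration": 5.7},
    {"env": "native"},  # skipped environment: no "succeeded" key
]

failed = sum(1 for r in results if r.get("succeeded") is False)
succeeded = sum(1 for r in results if r.get("succeeded"))
ignored = sum(1 for r in results if r.get("succeeded") is None)
total = sum(r.get("duration", 0) for r in results)

print("%d succeeded, %d failed, %d ignored in %.2f seconds"
      % (succeeded, failed, ignored, total))
```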
@@ -13,13 +13,12 @@
 # limitations under the License.

 from os import makedirs
-from os.path import getmtime, isdir, isfile, join
-from time import time
+from os.path import isdir, isfile, join

 import click

-from platformio import util
-from platformio.project.helpers import (calculate_project_hash,
+from platformio import fs
+from platformio.project.helpers import (compute_project_checksum,
                                         get_project_dir,
                                         get_project_libdeps_dir)

@@ -43,67 +42,23 @@ def handle_legacy_libdeps(project_dir, config):
             fg="yellow")


-def clean_build_dir(build_dir):
+def clean_build_dir(build_dir, config):
     # remove legacy ".pioenvs" folder
     legacy_build_dir = join(get_project_dir(), ".pioenvs")
     if isdir(legacy_build_dir) and legacy_build_dir != build_dir:
-        util.rmtree_(legacy_build_dir)
+        fs.rmtree(legacy_build_dir)

-    structhash_file = join(build_dir, "structure.hash")
-    proj_hash = calculate_project_hash()
+    checksum_file = join(build_dir, "project.checksum")
+    checksum = compute_project_checksum(config)

-    # if project's config is modified
-    if (isdir(build_dir) and getmtime(join(
-            get_project_dir(), "platformio.ini")) > getmtime(build_dir)):
-        util.rmtree_(build_dir)
+    if isdir(build_dir):
+        # check project structure
+        if isfile(checksum_file):
+            with open(checksum_file) as f:
+                if f.read() == checksum:
+                    return
+        fs.rmtree(build_dir)

-    # check project structure
-    if isdir(build_dir) and isfile(structhash_file):
-        with open(structhash_file) as f:
-            if f.read() == proj_hash:
-                return
-        util.rmtree_(build_dir)
-
-    if not isdir(build_dir):
-        makedirs(build_dir)
-
-    with open(structhash_file, "w") as f:
-        f.write(proj_hash)
-
-
-def print_header(label, is_error=False, fg=None):
-    terminal_width, _ = click.get_terminal_size()
-    width = len(click.unstyle(label))
-    half_line = "=" * int((terminal_width - width - 2) / 2)
-    click.secho("%s %s %s" % (half_line, label, half_line),
-                fg=fg,
-                err=is_error)
-
-
-def print_summary(results, start_time):
-    print_header("[%s]" % click.style("SUMMARY"))
-
-    succeeded_nums = 0
-    failed_nums = 0
-    envname_max_len = max(
-        [len(click.style(envname, fg="cyan")) for (envname, _) in results])
-    for (envname, status) in results:
-        if status is False:
-            failed_nums += 1
-            status_str = click.style("FAILED", fg="red")
-        elif status is None:
-            status_str = click.style("IGNORED", fg="yellow")
-        else:
-            succeeded_nums += 1
-            status_str = click.style("SUCCESS", fg="green")
-
-        format_str = "Environment {0:<%d}\t[{1}]" % envname_max_len
-        click.echo(format_str.format(click.style(envname, fg="cyan"),
-                                     status_str),
-                   err=status is False)
-
-    print_header("%s%d succeeded in %.2f seconds" %
-                 ("%d failed, " % failed_nums if failed_nums else "",
-                  succeeded_nums, time() - start_time),
-                 is_error=failed_nums,
-                 fg="red" if failed_nums else "green")
+    makedirs(build_dir)
+    with open(checksum_file, "w") as f:
+        f.write(checksum)
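`clean_build_dir()` above now stores a `project.checksum` file inside the build directory and wipes the directory only when the stored value differs from `compute_project_checksum(config)`, instead of comparing `platformio.ini` modification times (the "avoid unnecessary rebuilding" item in the release notes). The general pattern, reduced to a standalone sketch where `compute_checksum()` is a placeholder rather than the real project-hash logic:

```python
# Standalone sketch of the "rebuild only when the checksum changed" pattern
# used by clean_build_dir() above. compute_checksum() is a placeholder.
import hashlib
import os
import shutil


def compute_checksum(config_text):
    return hashlib.sha1(config_text.encode("utf8")).hexdigest()


def clean_build_dir(build_dir, config_text):
    checksum_file = os.path.join(build_dir, "project.checksum")
    checksum = compute_checksum(config_text)
    if os.path.isdir(build_dir):
        if os.path.isfile(checksum_file):
            with open(checksum_file) as fp:
                if fp.read() == checksum:
                    return  # nothing changed, keep cached build artifacts
        shutil.rmtree(build_dir)
    os.makedirs(build_dir)
    with open(checksum_file, "w") as fp:
        fp.write(checksum)
```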
@@ -12,16 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from time import time
-
-import click
-
 from platformio import exception, telemetry
 from platformio.commands.platform import \
     platform_install as cmd_platform_install
-from platformio.commands.run.helpers import print_header
-from platformio.commands.test.processor import (CTX_META_TEST_IS_RUNNING,
-                                                CTX_META_TEST_RUNNING_NAME)
+from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
 from platformio.managers.platform import PlatformFactory

 # pylint: disable=too-many-instance-attributes
@@ -29,8 +23,6 @@ from platformio.managers.platform import PlatformFactory

 class EnvironmentProcessor(object):

-    DEFAULT_PRINT_OPTIONS = ("platform", "framework", "board")
-
     def __init__(  # pylint: disable=too-many-arguments
             self, cmd_ctx, name, config, targets, upload_port, silent, verbose,
             jobs):
@@ -44,37 +36,6 @@ class EnvironmentProcessor(object):
         self.jobs = jobs
         self.options = config.items(env=name, as_dict=True)

-    def process(self):
-        terminal_width, _ = click.get_terminal_size()
-        start_time = time()
-        env_dump = []
-
-        for k, v in self.options.items():
-            if self.verbose or k in self.DEFAULT_PRINT_OPTIONS:
-                env_dump.append(
-                    "%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v))
-
-        if not self.silent:
-            click.echo("Processing %s (%s)" % (click.style(
-                self.name, fg="cyan", bold=True), "; ".join(env_dump)))
-            click.secho("-" * terminal_width, bold=True)
-
-        result = self._run_platform()
-        is_error = result['returncode'] != 0
-
-        if self.silent and not is_error:
-            return True
-
-        if is_error or CTX_META_TEST_IS_RUNNING not in self.cmd_ctx.meta:
-            print_header(
-                "[%s] Took %.2f seconds" %
-                ((click.style("ERROR", fg="red", bold=True) if
-                  is_error else click.style("SUCCESS", fg="green", bold=True)),
-                 time() - start_time),
-                is_error=is_error)
-
-        return not is_error
-
     def get_build_variables(self):
         variables = {"pioenv": self.name, "project_config": self.config.path}

@@ -92,7 +53,7 @@ class EnvironmentProcessor(object):
             return [t for t in self.targets]
         return self.config.get("env:" + self.name, "targets", [])

-    def _run_platform(self):
+    def process(self):
         if "platform" not in self.options:
             raise exception.UndefinedEnvPlatform(self.name)

@@ -113,5 +74,6 @@ class EnvironmentProcessor(object):
                                      skip_default_package=True)
         p = PlatformFactory.newPlatform(self.options['platform'])

-        return p.run(build_vars, build_targets, self.silent, self.verbose,
-                     self.jobs)
+        result = p.run(build_vars, build_targets, self.silent, self.verbose,
+                       self.jobs)
+        return result['returncode'] == 0
@@ -13,11 +13,20 @@
 # limitations under the License.

 import click
+from tabulate import tabulate

 from platformio import app
 from platformio.compat import string_types


+def format_value(raw):
+    if isinstance(raw, bool):
+        return "Yes" if raw else "No"
+    if isinstance(raw, string_types):
+        return raw
+    return str(raw)
+
+
 @click.group(short_help="Manage PlatformIO settings")
 def cli():
     pass
@@ -26,40 +35,27 @@ def cli():
 @cli.command("get", short_help="Get existing setting/-s")
 @click.argument("name", required=False)
 def settings_get(name):
-    list_tpl = u"{name:<40} {value:<35} {description}"
-    terminal_width, _ = click.get_terminal_size()
+    tabular_data = []
+    for key, options in sorted(app.DEFAULT_SETTINGS.items()):
+        if name and name != key:
+            continue
+        raw_value = app.get_setting(key)
+        formatted_value = format_value(raw_value)
+
+        if raw_value != options['value']:
+            default_formatted_value = format_value(options['value'])
+            formatted_value += "%s" % (
+                "\n" if len(default_formatted_value) > 10 else " ")
+            formatted_value += "[%s]" % click.style(default_formatted_value,
+                                                    fg="yellow")
+
+        tabular_data.append(
+            (click.style(key,
+                         fg="cyan"), formatted_value, options['description']))

     click.echo(
-        list_tpl.format(name=click.style("Name", fg="cyan"),
-                        value=(click.style("Value", fg="green") +
-                               click.style(" [Default]", fg="yellow")),
-                        description="Description"))
-    click.echo("-" * terminal_width)
-
-    for _name, _data in sorted(app.DEFAULT_SETTINGS.items()):
-        if name and name != _name:
-            continue
-        _value = app.get_setting(_name)
-
-        _value_str = (str(_value)
-                      if not isinstance(_value, string_types) else _value)
-        if isinstance(_value, bool):
-            _value_str = "Yes" if _value else "No"
-        _value_str = click.style(_value_str, fg="green")
-
-        if _value != _data['value']:
-            _defvalue_str = str(_data['value'])
-            if isinstance(_data['value'], bool):
-                _defvalue_str = "Yes" if _data['value'] else "No"
-            _value_str += click.style(" [%s]" % _defvalue_str, fg="yellow")
-        else:
-            _value_str += click.style(" ", fg="yellow")
-
-        click.echo(
-            list_tpl.format(name=click.style(_name, fg="cyan"),
-                            value=_value_str,
-                            description=_data['description']))
+        tabulate(tabular_data,
+                 headers=["Name", "Current value [Default]", "Description"]))


 @cli.command("set", short_help="Set new value for the setting")
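`pio settings get` now renders its table with `tabulate` and a small `format_value()` helper that prints booleans as "Yes"/"No". A toy illustration of that formatting rule (Python 3 `str` used here instead of `string_types`, sample values only):

```python
# Toy illustration of the format_value() rule added in settings.py above.
def format_value(raw):
    if isinstance(raw, bool):
        return "Yes" if raw else "No"
    return raw if isinstance(raw, str) else str(raw)


print(format_value(True), format_value("30"), format_value(7))  # Yes 30 7
```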
@@ -20,9 +20,9 @@ from os.path import isdir, join
 from time import time

 import click
+from tabulate import tabulate

-from platformio import exception, util
-from platformio.commands.run.helpers import print_header
+from platformio import exception, fs, util
 from platformio.commands.test.embedded import EmbeddedTestProcessor
 from platformio.commands.test.native import NativeTestProcessor
 from platformio.project.config import ProjectConfig
@@ -76,7 +76,7 @@ def cli(  # pylint: disable=redefined-builtin
         ctx, environment, ignore, filter, upload_port, test_port, project_dir,
         project_conf, without_building, without_uploading, without_testing,
         no_reset, monitor_rts, monitor_dtr, verbose):
-    with util.cd(project_dir):
+    with fs.cd(project_dir):
         test_dir = get_project_test_dir()
         if not isdir(test_dir):
             raise exception.TestDirNotExists(test_dir)
@@ -87,12 +87,12 @@ def cli(  # pylint: disable=redefined-builtin
         config.validate(envs=environment)

         click.echo("Verbose mode can be enabled via `-v, --verbose` option")
-        click.echo("Collected %d items" % len(test_names))
+        click.secho("Collected %d items" % len(test_names), bold=True)

         results = []
-        start_time = time()
         default_envs = config.default_envs()
         for testname in test_names:

             for envname in config.envs():
                 section = "env:%s" % envname

@@ -114,9 +114,12 @@ def cli(  # pylint: disable=redefined-builtin
                             for p in patterns['ignore']]),
                 ]
                 if any(skip_conditions):
-                    results.append((None, testname, envname))
+                    results.append({"env": envname, "test": testname})
                     continue

+                click.echo()
+                print_processing_header(testname, envname)
+
                 cls = (NativeTestProcessor
                        if config.get(section, "platform") == "native" else
                        EmbeddedTestProcessor)
@@ -133,43 +136,24 @@ def cli(  # pylint: disable=redefined-builtin
                         monitor_rts=monitor_rts,
                         monitor_dtr=monitor_dtr,
                         verbose=verbose))
-                results.append((tp.process(), testname, envname))
+                result = {
+                    "env": envname,
+                    "test": testname,
+                    "duration": time(),
+                    "succeeded": tp.process()
+                }
+                result['duration'] = time() - result['duration']
+                results.append(result)
+
+                print_processing_footer(result)

         if without_testing:
             return

-        passed_nums = 0
-        failed_nums = 0
-        testname_max_len = max([len(r[1]) for r in results])
-        envname_max_len = max([len(click.style(r[2], fg="cyan")) for r in results])
-
-        print_header("[%s]" % click.style("TEST SUMMARY"))
-        click.echo()
-
-        for result in results:
-            status, testname, envname = result
-            if status is False:
-                failed_nums += 1
-                status_str = click.style("FAILED", fg="red")
-            elif status is None:
-                status_str = click.style("IGNORED", fg="yellow")
-            else:
-                passed_nums += 1
-                status_str = click.style("PASSED", fg="green")
-
-            format_str = "test/{:<%d} > {:<%d}\t[{}]" % (testname_max_len,
-                                                         envname_max_len)
-            click.echo(format_str.format(testname, click.style(envname, fg="cyan"),
-                                         status_str),
-                       err=status is False)
-
-        print_header("%s%d passed in %.2f seconds" %
-                     ("%d failed, " % failed_nums if failed_nums else "",
-                      passed_nums, time() - start_time),
-                     is_error=failed_nums,
-                     fg="red" if failed_nums else "green")
-
-        if failed_nums:
+        print_testing_summary(results)
+
+        command_failed = any(r.get("succeeded") is False for r in results)
+        if command_failed:
             raise exception.ReturnErrorCode(1)


@@ -181,3 +165,58 @@ def get_test_names(test_dir):
     if not names:
         names = ["*"]
     return names

+
+def print_processing_header(test, env):
+    click.echo("Processing %s in %s environment" % (click.style(
+        test, fg="yellow", bold=True), click.style(env, fg="cyan", bold=True)))
+    terminal_width, _ = click.get_terminal_size()
+    click.secho("-" * terminal_width, bold=True)
+
+
+def print_processing_footer(result):
+    is_failed = not result.get("succeeded")
+    util.print_labeled_bar(
+        "[%s] Took %.2f seconds" %
+        ((click.style("FAILED", fg="red", bold=True) if is_failed else
+          click.style("PASSED", fg="green", bold=True)), result['duration']),
+        is_error=is_failed)
+
+
+def print_testing_summary(results):
+    click.echo()
+
+    tabular_data = []
+    succeeded_nums = 0
+    failed_nums = 0
+    duration = 0
+
+    for result in results:
+        duration += result.get("duration", 0)
+        if result.get("succeeded") is False:
+            failed_nums += 1
+            status_str = click.style("FAILED", fg="red")
+        elif result.get("succeeded") is None:
+            status_str = "IGNORED"
+        else:
+            succeeded_nums += 1
+            status_str = click.style("PASSED", fg="green")
+
+        tabular_data.append(
+            (result['test'], click.style(result['env'], fg="cyan"), status_str,
+             util.humanize_duration_time(result.get("duration"))))
+
+    click.echo(tabulate(tabular_data,
+                        headers=[
+                            click.style(s, bold=True)
+                            for s in ("Test", "Environment", "Status",
+                                      "Duration")
+                        ]),
+               err=failed_nums)
+
+    util.print_labeled_bar(
+        "%s%d succeeded in %s" %
+        ("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
+         util.humanize_duration_time(duration)),
+        is_error=failed_nums,
+        fg="red" if failed_nums else "green")
@@ -28,7 +28,7 @@ class EmbeddedTestProcessor(TestProcessorBase):

     def process(self):
         if not self.options['without_building']:
-            self.print_progress("Building... (1/3)")
+            self.print_progress("Building...")
             target = ["__test"]
             if self.options['without_uploading']:
                 target.append("checkprogsize")
@@ -36,7 +36,7 @@ class EmbeddedTestProcessor(TestProcessorBase):
                 return False

         if not self.options['without_uploading']:
-            self.print_progress("Uploading... (2/3)")
+            self.print_progress("Uploading...")
             target = ["upload"]
             if self.options['without_building']:
                 target.append("nobuild")
@@ -48,7 +48,7 @@ class EmbeddedTestProcessor(TestProcessorBase):
         if self.options['without_testing']:
             return None

-        self.print_progress("Testing... (3/3)")
+        self.print_progress("Testing...")
         return self.run()

     def run(self):
@@ -14,7 +14,7 @@

 from os.path import join

-from platformio import util
+from platformio import fs, proc
 from platformio.commands.test.processor import TestProcessorBase
 from platformio.proc import LineBufferedAsyncPipe
 from platformio.project.helpers import get_project_build_dir
@@ -24,18 +24,18 @@ class NativeTestProcessor(TestProcessorBase):

     def process(self):
         if not self.options['without_building']:
-            self.print_progress("Building... (1/2)")
+            self.print_progress("Building...")
             if not self.build_or_upload(["__test"]):
                 return False
         if self.options['without_testing']:
             return None
-        self.print_progress("Testing... (2/2)")
+        self.print_progress("Testing...")
         return self.run()

     def run(self):
-        with util.cd(self.options['project_dir']):
+        with fs.cd(self.options['project_dir']):
             build_dir = get_project_build_dir()
-        result = util.exec_command(
+        result = proc.exec_command(
             [join(build_dir, self.env_name, "program")],
             stdout=LineBufferedAsyncPipe(self.on_run_out),
             stderr=LineBufferedAsyncPipe(self.on_run_out))
@@ -20,7 +20,6 @@ from string import Template
 import click

 from platformio import exception
-from platformio.commands.run.helpers import print_header
 from platformio.project.helpers import get_project_test_dir

 TRANSPORT_OPTIONS = {
@@ -40,14 +39,6 @@ TRANSPORT_OPTIONS = {
         "begin": "pc.baud($baudrate)",
         "end": ""
     },
-    "energia": {
-        "include": "#include <Energia.h>",
-        "object": "",
-        "putchar": "Serial.write(c)",
-        "flush": "Serial.flush()",
-        "begin": "Serial.begin($baudrate)",
-        "end": "Serial.end()"
-    },
     "espidf": {
         "include": "#include <stdio.h>",
         "object": "",
@@ -108,12 +99,8 @@ class TestProcessorBase(object):
     def get_baudrate(self):
         return int(self.env_options.get("test_speed", self.DEFAULT_BAUDRATE))

-    def print_progress(self, text, is_error=False):
-        click.echo()
-        print_header("[test/%s > %s] %s" %
-                     (click.style(self.test_name, fg="yellow"),
-                      click.style(self.env_name, fg="cyan"), text),
-                     is_error=is_error)
+    def print_progress(self, text):
+        click.secho(text, bold=self.options.get("verbose"))

     def build_or_upload(self, target):
         if not self._outputcpp_generated:
@@ -123,9 +110,6 @@ class TestProcessorBase(object):
         if self.test_name != "*":
             self.cmd_ctx.meta[CTX_META_TEST_RUNNING_NAME] = self.test_name

-        if not self.options['verbose']:
-            click.echo("Please wait...")
-
         try:
             from platformio.commands.run import cli as cmd_run
             return self.cmd_ctx.invoke(cmd_run,
@@ -164,7 +148,11 @@ class TestProcessorBase(object):
         "",
         "$object",
         "",
+        "#ifdef __GNUC__",
+        "void output_start(unsigned int baudrate __attribute__((unused)))",
+        "#else",
         "void output_start(unsigned int baudrate)",
+        "#endif",
         "{",
         "    $begin;",
         "}",
@@ -21,7 +21,6 @@ import requests

 from platformio import VERSION, __version__, exception, util
 from platformio.compat import WINDOWS
-from platformio.managers.core import shutdown_piohome_servers
 from platformio.proc import exec_command, get_pythonexe_path
 from platformio.project.helpers import get_project_cache_dir

@@ -38,9 +37,6 @@ def cli(dev):

     click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")

-    # kill all PIO Home servers, they block `pioplus` binary
-    shutdown_piohome_servers()
-
     to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
     cmds = (["pip", "install", "--upgrade",
              get_pip_package(to_develop)], ["platformio", "--version"])
 platformio/fs.py (new file, 163 lines)
@@ -0,0 +1,163 @@
+# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import re
+import shutil
+import stat
+import sys
+from glob import glob
+
+import click
+
+from platformio import exception
+from platformio.compat import get_file_contents, glob_escape
+
+
+class cd(object):
+
+    def __init__(self, new_path):
+        self.new_path = new_path
+        self.prev_path = os.getcwd()
+
+    def __enter__(self):
+        os.chdir(self.new_path)
+
+    def __exit__(self, etype, value, traceback):
+        os.chdir(self.prev_path)
+
+
+def get_source_dir():
+    curpath = os.path.abspath(__file__)
+    if not os.path.isfile(curpath):
+        for p in sys.path:
+            if os.path.isfile(os.path.join(p, __file__)):
+                curpath = os.path.join(p, __file__)
+                break
+    return os.path.dirname(curpath)
+
+
+def load_json(file_path):
+    try:
+        with open(file_path, "r") as f:
+            return json.load(f)
+    except ValueError:
+        raise exception.InvalidJSONFile(file_path)
+
+
+def format_filesize(filesize):
+    base = 1024
+    unit = 0
+    suffix = "B"
+    filesize = float(filesize)
+    if filesize < base:
+        return "%d%s" % (filesize, suffix)
+    for i, suffix in enumerate("KMGTPEZY"):
+        unit = base**(i + 2)
+        if filesize >= unit:
+            continue
+        if filesize % (base**(i + 1)):
+            return "%.2f%sB" % ((base * filesize / unit), suffix)
+        break
+    return "%d%sB" % ((base * filesize / unit), suffix)
+
+
+def ensure_udev_rules():
+    from platformio.util import get_systype
+
+    def _rules_to_set(rules_path):
+        return set(l.strip() for l in get_file_contents(rules_path).split("\n")
+                   if l.strip() and not l.startswith("#"))
+
+    if "linux" not in get_systype():
+        return None
+    installed_rules = [
+        "/etc/udev/rules.d/99-platformio-udev.rules",
+        "/lib/udev/rules.d/99-platformio-udev.rules"
+    ]
+    if not any(os.path.isfile(p) for p in installed_rules):
+        raise exception.MissedUdevRules
+
+    origin_path = os.path.abspath(
+        os.path.join(get_source_dir(), "..", "scripts",
+                     "99-platformio-udev.rules"))
+    if not os.path.isfile(origin_path):
+        return None
+
+    origin_rules = _rules_to_set(origin_path)
+    for rules_path in installed_rules:
+        if not os.path.isfile(rules_path):
+            continue
+        current_rules = _rules_to_set(rules_path)
+        if not origin_rules <= current_rules:
+            raise exception.OutdatedUdevRules(rules_path)
+
+    return True
+
+
+def path_endswith_ext(path, extensions):
+    if not isinstance(extensions, (list, tuple)):
+        extensions = [extensions]
+    for ext in extensions:
+        if path.endswith("." + ext):
+            return True
+    return False
+
+
+def match_src_files(src_dir, src_filter=None, src_exts=None):
+
+    def _append_build_item(items, item, src_dir):
+        if not src_exts or path_endswith_ext(item, src_exts):
+            items.add(item.replace(src_dir + os.sep, ""))
+
+    src_filter = src_filter or ""
+    if isinstance(src_filter, (list, tuple)):
+        src_filter = " ".join(src_filter)
+
+    matches = set()
+    # correct fs directory separator
+    src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
+    for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
+        items = set()
+        for item in glob(os.path.join(glob_escape(src_dir), pattern)):
+            if os.path.isdir(item):
+                for root, _, files in os.walk(item, followlinks=True):
+                    for f in files:
+                        _append_build_item(items, os.path.join(root, f),
+                                           src_dir)
+            else:
+                _append_build_item(items, item, src_dir)
+        if action == "+":
+            matches |= items
+        else:
+            matches -= items
+    return sorted(list(matches))
+
+
+def rmtree(path):
+
+    def _onerror(func, path, __):
+        try:
+            st_mode = os.stat(path).st_mode
+            if st_mode & stat.S_IREAD:
+                os.chmod(path, st_mode | stat.S_IWRITE)
+            func(path)
+        except Exception as e:  # pylint: disable=broad-except
+            click.secho("%s \nPlease manually remove the file `%s`" %
+                        (str(e), path),
+                        fg="red",
+                        err=True)
+
+    return shutil.rmtree(path, onerror=_onerror)
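The new `platformio/fs.py` module collects the filesystem helpers that previously lived in `platformio.util` (`cd`, `rmtree`, `match_src_files`, and friends). A short usage sketch, assuming a checkout where `platformio.fs` is importable; the paths and `src_filter` value below are made up:

```python
# Usage sketch for the new platformio.fs helpers introduced above.
# The project path and src_filter value are illustrative.
from platformio import fs

with fs.cd("/tmp/my_project"):           # chdir, restored on exit
    sources = fs.match_src_files(
        "src", src_filter="+<*> -<tests/>", src_exts=["c", "cpp", "h"])
    print(sources)

print(fs.format_filesize(123456))         # e.g. "120.56KB"
```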
@@ -20,7 +20,7 @@ from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath

 import bottle

-from platformio import util
+from platformio import fs, util
 from platformio.compat import WINDOWS, get_file_contents
 from platformio.proc import where_is_program
 from platformio.project.config import ProjectConfig
@@ -32,54 +32,90 @@ from platformio.project.helpers import (get_project_lib_dir,


 class ProjectGenerator(object):

-    def __init__(self, project_dir, ide, env_name):
+    def __init__(self, project_dir, ide, boards):
+        self.config = ProjectConfig.get_instance(
+            join(project_dir, "platformio.ini"))
+        self.config.validate()
         self.project_dir = project_dir
         self.ide = str(ide)
-        self.env_name = str(env_name)
+        self.env_name = str(self.get_best_envname(boards))

     @staticmethod
     def get_supported_ides():
-        tpls_dir = join(util.get_source_dir(), "ide", "tpls")
+        tpls_dir = join(fs.get_source_dir(), "ide", "tpls")
         return sorted(
             [d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])

+    def get_best_envname(self, boards=None):
+        envname = None
+        default_envs = self.config.default_envs()
+        if default_envs:
+            envname = default_envs[0]
+            if not boards:
+                return envname
+
+        for env in self.config.envs():
+            if not boards:
+                return env
+            if not envname:
+                envname = env
+            items = self.config.items(env=env, as_dict=True)
+            if "board" in items and items.get("board") in boards:
+                return env
+
+        return envname
+
     def _load_tplvars(self):
-        tpl_vars = {"env_name": self.env_name}
+        tpl_vars = {
+            "config": self.config,
+            "systype": util.get_systype(),
+            "project_name": basename(self.project_dir),
+            "project_dir": self.project_dir,
+            "env_name": self.env_name,
+            "user_home_dir": abspath(expanduser("~")),
+            "platformio_path":
+            sys.argv[0] if isfile(sys.argv[0])
+            else where_is_program("platformio"),
+            "env_path": os.getenv("PATH"),
+            "env_pathsep": os.pathsep
+        }  # yapf: disable

         # default env configuration
-        tpl_vars.update(
-            ProjectConfig.get_instance(join(
-                self.project_dir, "platformio.ini")).items(env=self.env_name,
-                                                           as_dict=True))
+        tpl_vars.update(self.config.items(env=self.env_name, as_dict=True))
         # build data
         tpl_vars.update(
             load_project_ide_data(self.project_dir, self.env_name) or {})

-        with util.cd(self.project_dir):
+        with fs.cd(self.project_dir):
             tpl_vars.update({
-                "project_name": basename(self.project_dir),
                 "src_files": self.get_src_files(),
-                "user_home_dir": abspath(expanduser("~")),
-                "project_dir": self.project_dir,
                 "project_src_dir": get_project_src_dir(),
                 "project_lib_dir": get_project_lib_dir(),
                 "project_libdeps_dir": join(
-                    get_project_libdeps_dir(), self.env_name),
-                "systype": util.get_systype(),
-                "platformio_path": self._fix_os_path(
-                    sys.argv[0] if isfile(sys.argv[0])
-                    else where_is_program("platformio")),
-                "env_pathsep": os.pathsep,
-                "env_path": self._fix_os_path(os.getenv("PATH"))
+                    get_project_libdeps_dir(), self.env_name)
             })  # yapf: disable

+        for key, value in tpl_vars.items():
+            if key.endswith(("_path", "_dir")):
+                tpl_vars[key] = self.to_unix_path(value)
+        for key in ("includes", "src_files", "libsource_dirs"):
+            if key not in tpl_vars:
+                continue
+            tpl_vars[key] = [self.to_unix_path(inc) for inc in tpl_vars[key]]
+
+        tpl_vars['to_unix_path'] = self.to_unix_path
         return tpl_vars

     @staticmethod
-    def _fix_os_path(path):
-        return (re.sub(r"[\\]+", '\\' * 4, path) if WINDOWS else path)
+    def to_unix_path(path):
+        if not WINDOWS or not path:
+            return path
+        return re.sub(r"[\\]+", "/", path)

     def get_src_files(self):
         result = []
-        with util.cd(self.project_dir):
+        with fs.cd(self.project_dir):
             for root, _, files in os.walk(get_project_src_dir()):
                 for f in files:
                     result.append(relpath(join(root, f)))
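Editor's note: the new to_unix_path() helper replaces _fix_os_path() and normalizes Windows separators for every path handed to the IDE templates. A minimal standalone sketch of the same substitution; the sample path is hypothetical:

    import re

    def to_unix_path(path):
        # Collapse any run of backslashes into a single forward slash,
        # mirroring the substitution applied by ProjectGenerator above.
        return re.sub(r"[\\]+", "/", path)

    print(to_unix_path("C:\\Users\\dev\\project\\src\\main.cpp"))
    # -> C:/Users/dev/project/src/main.cpp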
@@ -87,7 +123,7 @@ class ProjectGenerator(object):

     def get_tpls(self):
         tpls = []
-        tpls_dir = join(util.get_source_dir(), "ide", "tpls", self.ide)
+        tpls_dir = join(fs.get_source_dir(), "ide", "tpls", self.ide)
         for root, _, files in os.walk(tpls_dir):
             for f in files:
                 if not f.endswith(".tpl"):
@@ -1,9 +1,9 @@
 % _defines = " ".join(["-D%s" % d for d in defines])
 {
-  "execPath": "{{ cxx_path.replace("\\", "/") }}",
+  "execPath": "{{ cxx_path }}",
   "gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
   "gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
   "gccErrorLimit": 15,
-  "gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
+  "gccIncludePaths": "{{ ','.join(includes) }}",
   "gccSuppressWarnings": false
 }
@@ -17,7 +17,7 @@
 % path = path.replace(user_home_dir, "$ENV{HOME}")
 % end
 % end
-% return path.replace("\\", "/")
+% return path
 % end

 set(PLATFORMIO_CMD "{{ _normalize_path(platformio_path) }}")
@@ -53,12 +53,12 @@
             <Add option="-D{{define}}"/>
 % end
 % for include in includes:
-            <Add directory="{{include.replace("\\", "/")}}"/>
+            <Add directory="{{include}}"/>
 % end
         </Compiler>
         <Unit filename="platformio.ini" />
 % for file in src_files:
-        <Unit filename="{{file.replace("\\", "/")}}"></Unit>
+        <Unit filename="{{file}}"></Unit>
 % end
     </Project>
 </CodeBlocks_project_file>
@@ -1,9 +1,9 @@
 % _defines = " ".join(["-D%s" % d for d in defines])
 {
-  "execPath": "{{ cxx_path.replace("\\", "/") }}",
+  "execPath": "{{ cxx_path }}",
   "gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
   "gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
   "gccErrorLimit": 15,
-  "gccIncludePaths": "{{! ','.join("'{}'".format(w.replace("\\", '/')) for w in includes)}}",
+  "gccIncludePaths": "{{! ','.join("'{}'".format(inc) for inc in includes)}}",
   "gccSuppressWarnings": false
 }
@@ -10,7 +10,7 @@
 % systype = platform.system().lower()
 %
 % def _escape(text):
-%   return text.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
+%   return to_unix_path(text).replace('"', '\\"')
 % end
 %
 % cleaned_includes = []
@@ -30,16 +30,15 @@
 % end
     "includePath": [
 % for include in cleaned_includes:
-        "{{! _escape(include) }}",
+        "{{ include }}",
 % end
         ""
     ],
     "browse": {
         "limitSymbolsToIncludedHeaders": true,
-        "databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db",
         "path": [
 % for include in cleaned_includes:
-            "{{! _escape(include) }}",
+            "{{ include }}",
 % end
             ""
         ]
@@ -65,7 +64,7 @@
 % if cxx_stds:
     "cppStandard": "c++{{ cxx_stds[-1] }}",
 % end
-    "compilerPath": "\"{{! _escape(cc_path) }}\" {{! _escape(cc_m_flags) }}"
+    "compilerPath": "\"{{cc_path}}\" {{! _escape(cc_m_flags) }}"
   }
   ],
   "version": 4
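Editor's note: the IDE template files above appear to be rendered with bottle's SimpleTemplate syntax ({{ }} expressions plus %-prefixed Python lines), and the generator imports bottle earlier in this diff. A minimal rendering sketch; the cxx_path value is a placeholder, not taken from this change:

    from bottle import SimpleTemplate

    # In the generator, cxx_path comes from the build data collected by
    # _load_tplvars(); here it is hard-coded for illustration.
    tpl = SimpleTemplate('"execPath": "{{ cxx_path }}",')
    print(tpl.render(cxx_path="/usr/bin/g++"))
    # -> "execPath": "/usr/bin/g++",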
@@ -19,7 +19,7 @@ from time import time
 import click
 import semantic_version

-from platformio import __version__, app, exception, telemetry, util
+from platformio import __version__, app, exception, fs, telemetry, util
 from platformio.commands import PlatformioCLI
 from platformio.commands.lib import CTX_META_STORAGE_DIRS_KEY
 from platformio.commands.lib import lib_update as cmd_lib_update
@@ -208,7 +208,7 @@ def check_platformio_upgrade():
                     fg="cyan",
                     nl=False)
         click.secho("`.", fg="yellow")
-    elif join("Cellar", "platformio") in util.get_source_dir():
+    elif join("Cellar", "platformio") in fs.get_source_dir():
         click.secho("brew update && brew upgrade", fg="cyan", nl=False)
         click.secho("` command.", fg="yellow")
     else:
@@ -16,11 +16,8 @@ import os
 import subprocess
 import sys
 from os.path import dirname, join
-from time import sleep

-import requests
-
-from platformio import __version__, exception, util
+from platformio import __version__, exception, fs
 from platformio.compat import PY2, WINDOWS
 from platformio.managers.package import PackageManager
 from platformio.proc import copy_pythonpath_to_osenv, get_pythonexe_path
@@ -99,25 +96,10 @@ def update_core_packages(only_check=False, silent=False):
         if not pkg_dir:
             continue
         if not silent or pm.outdated(pkg_dir, requirements):
-            if name == "tool-pioplus" and not only_check:
-                shutdown_piohome_servers()
-                if WINDOWS:
-                    sleep(1)
             pm.update(name, requirements, only_check=only_check)
     return True


-def shutdown_piohome_servers():
-    port = 8010
-    while port < 8050:
-        try:
-            requests.get("http://127.0.0.1:%d?__shutdown__=1" % port,
-                         timeout=0.01)
-        except:  # pylint: disable=bare-except
-            pass
-        port += 1
-
-
 def inject_contrib_pysite():
     from site import addsitedir
     contrib_pysite_dir = get_core_package_dir("contrib-pysite")
@@ -138,7 +120,7 @@ def pioplus_call(args, **kwargs):
     pythonexe_path = get_pythonexe_path()
     os.environ['PYTHONEXEPATH'] = pythonexe_path
     os.environ['PYTHONPYSITEDIR'] = get_core_package_dir("contrib-pysite")
-    os.environ['PIOCOREPYSITEDIR'] = dirname(util.get_source_dir() or "")
+    os.environ['PIOCOREPYSITEDIR'] = dirname(fs.get_source_dir() or "")
     if dirname(pythonexe_path) not in os.environ['PATH'].split(os.pathsep):
         os.environ['PATH'] = (os.pathsep).join(
             [dirname(pythonexe_path), os.environ['PATH']])
|
@ -211,7 +211,7 @@ class LibraryManager(BasePkgManager):
|
|||||||
|
|
||||||
return self._install_from_url(
|
return self._install_from_url(
|
||||||
name, dl_data['url'].replace("http://", "https://")
|
name, dl_data['url'].replace("http://", "https://")
|
||||||
if app.get_setting("enable_ssl") else dl_data['url'], requirements)
|
if app.get_setting("strict_ssl") else dl_data['url'], requirements)
|
||||||
|
|
||||||
def search_lib_id( # pylint: disable=too-many-branches
|
def search_lib_id( # pylint: disable=too-many-branches
|
||||||
self,
|
self,
|
||||||
|
@ -25,7 +25,7 @@ import click
|
|||||||
import requests
|
import requests
|
||||||
import semantic_version
|
import semantic_version
|
||||||
|
|
||||||
from platformio import __version__, app, exception, telemetry, util
|
from platformio import __version__, app, exception, fs, telemetry, util
|
||||||
from platformio.compat import hashlib_encode_data
|
from platformio.compat import hashlib_encode_data
|
||||||
from platformio.downloader import FileDownloader
|
from platformio.downloader import FileDownloader
|
||||||
from platformio.lockfile import LockFile
|
from platformio.lockfile import LockFile
|
||||||
@ -359,13 +359,13 @@ class PkgInstallerMixin(object):
|
|||||||
manifest_path = self.get_manifest_path(pkg_dir)
|
manifest_path = self.get_manifest_path(pkg_dir)
|
||||||
src_manifest_path = self.get_src_manifest_path(pkg_dir)
|
src_manifest_path = self.get_src_manifest_path(pkg_dir)
|
||||||
if src_manifest_path:
|
if src_manifest_path:
|
||||||
src_manifest = util.load_json(src_manifest_path)
|
src_manifest = fs.load_json(src_manifest_path)
|
||||||
|
|
||||||
if not manifest_path and not src_manifest_path:
|
if not manifest_path and not src_manifest_path:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if manifest_path and manifest_path.endswith(".json"):
|
if manifest_path and manifest_path.endswith(".json"):
|
||||||
manifest = util.load_json(manifest_path)
|
manifest = fs.load_json(manifest_path)
|
||||||
elif manifest_path and manifest_path.endswith(".properties"):
|
elif manifest_path and manifest_path.endswith(".properties"):
|
||||||
with codecs.open(manifest_path, encoding="utf-8") as fp:
|
with codecs.open(manifest_path, encoding="utf-8") as fp:
|
||||||
for line in fp.readlines():
|
for line in fp.readlines():
|
||||||
@ -498,7 +498,7 @@ class PkgInstallerMixin(object):
|
|||||||
if isfile(_url):
|
if isfile(_url):
|
||||||
self.unpack(_url, tmp_dir)
|
self.unpack(_url, tmp_dir)
|
||||||
else:
|
else:
|
||||||
util.rmtree_(tmp_dir)
|
fs.rmtree(tmp_dir)
|
||||||
shutil.copytree(_url, tmp_dir)
|
shutil.copytree(_url, tmp_dir)
|
||||||
elif url.startswith(("http://", "https://")):
|
elif url.startswith(("http://", "https://")):
|
||||||
dlpath = self.download(url, tmp_dir, sha1)
|
dlpath = self.download(url, tmp_dir, sha1)
|
||||||
@ -523,7 +523,7 @@ class PkgInstallerMixin(object):
|
|||||||
return self._install_from_tmp_dir(_tmp_dir, requirements)
|
return self._install_from_tmp_dir(_tmp_dir, requirements)
|
||||||
finally:
|
finally:
|
||||||
if isdir(tmp_dir):
|
if isdir(tmp_dir):
|
||||||
util.rmtree_(tmp_dir)
|
fs.rmtree(tmp_dir)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def _update_src_manifest(self, data, src_dir):
|
def _update_src_manifest(self, data, src_dir):
|
||||||
@ -532,7 +532,7 @@ class PkgInstallerMixin(object):
|
|||||||
src_manifest_path = join(src_dir, self.SRC_MANIFEST_NAME)
|
src_manifest_path = join(src_dir, self.SRC_MANIFEST_NAME)
|
||||||
_data = {}
|
_data = {}
|
||||||
if isfile(src_manifest_path):
|
if isfile(src_manifest_path):
|
||||||
_data = util.load_json(src_manifest_path)
|
_data = fs.load_json(src_manifest_path)
|
||||||
_data.update(data)
|
_data.update(data)
|
||||||
with open(src_manifest_path, "w") as fp:
|
with open(src_manifest_path, "w") as fp:
|
||||||
json.dump(_data, fp)
|
json.dump(_data, fp)
|
||||||
@ -602,7 +602,7 @@ class PkgInstallerMixin(object):
|
|||||||
|
|
||||||
# remove previous/not-satisfied package
|
# remove previous/not-satisfied package
|
||||||
if isdir(pkg_dir):
|
if isdir(pkg_dir):
|
||||||
util.rmtree_(pkg_dir)
|
fs.rmtree(pkg_dir)
|
||||||
shutil.move(tmp_dir, pkg_dir)
|
shutil.move(tmp_dir, pkg_dir)
|
||||||
assert isdir(pkg_dir)
|
assert isdir(pkg_dir)
|
||||||
self.cache_reset()
|
self.cache_reset()
|
||||||
@ -768,7 +768,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
|||||||
if islink(pkg_dir):
|
if islink(pkg_dir):
|
||||||
os.unlink(pkg_dir)
|
os.unlink(pkg_dir)
|
||||||
else:
|
else:
|
||||||
util.rmtree_(pkg_dir)
|
fs.rmtree(pkg_dir)
|
||||||
self.cache_reset()
|
self.cache_reset()
|
||||||
|
|
||||||
# unfix package with the same name
|
# unfix package with the same name
|
||||||
|
@@ -22,7 +22,7 @@ from os.path import basename, dirname, isdir, isfile, join
 import click
 import semantic_version

-from platformio import __version__, app, exception, util
+from platformio import __version__, app, exception, fs, util
 from platformio.compat import PY2, hashlib_encode_data, is_bytes
 from platformio.managers.core import get_core_package_dir
 from platformio.managers.package import BasePkgManager, PackageManager
@@ -47,7 +47,7 @@ class PlatformManager(BasePkgManager):
         repositories = [
             "https://dl.bintray.com/platformio/dl-platforms/manifest.json",
             "{0}://dl.platformio.org/platforms/manifest.json".format(
-                "https" if app.get_setting("enable_ssl") else "http")
+                "https" if app.get_setting("strict_ssl") else "http")
         ]
         BasePkgManager.__init__(self, package_dir
                                 or get_project_platforms_dir(), repositories)
@@ -237,7 +237,7 @@ class PlatformFactory(object):
             name = pm.load_manifest(platform_dir)['name']
         elif name.endswith("platform.json") and isfile(name):
             platform_dir = dirname(name)
-            name = util.load_json(name)['name']
+            name = fs.load_json(name)['name']
         else:
             name, requirements, url = pm.parse_pkg_uri(name, requirements)
             platform_dir = pm.get_package_dir(name, requirements, url)
@@ -404,7 +404,7 @@ class PlatformRunMixin(object):
             join(get_core_package_dir("tool-scons"), "script", "scons"),
             "-Q", "--warn=no-no-parallel-support",
             "--jobs", str(jobs),
-            "--sconstruct", join(util.get_source_dir(), "builder", "main.py")
+            "--sconstruct", join(fs.get_source_dir(), "builder", "main.py")
         ]  # yapf: disable
         args.append("PIOVERBOSE=%d" % (1 if self.verbose else 0))
         # pylint: disable=protected-access
@@ -494,7 +494,7 @@ class PlatformBase(  # pylint: disable=too-many-public-methods
         self.verbose = False

         self._BOARDS_CACHE = {}
-        self._manifest = util.load_json(manifest_path)
+        self._manifest = fs.load_json(manifest_path)
         self._custom_packages = None

         self.pm = PackageManager(get_project_packages_dir(),
@@ -693,7 +693,7 @@ class PlatformBoardConfig(object):
         assert isfile(manifest_path)
         self.manifest_path = manifest_path
         try:
-            self._manifest = util.load_json(manifest_path)
+            self._manifest = fs.load_json(manifest_path)
         except ValueError:
             raise exception.InvalidBoardManifest(manifest_path)
         if not set(["name", "url", "vendor"]) <= set(self._manifest):
@@ -16,7 +16,7 @@ import glob
 import json
 import os
 import re
-from os.path import isfile
+from os.path import expanduser, isfile

 import click

@@ -106,6 +106,8 @@ class ProjectConfig(object):

         # load extra configs
         for pattern in self.get("platformio", "extra_configs", []):
+            if pattern.startswith("~"):
+                pattern = expanduser(pattern)
             for item in glob.glob(pattern):
                 self.read(item)
@@ -165,23 +165,33 @@ def get_project_shared_dir():
                 join(get_project_dir(), "shared"))


-def calculate_project_hash():
+def compute_project_checksum(config):
+    # rebuild when PIO Core version changes
+    checksum = sha1(hashlib_encode_data(__version__))
+
+    # configuration file state
+    checksum.update(hashlib_encode_data(config.to_json()))
+
+    # project file structure
     check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
-    chunks = [__version__]
-    for d in (get_project_src_dir(), get_project_lib_dir()):
+    for d in (get_project_include_dir(), get_project_src_dir(),
+              get_project_lib_dir()):
         if not isdir(d):
             continue
+        chunks = []
         for root, _, files in walk(d):
             for f in files:
                 path = join(root, f)
                 if path.endswith(check_suffixes):
                     chunks.append(path)
-    chunks_to_str = ",".join(sorted(chunks))
-    if WINDOWS:
-        # Fix issue with useless project rebuilding for case insensitive FS.
-        # A case of disk drive can differ...
-        chunks_to_str = chunks_to_str.lower()
-    return sha1(hashlib_encode_data(chunks_to_str)).hexdigest()
+        if not chunks:
+            continue
+        chunks_to_str = ",".join(sorted(chunks))
+        if WINDOWS:  # case insensitive OS
+            chunks_to_str = chunks_to_str.lower()
+        checksum.update(hashlib_encode_data(chunks_to_str))
+
+    return checksum.hexdigest()


 def load_project_ide_data(project_dir, env_name):
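Editor's note: compute_project_checksum() now folds the core version, the serialized configuration and the project file structure into one SHA1, so only a real change triggers a rebuild. A simplified standalone model of that idea; hashlib is used directly and the names and sample values are hypothetical:

    import hashlib

    def project_checksum(core_version, config_dump, source_paths):
        # Any change to the version, the effective configuration, or the
        # sorted set of source paths yields a new digest -> rebuild.
        checksum = hashlib.sha1(core_version.encode())
        checksum.update(config_dump.encode())
        checksum.update(",".join(sorted(source_paths)).lower().encode())
        return checksum.hexdigest()

    print(project_checksum("4.0.1", "[env:uno] board=uno",
                           ["src/main.cpp", "include/app.h"]))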
@@ -284,32 +284,12 @@ def on_command():

 def measure_ci():
     event = {"category": "CI", "action": "NoName", "label": None}
-
-    envmap = {
-        "APPVEYOR": {
-            "label": getenv("APPVEYOR_REPO_NAME")
-        },
-        "CIRCLECI": {
-            "label":
-            "%s/%s" % (getenv("CIRCLE_PROJECT_USERNAME"),
-                       getenv("CIRCLE_PROJECT_REPONAME"))
-        },
-        "TRAVIS": {
-            "label": getenv("TRAVIS_REPO_SLUG")
-        },
-        "SHIPPABLE": {
-            "label": getenv("REPO_NAME")
-        },
-        "DRONE": {
-            "label": getenv("DRONE_REPO_SLUG")
-        }
-    }
-
-    for key, value in envmap.items():
-        if getenv(key, "").lower() != "true":
-            continue
-        event.update({"action": key, "label": value['label']})
-
+    known_cis = ("TRAVIS", "APPVEYOR", "GITLAB_CI", "CIRCLECI", "SHIPPABLE",
+                 "DRONE")
+    for name in known_cis:
+        if getenv(name, "false").lower() == "true":
+            event['action'] = name
+            break
     on_event(**event)
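Editor's note: the reworked measure_ci() relies on the common convention that a CI service exports its own name as an environment variable set to "true". A small sketch of the same check; the simulated environment value is illustrative only:

    import os

    KNOWN_CIS = ("TRAVIS", "APPVEYOR", "GITLAB_CI", "CIRCLECI", "SHIPPABLE",
                 "DRONE")

    def detect_ci():
        # Return the first CI whose marker variable equals "true".
        for name in KNOWN_CIS:
            if os.getenv(name, "false").lower() == "true":
                return name
        return None

    os.environ["GITLAB_CI"] = "true"  # simulate a CI run
    print(detect_ci())  # -> GITLAB_CI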
@@ -13,39 +13,30 @@
 # limitations under the License.

 import json
+import math
 import os
 import platform
 import re
 import socket
-import stat
 import sys
 import time
 from contextlib import contextmanager
 from functools import wraps
 from glob import glob
-from os.path import abspath, basename, dirname, isfile, join
-from shutil import rmtree

 import click
 import requests

 from platformio import __apiurl__, __version__, exception
 from platformio.commands import PlatformioCLI
-from platformio.compat import PY2, WINDOWS, get_file_contents
-from platformio.proc import exec_command, is_ci
-
-
-class cd(object):
-
-    def __init__(self, new_path):
-        self.new_path = new_path
-        self.prev_path = os.getcwd()
-
-    def __enter__(self):
-        os.chdir(self.new_path)
-
-    def __exit__(self, etype, value, traceback):
-        os.chdir(self.prev_path)
+from platformio.compat import PY2, WINDOWS
+from platformio.fs import cd  # pylint: disable=unused-import
+from platformio.fs import load_json  # pylint: disable=unused-import
+from platformio.fs import rmtree as rmtree_  # pylint: disable=unused-import
+from platformio.proc import exec_command  # pylint: disable=unused-import
+from platformio.proc import is_ci  # pylint: disable=unused-import
+
+# KEEP unused imports for backward compatibility with PIO Core 3.0 API


 class memoized(object):
@@ -119,14 +110,6 @@ def capture_std_streams(stdout, stderr=None):
     sys.stderr = _stderr


-def load_json(file_path):
-    try:
-        with open(file_path, "r") as f:
-            return json.load(f)
-    except ValueError:
-        raise exception.InvalidJSONFile(file_path)
-
-
 def get_systype():
     type_ = platform.system().lower()
     arch = platform.machine().lower()
@@ -141,16 +124,6 @@ def pioversion_to_intstr():
     return [int(i) for i in vermatch.group(1).split(".")[:3]]


-def get_source_dir():
-    curpath = abspath(__file__)
-    if not isfile(curpath):
-        for p in sys.path:
-            if isfile(join(p, __file__)):
-                curpath = join(p, __file__)
-                break
-    return dirname(curpath)
-
-
 def change_filemtime(path, mtime):
     os.utime(path, (mtime, mtime))

@@ -221,7 +194,7 @@ def get_logical_devices():
             continue
         items.append({
             "path": match.group(1),
-            "name": basename(match.group(1))
+            "name": os.path.basename(match.group(1))
         })
     return items

@@ -333,7 +306,7 @@ def _get_api_result(
     headers = get_request_defheaders()
     if not url.startswith("http"):
         url = __apiurl__ + url
-    if not get_setting("enable_ssl"):
+    if not get_setting("strict_ssl"):
         url = url.replace("https://", "http://")

     try:
@@ -461,23 +434,6 @@ def parse_date(datestr):
     return time.strptime(datestr)


-def format_filesize(filesize):
-    base = 1024
-    unit = 0
-    suffix = "B"
-    filesize = float(filesize)
-    if filesize < base:
-        return "%d%s" % (filesize, suffix)
-    for i, suffix in enumerate("KMGTPEZY"):
-        unit = base**(i + 2)
-        if filesize >= unit:
-            continue
-        if filesize % (base**(i + 1)):
-            return "%.2f%sB" % ((base * filesize / unit), suffix)
-        break
-    return "%d%sB" % ((base * filesize / unit), suffix)
-
-
 def merge_dicts(d1, d2, path=None):
     if path is None:
         path = []
@@ -490,35 +446,25 @@ def merge_dicts(d1, d2, path=None):
     return d1


-def ensure_udev_rules():
-
-    def _rules_to_set(rules_path):
-        return set(l.strip() for l in get_file_contents(rules_path).split("\n")
-                   if l.strip() and not l.startswith("#"))
-
-    if "linux" not in get_systype():
-        return None
-    installed_rules = [
-        "/etc/udev/rules.d/99-platformio-udev.rules",
-        "/lib/udev/rules.d/99-platformio-udev.rules"
-    ]
-    if not any(isfile(p) for p in installed_rules):
-        raise exception.MissedUdevRules
-
-    origin_path = abspath(
-        join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules"))
-    if not isfile(origin_path):
-        return None
-
-    origin_rules = _rules_to_set(origin_path)
-    for rules_path in installed_rules:
-        if not isfile(rules_path):
-            continue
-        current_rules = _rules_to_set(rules_path)
-        if not origin_rules <= current_rules:
-            raise exception.OutdatedUdevRules(rules_path)
-
-    return True
+def print_labeled_bar(label, is_error=False, fg=None):
+    terminal_width, _ = click.get_terminal_size()
+    width = len(click.unstyle(label))
+    half_line = "=" * int((terminal_width - width - 2) / 2)
+    click.secho("%s %s %s" % (half_line, label, half_line),
+                fg=fg,
+                err=is_error)
+
+
+def humanize_duration_time(duration):
+    if duration is None:
+        return duration
+    duration = duration * 1000
+    tokens = []
+    for multiplier in (3600000, 60000, 1000, 1):
+        fraction = math.floor(duration / multiplier)
+        tokens.append(int(round(duration) if multiplier == 1 else fraction))
+        duration -= fraction * multiplier
+    return "{:02d}:{:02d}:{:02d}.{:03d}".format(*tokens)


 def get_original_version(version):
@@ -530,18 +476,3 @@ def get_original_version(version):
     if int(raw) <= 9999:
         return "%s.%s" % (raw[:-2], int(raw[-2:]))
     return "%s.%s.%s" % (raw[:-4], int(raw[-4:-2]), int(raw[-2:]))
-
-
-def rmtree_(path):
-
-    def _onerror(_, name, __):
-        try:
-            os.chmod(name, stat.S_IWRITE)
-            os.remove(name)
-        except Exception as e:  # pylint: disable=broad-except
-            click.secho("%s \nPlease manually remove the file `%s`" %
-                        (str(e), name),
-                        fg="red",
-                        err=True)
-
-    return rmtree(path, onerror=_onerror)
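Editor's note: the new humanize_duration_time() helper added above formats a duration given in seconds as HH:MM:SS.mmm. Expected behaviour, assuming the implementation shown in the hunk:

    import math

    def humanize_duration_time(duration):
        # Split a duration in seconds into hour/minute/second/millisecond
        # tokens, as the helper above does.
        if duration is None:
            return duration
        duration = duration * 1000
        tokens = []
        for multiplier in (3600000, 60000, 1000, 1):
            fraction = math.floor(duration / multiplier)
            tokens.append(int(round(duration) if multiplier == 1 else fraction))
            duration -= fraction * multiplier
        return "{:02d}:{:02d}:{:02d}.{:03d}".format(*tokens)

    print(humanize_duration_time(3905.25))  # -> 01:05:05.250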
@@ -30,6 +30,9 @@ SUBSYSTEMS=="usb", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", MODE:="066
 # FT232R USB UART
 SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", MODE:="0666"

+# FT231XS USB UART
+SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6015", MODE:="0666"
+
 # Prolific Technology, Inc. PL2303 Serial Port
 SUBSYSTEMS=="usb", ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666"

@@ -20,7 +20,7 @@ from sys import path

 path.append("..")

-from platformio import util
+from platformio import fs, util
 from platformio.managers.platform import PlatformFactory, PlatformManager

 RST_COPYRIGHT = """.. Copyright (c) 2014-present PlatformIO <contact@platformio.org>
@@ -97,8 +97,8 @@ def generate_boards_table(boards, skip_columns=None):
             debug=debug,
             mcu=data['mcu'].upper(),
             f_cpu=int(data['fcpu']) / 1000000,
-            ram=util.format_filesize(data['ram']),
-            rom=util.format_filesize(data['rom']))
+            ram=fs.format_filesize(data['ram']),
+            rom=fs.format_filesize(data['rom']))

         for (name, template) in columns:
             if skip_columns and name in skip_columns:
@@ -280,7 +280,7 @@ Packages


 def generate_platform(name, rst_dir):
-    print "Processing platform: %s" % name
+    print("Processing platform: %s" % name)

     compatible_boards = [
         board for board in BOARDS if name == board['platform']
@@ -439,7 +439,7 @@ def update_platform_docs():


 def generate_framework(type_, data, rst_dir=None):
-    print "Processing framework: %s" % type_
+    print("Processing framework: %s" % type_)

     compatible_platforms = [
         m for m in PLATFORM_MANIFESTS
@@ -614,8 +614,8 @@ def update_embedded_board(rst_path, board):
         mcu_upper=board['mcu'].upper(),
         f_cpu=board['fcpu'],
         f_cpu_mhz=int(board['fcpu']) / 1000000,
-        ram=util.format_filesize(board['ram']),
-        rom=util.format_filesize(board['rom']),
+        ram=fs.format_filesize(board['ram']),
+        rom=fs.format_filesize(board['rom']),
         vendor=board['vendor'],
         board_manifest_url=board_manifest_url,
         upload_protocol=board_config.get("upload.protocol", ""))
@@ -811,7 +811,7 @@ Boards

     # save
     with open(
-            join(util.get_source_dir(), "..", "docs", "plus", "debugging.rst"),
+            join(fs.get_source_dir(), "..", "docs", "plus", "debugging.rst"),
             "r+") as fp:
         content = fp.read()
         fp.seek(0)
@@ -880,7 +880,7 @@ def update_project_examples():
 {examples}
 """

-    project_examples_dir = join(util.get_source_dir(), "..", "examples")
+    project_examples_dir = join(fs.get_source_dir(), "..", "examples")
     framework_examples_md_lines = {}
     embedded = []
     desktop = []
@@ -18,7 +18,7 @@ from sys import exit as sys_exit


 def fix_symlink(root, fname, brokenlink):
-    print root, fname, brokenlink
+    print(root, fname, brokenlink)
     prevcwd = getcwd()

     chdir(root)

setup.py
@@ -23,7 +23,8 @@ install_requires = [
     "colorama",
     "pyserial>=3,<4,!=3.3",
     "requests>=2.4.0,<3",
-    "semantic_version>=2.5.0,<3"
+    "semantic_version>=2.5.0,<3",
+    "tabulate>=0.8.3"
 ]

 setup(
@@ -65,6 +66,7 @@ setup(
         "Operating System :: OS Independent",
         "Programming Language :: C",
         "Programming Language :: Python",
+        "Programming Language :: Python :: 2",
         "Programming Language :: Python :: 3",
         "Topic :: Software Development",
         "Topic :: Software Development :: Build Tools",
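Editor's note: setup.py now pulls in the tabulate package, which backs the improved tabular output in this release. A minimal usage sketch; the rows below are made up for illustration:

    from tabulate import tabulate

    rows = [("uno", "atmelavr", "ATMEGA328P"),
            ("esp32dev", "espressif32", "ESP32")]
    print(tabulate(rows, headers=["Board", "Platform", "MCU"]))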
@@ -27,9 +27,9 @@ def test_board_json_output(clirunner, validate_cliresult):


 def test_board_raw_output(clirunner, validate_cliresult):
-    result = clirunner.invoke(cmd_boards, ["energia"])
+    result = clirunner.invoke(cmd_boards, ["espidf"])
     validate_cliresult(result)
-    assert "titiva" in result.output
+    assert "espressif32" in result.output


 def test_board_options(clirunner, validate_cliresult):
@@ -26,5 +26,5 @@ def test_local_env():
     ])
     if result['returncode'] != 1:
         pytest.fail(result)
-    assert all([s in result['out']
+    assert all([s in result['err']
                 for s in ("PASSED", "IGNORED", "FAILED")]), result['out']