Mirror of https://github.com/platformio/platformio-core.git (synced 2025-12-22 23:03:20 +01:00)
Compare commits
322 Commits
(322 commit SHAs listed; author, date, and message metadata were not preserved in this mirror view.)
@@ -1,12 +1,18 @@
build: off
environment:

platform:
- x86
- x64

environment:
matrix:
- TOXENV: "py27"

install:
- cmd: git submodule update --init --recursive
- cmd: SET PATH=%PATH%;C:\Python27\Scripts;C:\MinGW\bin
- cmd: SET PATH=C:\MinGW\bin;%PATH%
- if %PLATFORM% == x64 SET PATH=C:\Python27-x64;C:\Python27-x64\Scripts;%PATH%
- if %PLATFORM% == x86 SET PATH=C:\Python27;C:\Python27\Scripts;%PATH%
- cmd: pip install tox

test_script:
.github/ISSUE_TEMPLATE.md (vendored): 28 lines changed
@@ -1,22 +1,28 @@
What kind of issue is this?

- [ ] Question. This issue tracker is not the place for questions. If you want to ask how to do
something, or to understand why something isn't working the way you expect it to, use
our Community Forums https://community.platformio.org
- [ ] **Question**.
This issue tracker is not the place for questions. If you want to ask how to do something,
or to understand why something isn't working the way you expect it to,
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

- [ ] PlatformIO IDE. All issues related to PlatformIO IDE should be reported to appropriate repository
https://github.com/platformio/platformio-atom-ide/issues
- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)

- [ ] Development Platform or Board. All issues related to Development Platforms or Embedded Boards
should be reported to appropriate repository.
See full list with repositories and search for "platform-xxx" repository related to your hardware
https://github.com/platformio?query=platform-
- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
should be reported to appropriate repository related to your hardware
https://github.com/topics/platformio-platform

- [ ] Feature Request. Start by telling us what problem you’re trying to solve. Often a solution
- [ ] **Feature Request**.
Start by telling us what problem you’re trying to solve. Often a solution
already exists! Don’t send pull requests to implement new features without first getting our
support. Sometimes we leave features out on purpose to keep the project small.

- [ ] PlatformIO Core. If you’ve found a bug, please provide an information below.
- [ ] **PlatformIO Core**.
If you’ve found a bug, please provide an information below.

*You can erase any parts of this template not applicable to your Issue.*
@@ -1,3 +1,3 @@
[settings]
line_length=79
known_third_party=bottle,click,lockfile,python-dateutil,pytest,requests,SCons,semantic_version,serial
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial
@@ -20,4 +20,4 @@ confidence=
# --disable=W"
# disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating

disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import
disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import,superfluous-parens
@@ -21,7 +21,7 @@ matrix:
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install tox; else pip install -U tox; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install "tox==3.0.0"; else pip install -U tox; fi

# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
@@ -29,10 +29,6 @@ install:
script:
- tox -e $TOX_ENV

after_success:
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then tox -e coverage; fi
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then bash <(curl -s https://codecov.io/bash); fi

notifications:
email: false
HISTORY.rst: 631 lines changed (file diff suppressed because it is too large)
README.rst: 53 lines changed
@@ -7,9 +7,6 @@ PlatformIO
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
|
||||
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
|
||||
:alt: AppVeyor.CI Build Status
|
||||
.. image:: https://requires.io/github/platformio/platformio-core/requirements.svg?branch=develop
|
||||
:target: https://requires.io/github/platformio/platformio-core/requirements/?branch=develop
|
||||
:alt: Requirements Status
|
||||
.. image:: https://img.shields.io/pypi/v/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: Latest Version
|
||||
@@ -19,19 +16,20 @@ PlatformIO
|
||||
.. image:: https://img.shields.io/PlatformIO/Community.png
|
||||
:alt: Community Forums
|
||||
:target: https://community.platformio.org?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/PlatformIO/Plus.png?color=orange
|
||||
:alt: PlatformIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
.. image:: https://img.shields.io/PIO/Plus.png?color=orange
|
||||
:alt: PIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
:target: https://platformio.org/pricing?utm_source=github&utm_medium=core
|
||||
|
||||
**Quick Links:** `Home Page <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
|
||||
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
|
||||
`Docs <http://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
|
||||
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
`Facebook <https://www.facebook.com/platformio>`_ |
|
||||
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
|
||||
`Bintray <https://bintray.com/platformio>`_ |
|
||||
@@ -47,18 +45,27 @@ firmware updates.
|
||||
Get Started
|
||||
-----------
|
||||
|
||||
* `What is PlatformIO? <http://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Products
|
||||
--------
|
||||
Open Source
|
||||
-----------
|
||||
|
||||
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO Core (CLI) <http://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Remote™ <http://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unified Debugger <http://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unit Testing <http://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Delivery™ <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-pio-delivery>`_
|
||||
* `Cloud Builder <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-cloud-builder>`_
|
||||
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
PIO Plus
|
||||
--------
|
||||
|
||||
* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
|
||||
* `Cloud IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
|
||||
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_
|
||||
|
||||
Registry
|
||||
--------
|
||||
@@ -68,14 +75,6 @@ Registry
|
||||
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
|
||||
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Solutions
|
||||
---------
|
||||
|
||||
* `Library Manager <http://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Cloud IDEs Integration <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-cloud-ide>`_
|
||||
* `Standalone IDEs Integration <http://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#other-ide>`_
|
||||
* `Continuous Integration <http://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Development Platforms
|
||||
---------------------
|
||||
|
||||
@@ -84,13 +83,17 @@ Development Platforms
|
||||
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
|
||||
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
|
||||
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
|
||||
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
|
||||
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
|
||||
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
|
||||
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
|
||||
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
|
||||
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
|
||||
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
|
||||
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
|
||||
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
|
||||
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
|
||||
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
|
||||
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Submodule docs updated: 0b8ac5fbf7...0c29f9671f
Submodule examples updated: 41f3396c58...9c16f551d7
@@ -14,7 +14,7 @@
import sys

VERSION = (3, 5, 4)
VERSION = (3, 6, 7)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@@ -26,8 +26,8 @@ __description__ = (
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"

__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"

__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"

@@ -107,8 +107,8 @@ def configure():
try:
click_echo_origin[origin](*args, **kwargs)
except IOError:
(sys.stderr.write if kwargs.get("err") else
sys.stdout.write)("%s\n" % (args[0] if args else ""))
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
"%s\n" % (args[0] if args else ""))

click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
@@ -135,7 +135,7 @@ An unexpected error occurred. Further steps:
`pip install -U platformio` command

* Try to find answer in FAQ Troubleshooting section
http://docs.platformio.org/page/faq.html
https://docs.platformio.org/page/faq.html

* Report this problem to the developers
https://github.com/platformio/platformio-core/issues
@@ -19,13 +19,12 @@ import os
import uuid
from copy import deepcopy
from os import environ, getenv, listdir, remove
from os.path import abspath, dirname, expanduser, getmtime, isdir, isfile, join
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from time import time

import requests
from lockfile import LockFailed, LockFile

from platformio import __version__, exception, util
from platformio import exception, lockfile, util


def projects_dir_validate(projects_dir):
@@ -64,7 +63,7 @@ DEFAULT_SETTINGS = {
},
"enable_telemetry": {
"description":
("Telemetry service <http://docs.platformio.org/page/"
("Telemetry service <https://docs.platformio.org/page/"
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
"value":
True
@@ -108,10 +107,7 @@ class State(object):
if self._prev_state != self._state:
try:
with codecs.open(self.path, "w", encoding="utf8") as fp:
if "dev" in __version__:
json.dump(self._state, fp, indent=4)
else:
json.dump(self._state, fp)
json.dump(self._state, fp)
except IOError:
raise exception.HomeDirPermissionsError(util.get_home_dir())
self._unlock_state_file()
@@ -119,21 +115,19 @@ class State(object):
def _lock_state_file(self):
if not self.lock:
return
self._lockfile = LockFile(self.path)

if self._lockfile.is_locked() and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()

self._lockfile = lockfile.LockFile(self.path)
try:
self._lockfile.acquire()
except LockFailed:
except IOError:
raise exception.HomeDirPermissionsError(dirname(self.path))

def _unlock_state_file(self):
if self._lockfile:
if hasattr(self, "_lockfile") and self._lockfile:
self._lockfile.release()

def __del__(self):
self._unlock_state_file()


class ContentCache(object):

@@ -155,15 +149,10 @@ class ContentCache(object):
def _lock_dbindex(self):
if not self.cache_dir:
os.makedirs(self.cache_dir)
self._lockfile = LockFile(self.cache_dir)
if self._lockfile.is_locked() and \
isfile(self._lockfile.lock_file) and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()

self._lockfile = lockfile.LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except LockFailed:
except: # pylint: disable=bare-except
return False

return True
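For orientation, the hunks above swap python-lockfile's `LockFile`/`LockFailed` for an in-tree `platformio.lockfile.LockFile` that simply raises `IOError` when the lock cannot be taken. The snippet below is only a rough sketch of that acquire/release contract; it is not the actual `platformio.lockfile` module, and the `O_EXCL` strategy is an assumption made for illustration:

```python
import os

class LockFile(object):
    """Minimal sketch of the interface the State/ContentCache hunks rely on:
    acquire() raises IOError on failure, release() drops the lock. This is
    NOT the real platformio.lockfile implementation."""

    def __init__(self, path):
        self._lock_path = os.path.abspath(path) + ".lock"
        self._fd = None

    def acquire(self):
        try:
            # O_EXCL makes creation fail if another process already holds the lock
            self._fd = os.open(self._lock_path,
                               os.O_CREAT | os.O_EXCL | os.O_RDWR)
        except OSError as e:
            raise IOError(str(e))

    def release(self):
        if self._fd is not None:
            os.close(self._fd)
            self._fd = None
        if os.path.isfile(self._lock_path):
            os.remove(self._lock_path)
```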
@@ -237,9 +226,9 @@ class ContentCache(object):
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
line = line.strip()
expire, path = line.split("=")
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
@@ -363,8 +352,8 @@ def get_cid():
pass
cid = str(
uuid.UUID(
bytes=hashlib.md5(str(_uid if _uid else uuid.getnode()))
.digest()))
bytes=hashlib.md5(str(
_uid if _uid else uuid.getnode())).digest()))
if "windows" in util.get_systype() or os.getuid() > 0:
set_state_item("cid", cid)
return cid
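The reflowed `get_cid()` call above derives a stable anonymous client ID by hashing a node or user identifier and reading the 16-byte MD5 digest back as a UUID. A minimal stand-alone sketch of that idea (Python 3 syntax; the hunk itself is Python 2 style, where `str()` already yields a byte string):

```python
import hashlib
import uuid

# Hash a machine identifier and reinterpret the 16-byte digest as a UUID.
# On Python 3 the value has to be encoded to bytes before hashing.
raw = str(uuid.getnode()).encode()
cid = str(uuid.UUID(bytes=hashlib.md5(raw).digest()))
print(cid)  # stable for a given machine
```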
@@ -20,7 +20,7 @@ from os.path import expanduser, join
from time import time

from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
Action, AllowSubstExceptions, AlwaysBuild,
AllowSubstExceptions, AlwaysBuild, Default,
DefaultEnvironment, Variables)

from platformio import util
@@ -68,6 +68,9 @@ commonvars.AddVariables(
("UPLOAD_FLAGS",),
("UPLOAD_RESETMETHOD",),

# test options
("TEST_BUILD_PROJECT_SRC",),

# debug options
("DEBUG_TOOL",),
("DEBUG_SVD_PATH",),
@@ -76,7 +79,8 @@ commonvars.AddVariables(

MULTILINE_VARS = [
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
"BUILD_UNFLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE", "LIB_EXTRA_DIRS"
"BUILD_UNFLAGS", "UPLOAD_FLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE",
"LIB_EXTRA_DIRS"
]

DEFAULT_ENV_OPTIONS = dict(
@@ -131,7 +135,7 @@ if env.GetOption('clean'):
env.PioClean(env.subst("$BUILD_DIR"))
env.Exit(0)
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
print "Verbose mode can be enabled via `-v, --verbose` option"
print("Verbose mode can be enabled via `-v, --verbose` option")

# Handle custom variables from system environment
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS",
@@ -164,32 +168,48 @@ for item in env.GetExtraScripts("pre"):

env.SConscript("$BUILD_SCRIPT")

AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS + ["size"]))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))

if "UPLOAD_FLAGS" in env:
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])

for item in env.GetExtraScripts("post"):
env.SConscript(item, exports="env")

##############################################################################

# Checking program size
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
env.Depends(["upload", "program"], "checkprogsize")
# Replace platform's "size" target with our
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
Default(None)
Default(_new_targets)
Default("checkprogsize")

# Print configured protocols
env.AddPreAction(
["upload", "program"],
env.VerboseAction(lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol..."))

AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))

##############################################################################

if "envdump" in COMMAND_LINE_TARGETS:
print env.Dump()
print(env.Dump())
env.Exit(0)

if "idedata" in COMMAND_LINE_TARGETS:
try:
print "\n%s\n" % util.path_to_unicode(
json.dumps(env.DumpIDEData(), ensure_ascii=False))
print("\n%s\n" % util.path_to_unicode(
json.dumps(env.DumpIDEData(), ensure_ascii=False)))
env.Exit(0)
except UnicodeDecodeError:
sys.stderr.write(
"\nUnicodeDecodeError: Non-ASCII characters found in build "
"environment\n"
"See explanation in FAQ > Troubleshooting > Building\n"
"http://docs.platformio.org/page/faq.html\n\n")
"https://docs.platformio.org/page/faq.html\n\n")
env.Exit(1)

env.AddPreAction(["upload", "program"],
Action(lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol..."))
@@ -53,6 +53,10 @@ def _dump_includes(env):
if unity_dir:
includes.append(unity_dir)

includes.extend(
[env.subst("$PROJECTINCLUDE_DIR"),
env.subst("$PROJECTSRC_DIR")])

# remove duplicates
result = []
for item in includes:
@@ -97,8 +101,8 @@ def _dump_defines(env):
board_mcu = env.BoardConfig().get("build.mcu")
if board_mcu:
defines.append(
str("__AVR_%s__" % board_mcu.upper()
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
str("__AVR_%s__" % board_mcu.upper().replace(
"ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))

# built-in GCC marcos
# if env.GetCompilerType() == "gcc":
@@ -130,8 +134,8 @@ def _get_svd_path(env):


def DumpIDEData(env):
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS"
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS"

data = {
"libsource_dirs":
@@ -152,6 +156,10 @@ def DumpIDEData(env):
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path":
env.subst("$PROG_PATH"),
"flash_extra_images": [{
"offset": item[0],
"path": env.subst(item[1])
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
"svd_path":
_get_svd_path(env),
"compiler_type":
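As a side note, the `_dump_defines()` hunk only reflows the expression; its effect on a hypothetical `build.mcu` value such as `atmega328p` is unchanged:

```python
# Illustration only: how a board's MCU name becomes a GCC-style AVR define,
# following the replace() chain shown in the hunk above.
board_mcu = "atmega328p"  # hypothetical value read from the board manifest
define = str("__AVR_%s__" % board_mcu.upper().replace(
    "ATMEGA", "ATmega").replace("ATTINY", "ATtiny"))
print(define)  # __AVR_ATmega328P__
```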
@@ -12,13 +12,14 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=no-member, no-self-use, unused-argument
|
||||
# pylint: disable=no-member, no-self-use, unused-argument, too-many-lines
|
||||
# pylint: disable=too-many-instance-attributes, too-many-public-methods
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from glob import glob
|
||||
from os.path import (basename, commonprefix, dirname, isdir, isfile, join,
|
||||
@@ -64,17 +65,22 @@ class LibBuilderFactory(object):
|
||||
if isfile(join(path, "module.json")):
|
||||
return ["mbed"]
|
||||
|
||||
include_re = re.compile(
|
||||
r'^#include\s+(<|")(Arduino|mbed)\.h(<|")', flags=re.MULTILINE)
|
||||
|
||||
# check source files
|
||||
for root, _, files in os.walk(path, followlinks=True):
|
||||
if "mbed_lib.json" in files:
|
||||
return ["mbed"]
|
||||
for fname in files:
|
||||
if not env.IsFileWithExt(
|
||||
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
with open(join(root, fname)) as f:
|
||||
content = f.read()
|
||||
if "Arduino.h" in content:
|
||||
if "Arduino.h" in content and include_re.search(content):
|
||||
return ["arduino"]
|
||||
elif "mbed.h" in content:
|
||||
elif "mbed.h" in content and include_re.search(content):
|
||||
return ["mbed"]
|
||||
return []
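The new `include_re` check above tightens framework detection: a file must contain a real `#include` line, not merely the string `Arduino.h` somewhere in a comment. A small self-contained check of that behaviour, using the quoted `"Arduino.h"` include form and an invented snippet:

```python
import re

# Pattern as it appears in the hunk above.
include_re = re.compile(
    r'^#include\s+(<|")(Arduino|mbed)\.h(<|")', flags=re.MULTILINE)

real_include = '#include "Arduino.h"\nvoid setup() {}\n'
only_mention = '// TODO: port away from Arduino.h some day\nint x;\n'

print(bool("Arduino.h" in real_include and include_re.search(real_include)))  # True
print(bool("Arduino.h" in only_mention and include_re.search(only_mention)))  # False
```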
|
||||
|
||||
@@ -171,10 +177,11 @@ class LibBuilderBase(object):
|
||||
if isdir(join(self.path, "src")) else self.path)
|
||||
|
||||
def get_include_dirs(self):
|
||||
items = [self.src_dir]
|
||||
items = []
|
||||
include_dir = self.include_dir
|
||||
if include_dir and include_dir not in items:
|
||||
items.append(include_dir)
|
||||
items.append(self.src_dir)
|
||||
return items
|
||||
|
||||
@property
|
||||
@@ -256,7 +263,6 @@ class LibBuilderBase(object):
|
||||
|
||||
def process_extra_options(self):
|
||||
with util.cd(self.path):
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
self.env.ProcessFlags(self.build_flags)
|
||||
if self.extra_script:
|
||||
self.env.SConscriptChdir(1)
|
||||
@@ -266,6 +272,7 @@ class LibBuilderBase(object):
|
||||
"env": self.env,
|
||||
"pio_lib_builder": self
|
||||
})
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
|
||||
def process_dependencies(self):
|
||||
if not self.dependencies:
|
||||
@@ -347,7 +354,7 @@ class LibBuilderBase(object):
|
||||
for path in self._validate_search_files(search_files):
|
||||
try:
|
||||
assert "+" in self.lib_ldf_mode
|
||||
incs = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
candidates = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
self.env.File(path),
|
||||
self.env,
|
||||
tuple(include_dirs),
|
||||
@@ -357,26 +364,26 @@ class LibBuilderBase(object):
|
||||
sys.stderr.write(
|
||||
"Warning! Classic Pre Processor is used for `%s`, "
|
||||
"advanced has failed with `%s`\n" % (path, e))
|
||||
_incs = LibBuilderBase.CLASSIC_SCANNER(
|
||||
candidates = LibBuilderBase.CLASSIC_SCANNER(
|
||||
self.env.File(path), self.env, tuple(include_dirs))
|
||||
incs = []
|
||||
for inc in _incs:
|
||||
incs.append(inc)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
|
||||
# print(path, map(lambda n: n.get_abspath(), candidates))
|
||||
for item in candidates:
|
||||
if item not in result:
|
||||
result.append(item)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
continue
|
||||
_h_path = item.get_abspath()
|
||||
if not self.env.IsFileWithExt(_h_path, piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[:_h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT:
|
||||
if not isfile("%s.%s" % (_f_part, ext)):
|
||||
continue
|
||||
_h_path = inc.get_abspath()
|
||||
if not self.env.IsFileWithExt(_h_path,
|
||||
piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[:_h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT:
|
||||
if isfile("%s.%s" % (_f_part, ext)):
|
||||
incs.append(
|
||||
self.env.File("%s.%s" % (_f_part, ext)))
|
||||
# print path, map(lambda n: n.get_abspath(), incs)
|
||||
for inc in incs:
|
||||
if inc not in result:
|
||||
result.append(inc)
|
||||
_c_path = self.env.File("%s.%s" % (_f_part, ext))
|
||||
if _c_path not in result:
|
||||
result.append(_c_path)
|
||||
|
||||
return result
|
||||
|
||||
def depend_recursive(self, lb, search_files=None):
|
||||
@@ -432,23 +439,23 @@ class LibBuilderBase(object):
|
||||
libs.extend(lb.build())
|
||||
# copy shared information to self env
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
|
||||
for lb in self._circular_deps:
|
||||
self.env.AppendUnique(CPPPATH=lb.get_include_dirs())
|
||||
self.env.PrependUnique(CPPPATH=lb.get_include_dirs())
|
||||
|
||||
if self._is_built:
|
||||
return libs
|
||||
self._is_built = True
|
||||
|
||||
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if self == lb or not lb.is_built:
|
||||
continue
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
|
||||
if self.lib_archive:
|
||||
libs.append(
|
||||
@@ -488,11 +495,29 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
if isdir(join(self.path, "src")):
|
||||
return LibBuilderBase.src_filter.fget(self)
|
||||
src_dir = join(self.path, "src")
|
||||
if isdir(src_dir):
|
||||
src_filter = LibBuilderBase.src_filter.fget(self)
|
||||
for root, _, files in os.walk(src_dir, followlinks=True):
|
||||
found = False
|
||||
for fname in files:
|
||||
if fname.lower().endswith("asm"):
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
continue
|
||||
rel_path = root.replace(src_dir, "")
|
||||
if rel_path.startswith(sep):
|
||||
rel_path = rel_path[1:] + sep
|
||||
src_filter.append("-<%s*.[aA][sS][mM]>" % rel_path)
|
||||
return src_filter
|
||||
|
||||
src_filter = []
|
||||
is_utility = isdir(join(self.path, "utility"))
|
||||
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
|
||||
# arduino ide ignores files with .asm or .ASM extensions
|
||||
if ext.lower() == "asm":
|
||||
continue
|
||||
src_filter.append("+<*.%s>" % ext)
|
||||
if is_utility:
|
||||
src_filter.append("+<utility%s*.%s>" % (sep, ext))
|
||||
@@ -566,6 +591,111 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["mbed"])
|
||||
|
||||
def process_extra_options(self):
|
||||
self._process_mbed_lib_confs()
|
||||
return super(MbedLibBuilder, self).process_extra_options()
|
||||
|
||||
def _process_mbed_lib_confs(self):
|
||||
mbed_lib_paths = [
|
||||
join(root, "mbed_lib.json")
|
||||
for root, _, files in os.walk(self.path)
|
||||
if "mbed_lib.json" in files
|
||||
]
|
||||
if not mbed_lib_paths:
|
||||
return None
|
||||
|
||||
mbed_config_path = None
|
||||
for p in self.env.get("CPPPATH"):
|
||||
mbed_config_path = join(self.env.subst(p), "mbed_config.h")
|
||||
if isfile(mbed_config_path):
|
||||
break
|
||||
else:
|
||||
mbed_config_path = None
|
||||
if not mbed_config_path:
|
||||
return None
|
||||
|
||||
macros = {}
|
||||
for mbed_lib_path in mbed_lib_paths:
|
||||
macros.update(self._mbed_lib_conf_parse_macros(mbed_lib_path))
|
||||
|
||||
self._mbed_conf_append_macros(mbed_config_path, macros)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _mbed_normalize_macro(macro):
|
||||
name = macro
|
||||
value = None
|
||||
if "=" in macro:
|
||||
name, value = macro.split("=", 1)
|
||||
return dict(name=name, value=value)
|
||||
|
||||
def _mbed_lib_conf_parse_macros(self, mbed_lib_path):
|
||||
macros = {}
|
||||
cppdefines = str(self.env.Flatten(self.env.subst("$CPPDEFINES")))
|
||||
manifest = util.load_json(mbed_lib_path)
|
||||
|
||||
# default macros
|
||||
for macro in manifest.get("macros", []):
|
||||
macro = self._mbed_normalize_macro(macro)
|
||||
macros[macro['name']] = macro
|
||||
|
||||
# configuration items
|
||||
for key, options in manifest.get("config", {}).items():
|
||||
if "value" not in options:
|
||||
continue
|
||||
macros[key] = dict(
|
||||
name=options.get("macro_name"), value=options.get("value"))
|
||||
|
||||
# overrode items per target
|
||||
for target, options in manifest.get("target_overrides", {}).items():
|
||||
if target != "*" and "TARGET_" + target not in cppdefines:
|
||||
continue
|
||||
for macro in options.get("target.macros_add", []):
|
||||
macro = self._mbed_normalize_macro(macro)
|
||||
macros[macro['name']] = macro
|
||||
for key, value in options.items():
|
||||
if not key.startswith("target.") and key in macros:
|
||||
macros[key]['value'] = value
|
||||
|
||||
# normalize macro names
|
||||
for key, macro in macros.items():
|
||||
if not macro['name']:
|
||||
macro['name'] = key
|
||||
if "." not in macro['name']:
|
||||
macro['name'] = "%s.%s" % (manifest.get("name"),
|
||||
macro['name'])
|
||||
macro['name'] = re.sub(
|
||||
r"[^a-z\d]+", "_", macro['name'], flags=re.I).upper()
|
||||
macro['name'] = "MBED_CONF_" + macro['name']
|
||||
if isinstance(macro['value'], bool):
|
||||
macro['value'] = 1 if macro['value'] else 0
|
||||
|
||||
return {macro["name"]: macro["value"] for macro in macros.values()}
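To make the normalization at the end of `_mbed_lib_conf_parse_macros()` concrete, here is an illustrative helper; the manifest name `events` and the config keys are made-up examples, not values taken from the diff:

```python
import re

def to_macro_name(manifest_name, key):
    # Same normalization as the hunk above: prefix with the manifest name when
    # the key has no dot, squash non-alphanumerics, upper-case, add MBED_CONF_.
    name = key if "." in key else "%s.%s" % (manifest_name, key)
    name = re.sub(r"[^a-z\d]+", "_", name, flags=re.I).upper()
    return "MBED_CONF_" + name

print(to_macro_name("events", "present"))           # MBED_CONF_EVENTS_PRESENT
print(to_macro_name("events", "events.use-lores"))  # MBED_CONF_EVENTS_USE_LORES
```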
|
||||
|
||||
def _mbed_conf_append_macros(self, mbed_config_path, macros):
|
||||
lines = []
|
||||
with open(mbed_config_path) as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if line == "#endif":
|
||||
lines.append(
|
||||
"// PlatformIO Library Dependency Finder (LDF)")
|
||||
lines.extend([
|
||||
"#define %s %s" % (name,
|
||||
value if value is not None else "")
|
||||
for name, value in macros.items()
|
||||
])
|
||||
lines.append("")
|
||||
if not line.startswith("#define"):
|
||||
lines.append(line)
|
||||
continue
|
||||
tokens = line.split()
|
||||
if len(tokens) < 2 or tokens[1] not in macros:
|
||||
lines.append(line)
|
||||
lines.append("")
|
||||
with open(mbed_config_path, "w") as fp:
|
||||
fp.write("\n".join(lines))
|
||||
|
||||
|
||||
class PlatformIOLibBuilder(LibBuilderBase):
|
||||
|
||||
@@ -586,15 +716,6 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
def _is_arduino_manifest(self):
|
||||
return isfile(join(self.path, "library.properties"))
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if all([
|
||||
"srcFilter" in self._manifest.get("build", {})
|
||||
or self.env['SRC_FILTER'], not self._is_arduino_manifest()
|
||||
]):
|
||||
return self.path
|
||||
return LibBuilderBase.src_dir.fget(self)
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
if "srcFilter" in self._manifest.get("build", {}):
|
||||
@@ -672,6 +793,12 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
|
||||
class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
def __init__(self, env, *args, **kwargs):
|
||||
# backup original value, will be reset in base.__init__
|
||||
project_src_filter = env.get("SRC_FILTER")
|
||||
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
|
||||
self.env['SRC_FILTER'] = project_src_filter
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
|
||||
@@ -682,10 +809,11 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
return self.env.subst("$PROJECTSRC_DIR")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
include_dirs = []
|
||||
project_include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
|
||||
if isdir(project_include_dir):
|
||||
include_dirs.append(project_include_dir)
|
||||
include_dirs.extend(LibBuilderBase.get_include_dirs(self))
|
||||
return include_dirs
|
||||
|
||||
def get_search_files(self):
|
||||
@@ -710,7 +838,8 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
return self.env.get("SRC_FILTER", LibBuilderBase.src_filter.fget(self))
|
||||
return (self.env.get("SRC_FILTER")
|
||||
or LibBuilderBase.src_filter.fget(self))
|
||||
|
||||
def process_extra_options(self):
|
||||
# skip for project, options are already processed
|
||||
@@ -752,8 +881,9 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
def build(self):
|
||||
self._is_built = True # do not build Project now
|
||||
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
|
||||
return LibBuilderBase.build(self)
|
||||
result = LibBuilderBase.build(self)
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
return result
|
||||
|
||||
|
||||
def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
@@ -817,7 +947,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
if verbose and found_incompat:
|
||||
sys.stderr.write(
|
||||
"More details about \"Library Compatibility Mode\": "
|
||||
"http://docs.platformio.org/page/librarymanager/ldf.html#"
|
||||
"https://docs.platformio.org/page/librarymanager/ldf.html#"
|
||||
"ldf-compat-mode\n")
|
||||
|
||||
DefaultEnvironment()['__PIO_LIB_BUILDERS'] = items
|
||||
@@ -860,24 +990,24 @@ def ConfigureProjectLibBuilder(env):
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
|
||||
|
||||
print "Library Dependency Finder -> http://bit.ly/configure-pio-ldf"
|
||||
print "LDF MODES: FINDER(%s) COMPATIBILITY(%s)" % (ldf_mode,
|
||||
project.lib_compat_mode)
|
||||
print("Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
|
||||
print("LDF MODES: FINDER(%s) COMPATIBILITY(%s)" %
|
||||
(ldf_mode, project.lib_compat_mode))
|
||||
|
||||
lib_builders = env.GetLibBuilders()
|
||||
print "Collected %d compatible libraries" % len(lib_builders)
|
||||
print("Collected %d compatible libraries" % len(lib_builders))
|
||||
|
||||
print "Scanning dependencies..."
|
||||
print("Scanning dependencies...")
|
||||
project.search_deps_recursive()
|
||||
|
||||
if ldf_mode.startswith("chain") and project.depbuilders:
|
||||
correct_found_libs(lib_builders)
|
||||
|
||||
if project.depbuilders:
|
||||
print "Dependency Graph"
|
||||
print("Dependency Graph")
|
||||
print_deps_tree(project)
|
||||
else:
|
||||
print "No dependencies"
|
||||
print("No dependencies")
|
||||
|
||||
return project
|
||||
|
||||
|
||||
@@ -33,10 +33,10 @@ class InoToCPPConverter(object):
PROTOTYPE_RE = re.compile(
r"""^(
(?:template\<.*\>\s*)? # template
([a-z_\d\&]+\*?\s+){1,2} # return type
([a-z_\d\&]+\*?\s+){1,2} # return type
([a-z_\d]+\s*) # name of prototype
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
)\s*\{ # must end with {
)\s*(\{|;) # must end with `{` or `;`
""", re.X | re.M | re.I)
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
@@ -162,7 +162,14 @@ class InoToCPPConverter(object):
if not prototypes:
return contents

prototype_names = set([m.group(3).strip() for m in prototypes])
# skip already declared prototypes
declared = set(
m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [
m for m in prototypes if m.group(1).strip() not in declared
]

prototype_names = set(m.group(3).strip() for m in prototypes)
split_pos = prototypes[0].start()
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
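The `PROTOTYPE_RE` change lets the converter match both definitions (ending in `{`) and hand-written declarations (ending in `;`), and the new `declared` set then drops the latter so prototypes are not generated twice. A quick stand-alone exercise of the updated pattern on an .ino-style snippet (the snippet itself is invented for illustration):

```python
import re

# Updated pattern from the hunk above: group(3) is the function name,
# group(4) tells a definition ("{") apart from a declaration (";").
PROTOTYPE_RE = re.compile(
    r"""^(
    (?:template\<.*\>\s*)?       # template
    ([a-z_\d\&]+\*?\s+){1,2}     # return type
    ([a-z_\d]+\s*)               # name of prototype
    \([a-z_,\.\*\&\[\]\s\d]*\)   # arguments
    )\s*(\{|;)                   # must end with `{` or `;`
    """, re.X | re.M | re.I)

sketch = """void blink(int pin);
void blink(int pin) {
  digitalWrite(pin, HIGH);
}
"""

for m in PROTOTYPE_RE.finditer(sketch):
    print(m.group(3).strip(), repr(m.group(4)))
# blink ';'   -> already declared, would be skipped
# blink '{'   -> definition, kept for prototype generation
```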
@@ -274,13 +281,13 @@ def VerboseAction(_, act, actstr):

def PioClean(env, clean_dir):
if not isdir(clean_dir):
print "Build environment is clean"
print("Build environment is clean")
env.Exit(0)
for root, _, files in walk(clean_dir):
for file_ in files:
remove(join(root, file_))
print "Removed %s" % relpath(join(root, file_))
print "Done cleaning"
print("Removed %s" % relpath(join(root, file_)))
print("Done cleaning")
util.rmtree_(clean_dir)
env.Exit(0)

@@ -289,8 +296,12 @@ def ProcessDebug(env):
if not env.subst("$PIODEBUGFLAGS"):
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
env.Append(
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []),
BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"])
BUILD_FLAGS=list(env['PIODEBUGFLAGS']) + ["-D__PLATFORMIO_DEBUG__"])
unflags = ["-Os"]
for level in [0, 1, 2]:
for flag in ("O", "g", "ggdb"):
unflags.append("-%s%d" % (flag, level))
env.Append(BUILD_UNFLAGS=unflags)


def ProcessTest(env):
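For clarity, the nested loop introduced in `ProcessDebug()` expands to the unflag list below; only optimization and debug levels 0 to 2 are stripped, while the replacement `-Og -g3 -ggdb3` flags stay in place:

```python
# Reproduction of the loop from the hunk above, just to show its output.
unflags = ["-Os"]
for level in [0, 1, 2]:
    for flag in ("O", "g", "ggdb"):
        unflags.append("-%s%d" % (flag, level))
print(unflags)
# ['-Os', '-O0', '-g0', '-ggdb0', '-O1', '-g1', '-ggdb1', '-O2', '-g2', '-ggdb2']
```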
@@ -23,7 +23,7 @@ from SCons.Script import COMMAND_LINE_TARGETS
|
||||
from platformio import exception, util
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
# pylint: disable=too-many-branches
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
|
||||
@util.memoized()
|
||||
@@ -69,15 +69,22 @@ def LoadPioPlatform(env, variables):
|
||||
# Ensure real platform name
|
||||
env['PIOPLATFORM'] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for name in installed_packages:
|
||||
type_ = p.get_package_type(name)
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
continue
|
||||
path = p.get_package_dir(name)
|
||||
if isdir(join(path, "bin")):
|
||||
path = join(path, "bin")
|
||||
env.PrependENVPath("PATH", path)
|
||||
pkg_dir = p.get_package_dir(name)
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
|
||||
if ("windows" not in systype and isdir(join(pkg_dir, "lib"))
|
||||
and type_ != "toolchain"):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH"
|
||||
if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
join(pkg_dir, "lib"))
|
||||
|
||||
# Platform specific LD Scripts
|
||||
if isdir(join(p.get_dir(), "ldscripts")):
|
||||
@@ -88,8 +95,10 @@ def LoadPioPlatform(env, variables):
|
||||
for key, value in variables.UnknownVariables().items():
|
||||
if not key.startswith("BOARD_"):
|
||||
continue
|
||||
env.Replace(
|
||||
**{key.upper().replace("BUILD.", ""): base64.b64decode(value)})
|
||||
env.Replace(**{
|
||||
key.upper().replace("BUILD.", ""):
|
||||
base64.b64decode(value)
|
||||
})
|
||||
return
|
||||
|
||||
# update board manifest with a custom data
|
||||
@@ -115,15 +124,17 @@ def LoadPioPlatform(env, variables):
|
||||
|
||||
|
||||
def PrintConfiguration(env):
|
||||
platform_data = ["PLATFORM: %s >" % env.PioPlatform().title]
|
||||
system_data = ["SYSTEM:"]
|
||||
platform = env.PioPlatform()
|
||||
platform_data = ["PLATFORM: %s >" % platform.title]
|
||||
hardware_data = ["HARDWARE:"]
|
||||
configuration_data = ["CONFIGURATION:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
system_data.append(mcu.upper())
|
||||
hardware_data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
system_data.append("%dMHz" % (f_cpu / 1000000))
|
||||
hardware_data.append("%dMHz" % (f_cpu / 1000000))
|
||||
|
||||
debug_tools = None
|
||||
if "BOARD" in env:
|
||||
@@ -133,13 +144,16 @@ def PrintConfiguration(env):
|
||||
debug_tools = board_config.get("debug", {}).get("tools")
|
||||
ram = board_config.get("upload", {}).get("maximum_ram_size")
|
||||
flash = board_config.get("upload", {}).get("maximum_size")
|
||||
system_data.append("%s RAM (%s Flash)" % (util.format_filesize(ram),
|
||||
util.format_filesize(flash)))
|
||||
hardware_data.append(
|
||||
"%s RAM (%s Flash)" % (util.format_filesize(ram),
|
||||
util.format_filesize(flash)))
|
||||
configuration_data.append(
|
||||
"https://docs.platformio.org/page/boards/%s/%s.html" %
|
||||
(platform.name, board_config.id))
|
||||
|
||||
if platform_data:
|
||||
print " ".join(platform_data)
|
||||
if system_data:
|
||||
print " ".join(system_data)
|
||||
for data in (configuration_data, platform_data, hardware_data):
|
||||
if len(data) > 1:
|
||||
print(" ".join(data))
|
||||
|
||||
# Debugging
|
||||
if not debug_tools:
|
||||
@@ -161,7 +175,7 @@ def PrintConfiguration(env):
|
||||
if external:
|
||||
data.append("EXTERNAL(%s)" % ", ".join(sorted(external)))
|
||||
|
||||
print "DEBUG: %s" % " ".join(data)
|
||||
print("DEBUG: %s" % " ".join(data))
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
@@ -21,10 +22,12 @@ from os.path import isfile, join
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Node.Alias import Alias
|
||||
from SCons.Script import ARGUMENTS
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import util
|
||||
from platformio import exception, util
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
@@ -40,18 +43,18 @@ def FlushSerialBuffer(env, port):
|
||||
|
||||
def TouchSerialPort(env, port, baudrate):
|
||||
port = env.subst(port)
|
||||
print "Forcing reset using %dbps open/close on port %s" % (baudrate, port)
|
||||
print("Forcing reset using %dbps open/close on port %s" % (baudrate, port))
|
||||
try:
|
||||
s = Serial(port=port, baudrate=baudrate)
|
||||
s.setDTR(False)
|
||||
s.close()
|
||||
except: # pylint: disable=W0702
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
sleep(0.4) # DO NOT REMOVE THAT (required by SAM-BA based boards)
|
||||
|
||||
|
||||
def WaitForNewSerialPort(env, before):
|
||||
print "Waiting for the new upload port..."
|
||||
print("Waiting for the new upload port...")
|
||||
prev_port = env.subst("$UPLOAD_PORT")
|
||||
new_port = None
|
||||
elapsed = 0
|
||||
@@ -88,7 +91,7 @@ def WaitForNewSerialPort(env, before):
|
||||
return new_port
|
||||
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
env = args[0]
|
||||
|
||||
def _get_pattern():
|
||||
@@ -143,7 +146,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
return port
|
||||
|
||||
if "UPLOAD_PORT" in env and not _get_pattern():
|
||||
print env.subst("Use manually specified: $UPLOAD_PORT")
|
||||
print(env.subst("Use manually specified: $UPLOAD_PORT"))
|
||||
return
|
||||
|
||||
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
|
||||
@@ -151,19 +154,14 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
and not env.subst("$UPLOAD_PROTOCOL"))):
|
||||
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
|
||||
else:
|
||||
if ("linux" in util.get_systype() and not any([
|
||||
isfile("/etc/udev/rules.d/99-platformio-udev.rules"),
|
||||
isfile("/lib/udev/rules.d/99-platformio-udev.rules")
|
||||
])):
|
||||
sys.stderr.write(
|
||||
"\nWarning! Please install `99-platformio-udev.rules` and "
|
||||
"check that your board's PID and VID are listed in the rules."
|
||||
"\n http://docs.platformio.org/en/latest/faq.html"
|
||||
"#platformio-udev-rules\n")
|
||||
try:
|
||||
util.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
sys.stderr.write("\n%s\n\n" % e)
|
||||
env.Replace(UPLOAD_PORT=_look_for_serial_port())
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
print env.subst("Auto-detected: $UPLOAD_PORT")
|
||||
print(env.subst("Auto-detected: $UPLOAD_PORT"))
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: Please specify `upload_port` for environment or use "
|
||||
@@ -173,7 +171,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
def UploadToDisk(_, target, source, env):
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
@@ -182,36 +180,91 @@ def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
continue
|
||||
copyfile(fpath,
|
||||
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
|
||||
print "Firmware has been successfully uploaded.\n"\
|
||||
"(Some boards may require manual hard reset)"
|
||||
print("Firmware has been successfully uploaded.\n"
|
||||
"(Some boards may require manual hard reset)")
|
||||
|
||||
|
||||
def CheckUploadSize(_, target, source, env): # pylint: disable=W0613,W0621
if "BOARD" not in env:
return
max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
if max_size == 0 or "SIZETOOL" not in env:
return

sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
cmd = [
env.subst("$SIZETOOL"), "-B",
str(source[0] if isinstance(target[0], Alias) else target[0])
def CheckUploadSize(_, target, source, env):
check_conditions = [
env.get("BOARD"),
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
]
result = util.exec_command(cmd, env=sysenv)
if result['returncode'] != 0:
if not all(check_conditions):
return
program_max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
data_max_size = int(env.BoardConfig().get("upload.maximum_ram_size", 0))
if program_max_size == 0:
return
print result['out'].strip()

line = result['out'].strip().splitlines()[1]
values = [v.strip() for v in line.split("\t")]
used_size = int(values[0]) + int(values[1])
def _configure_defaults():
env.Replace(
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")

if used_size > max_size:
sys.stderr.write(
"Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" % (used_size, max_size))
def _get_size_output():
cmd = env.get("SIZECHECKCMD")
if not cmd:
return None
if not isinstance(cmd, list):
cmd = cmd.split()
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
result = util.exec_command(env.subst(cmd), env=sysenv)
if result['returncode'] != 0:
return None
return result['out'].strip()

def _calculate_size(output, pattern):
if not output or not pattern:
return -1
size = 0
regexp = re.compile(pattern)
for line in output.split("\n"):
line = line.strip()
if not line:
continue
match = regexp.search(line)
if not match:
continue
size += sum(int(value) for value in match.groups())
return size

def _format_availale_bytes(value, total):
percent_raw = float(value) / float(total)
blocks_per_progress = 10
used_blocks = int(round(blocks_per_progress * percent_raw))
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)

if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
_configure_defaults()
output = _get_size_output()
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))

print("Memory Usage -> http://bit.ly/pio-memory-usage")
if data_max_size and data_size > -1:
print("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
if program_size > -1:
print("PROGRAM: %s" % _format_availale_bytes(program_size,
program_max_size))
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output)

# raise error
# if data_max_size and data_size > data_max_size:
# sys.stderr.write(
# "Error: The data size (%d bytes) is greater "
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
# env.Exit(1)
if program_size > program_max_size:
sys.stderr.write("Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" %
(program_size, program_max_size))
env.Exit(1)

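As a reading aid for the rewritten CheckUploadSize above, here is a self-contained sketch of the regex-based accounting it performs on `size -B -d` style output; the two patterns are the `_configure_defaults` defaults from the hunk, while the sample output and byte counts are invented for illustration.

```
# Illustrative sketch only; sums the capture groups of each matching line,
# the same way _calculate_size does (text+data -> flash, data+bss -> RAM).
import re

SIZEPROGREGEXP = r"^(\d+)\s+(\d+)\s+\d+\s"
SIZEDATAREGEXP = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"


def calculate_size(output, pattern):
    size = 0
    regexp = re.compile(pattern)
    for line in output.split("\n"):
        match = regexp.search(line.strip())
        if match:
            size += sum(int(value) for value in match.groups())
    return size


# Made-up `size -B -d` output for a small firmware image:
sample = ("   text\t   data\t    bss\t    dec\t    hex\tfilename\n"
          "  10388\t    196\t    568\t  11152\t   2b90\tfirmware.elf")
print(calculate_size(sample, SIZEPROGREGEXP))  # 10584 (program/flash bytes)
print(calculate_size(sample, SIZEDATAREGEXP))  # 764 (data/RAM bytes)
```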
@@ -222,9 +275,9 @@ def PrintUploadInfo(env):
|
||||
available.extend(env.BoardConfig().get("upload", {}).get(
|
||||
"protocols", []))
|
||||
if available:
|
||||
print "AVAILABLE: %s" % ", ".join(sorted(set(available)))
|
||||
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
|
||||
if configured:
|
||||
print "CURRENT: upload_protocol = %s" % configured
|
||||
print("CURRENT: upload_protocol = %s" % configured)
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
||||
@@ -44,31 +44,31 @@ def scons_patched_match_splitext(path, suffixes=None):
|
||||
def _build_project_deps(env):
|
||||
project_lib_builder = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# append project libs to the beginning of list
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=project_lib_builder.build())
|
||||
# append extra linker related options from libs
|
||||
env.AppendUnique(
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: project_lib_builder.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if project_lib_builder.env.get(key)
|
||||
})
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
projenv = env.Clone()
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
else:
|
||||
projenv = env.Clone()
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
|
||||
projenv = env.Clone()
|
||||
|
||||
# CPPPATH from dependencies
|
||||
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
|
||||
# extra build flags from `platformio.ini`
|
||||
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
is_test = "__test" in COMMAND_LINE_TARGETS
|
||||
if is_test:
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
if not is_test or env.get("TEST_BUILD_PROJECT_SRC") == "true":
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
@@ -93,7 +93,7 @@ def BuildProgram(env):
|
||||
if not Util.case_sensitive_suffixes(".s", ".S"):
|
||||
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
|
||||
|
||||
if "__debug" in COMMAND_LINE_TARGETS:
|
||||
if set(["__debug", "debug"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.ProcessDebug()
|
||||
|
||||
# process extra flags from board
|
||||
@@ -112,6 +112,9 @@ def BuildProgram(env):
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
|
||||
# build project with dependencies
|
||||
_build_project_deps(env)
|
||||
|
||||
@@ -127,20 +130,26 @@ def BuildProgram(env):
|
||||
|
||||
program = env.Program(
|
||||
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
checksize_action = env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking program size")
|
||||
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
|
||||
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.AddPostAction(program, checksize_action)
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize", program,
|
||||
env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking size $PIOMAINPROG")))
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags):
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
result = env.ParseFlags(str(flags))
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
flags = [flags]
|
||||
result = {}
|
||||
for raw in flags:
|
||||
for key, value in env.ParseFlags(str(raw)).items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
result[key].extend(value)
|
||||
|
||||
cppdefines = []
|
||||
for item in result['CPPDEFINES']:
|
||||
@@ -303,7 +312,8 @@ def BuildFrameworks(env, frameworks):
|
||||
if f in ("arduino", "energia"):
|
||||
# Arduino IDE appends .o the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
env.ConvertInoToCpp()
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if f in board_frameworks:
|
||||
SConscript(env.GetFrameworkScript(f), exports="env")
|
||||
@@ -314,6 +324,7 @@ def BuildFrameworks(env, frameworks):
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
return env.StaticLibrary(
|
||||
env.subst(variant_dir),
|
||||
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
|
||||
|
||||
@@ -55,7 +55,6 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
"--build-dir",
|
||||
default=mkdtemp,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
@@ -73,7 +72,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
|
||||
project_option, verbose):
|
||||
|
||||
@@ -84,9 +83,13 @@ def cli( # pylint: disable=too-many-arguments
|
||||
|
||||
try:
|
||||
app.set_session_var("force_option", True)
|
||||
_clean_dir(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).iteritems():
|
||||
if not keep_build_dir and isdir(build_dir):
|
||||
util.rmtree_(build_dir)
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).items():
|
||||
if not patterns:
|
||||
continue
|
||||
contents = []
|
||||
@@ -116,11 +119,6 @@ def cli( # pylint: disable=too-many-arguments
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
|
||||
def _clean_dir(dirpath):
|
||||
util.rmtree_(dirpath)
|
||||
makedirs(dirpath)
|
||||
|
||||
|
||||
def _copy_contents(dst_dir, contents):
|
||||
items = {"dirs": set(), "files": set()}
|
||||
|
||||
@@ -135,7 +133,8 @@ def _copy_contents(dst_dir, contents):
|
||||
if dst_dir_name == "src" and len(items['dirs']) == 1:
|
||||
copytree(list(items['dirs']).pop(), dst_dir, symlinks=True)
|
||||
else:
|
||||
makedirs(dst_dir)
|
||||
if not isdir(dst_dir):
|
||||
makedirs(dst_dir)
|
||||
for d in items['dirs']:
|
||||
copytree(d, join(dst_dir, basename(d)), symlinks=True)
|
||||
|
||||
@@ -146,7 +145,10 @@ def _copy_contents(dst_dir, contents):
|
||||
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
|
||||
|
||||
for f in items['files']:
|
||||
copyfile(f, join(dst_dir, basename(f)))
|
||||
dst_file = join(dst_dir, basename(f))
|
||||
if f == dst_file:
|
||||
continue
|
||||
copyfile(f, dst_file)
|
||||
|
||||
|
||||
def _exclude_contents(dst_dir, patterns):
|
||||
|
||||
@@ -55,7 +55,7 @@ def device_list( # pylint: disable=too-many-branches
|
||||
"mdns": "Multicast DNS Services"
|
||||
}
|
||||
|
||||
for key, value in data.iteritems():
|
||||
for key, value in data.items():
|
||||
if not single_key:
|
||||
click.secho(titles[key], bold=True)
|
||||
click.echo("=" * len(titles[key]))
|
||||
@@ -85,7 +85,7 @@ def device_list( # pylint: disable=too-many-branches
|
||||
if item['properties']:
|
||||
click.echo("Properties: %s" % ("; ".join([
|
||||
"%s=%s" % (k, v)
|
||||
for k, v in item['properties'].iteritems()
|
||||
for k, v in item['properties'].items()
|
||||
])))
|
||||
click.echo("")
|
||||
|
||||
@@ -182,7 +182,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
kwargs['port'] = ports[0]['port']
|
||||
|
||||
sys.argv = ["monitor"]
|
||||
for k, v in kwargs.iteritems():
|
||||
for k, v in kwargs.items():
|
||||
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
|
||||
continue
|
||||
k = "--" + k.replace("_", "-")
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
import sys
|
||||
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
@@ -30,13 +29,3 @@ from platformio.managers.core import pioplus_call
|
||||
@click.option("--no-open", is_flag=True)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
def shutdown_servers():
|
||||
port = 8010
|
||||
while port < 9000:
|
||||
try:
|
||||
requests.get("http://127.0.0.1:%d?__shutdown__=1" % port)
|
||||
port += 1
|
||||
except: # pylint: disable=bare-except
|
||||
return
|
||||
|
||||
@@ -23,6 +23,7 @@ import click
|
||||
from platformio import exception, util
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cli_platform_install
|
||||
from platformio.commands.run import check_project_envs
|
||||
from platformio.ide.projectgenerator import ProjectGenerator
|
||||
from platformio.managers.platform import PlatformManager
|
||||
|
||||
@@ -73,22 +74,21 @@ def cli(
|
||||
click.secho(
|
||||
"\nThe current working directory", fg="yellow", nl=False)
|
||||
click.secho(" %s " % project_dir, fg="cyan", nl=False)
|
||||
click.secho(
|
||||
"will be used for project.\n"
|
||||
"You can specify another project directory via\n"
|
||||
"`platformio init -d %PATH_TO_THE_PROJECT_DIR%` command.",
|
||||
fg="yellow")
|
||||
click.secho("will be used for the project.", fg="yellow")
|
||||
click.echo("")
|
||||
|
||||
click.echo("The next files/directories have been created in %s" %
|
||||
click.style(project_dir, fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" % click.style(
|
||||
"platformio.ini", fg="cyan"))
|
||||
click.echo(
|
||||
"%s - Put your source files here" % click.style("src", fg="cyan"))
|
||||
click.echo("%s - Put project header files here" % click.style(
|
||||
"include", fg="cyan"))
|
||||
click.echo("%s - Put here project specific (private) libraries" %
|
||||
click.style("lib", fg="cyan"))
|
||||
click.echo("%s - Put project source files here" % click.style(
|
||||
"src", fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" % click.style(
|
||||
"platformio.ini", fg="cyan"))
|
||||
|
||||
is_new_project = not util.is_platformio_project(project_dir)
|
||||
init_base_project(project_dir)
|
||||
|
||||
if board:
|
||||
@@ -102,16 +102,28 @@ def cli(
|
||||
pg = ProjectGenerator(project_dir, ide, env_name)
|
||||
pg.generate()
|
||||
|
||||
if not silent:
|
||||
if is_new_project:
|
||||
init_ci_conf(project_dir)
|
||||
init_cvs_ignore(project_dir)
|
||||
|
||||
if silent:
|
||||
return
|
||||
|
||||
if ide:
|
||||
click.secho(
|
||||
"\nProject has been successfully initialized!\nUseful commands:\n"
|
||||
"`platformio run` - process/build project from the current "
|
||||
"directory\n"
|
||||
"`platformio run --target upload` or `platformio run -t upload` "
|
||||
"- upload firmware to embedded board\n"
|
||||
"`platformio run --target clean` - clean project (remove compiled "
|
||||
"files)\n"
|
||||
"`platformio run --help` - additional information",
|
||||
"\nProject has been successfully %s including configuration files "
|
||||
"for `%s` IDE." % ("initialized" if is_new_project else "updated",
|
||||
ide),
|
||||
fg="green")
|
||||
else:
|
||||
click.secho(
|
||||
"\nProject has been successfully %s! Useful commands:\n"
|
||||
"`pio run` - process/build project from the current directory\n"
|
||||
"`pio run --target upload` or `pio run -t upload` "
|
||||
"- upload firmware to a target\n"
|
||||
"`pio run --target clean` - clean project (remove compiled files)"
|
||||
"\n`pio run --help` - additional information" %
|
||||
("initialized" if is_new_project else "updated"),
|
||||
fg="green")
|
||||
|
||||
|
||||
@@ -119,10 +131,11 @@ def get_best_envname(project_dir, boards=None):
|
||||
config = util.load_project_config(project_dir)
|
||||
env_default = None
|
||||
if config.has_option("platformio", "env_default"):
|
||||
env_default = config.get("platformio",
|
||||
"env_default").split(", ")[0].strip()
|
||||
env_default = util.parse_conf_multi_values(
|
||||
config.get("platformio", "env_default"))
|
||||
check_project_envs(config, env_default)
|
||||
if env_default:
|
||||
return env_default
|
||||
return env_default[0]
|
||||
section = None
|
||||
for section in config.sections():
|
||||
if not section.startswith("env:"):
|
||||
@@ -134,35 +147,82 @@ def get_best_envname(project_dir, boards=None):
|
||||
|
||||
|
||||
def init_base_project(project_dir):
|
||||
if not util.is_platformio_project(project_dir):
|
||||
copyfile(
|
||||
join(util.get_source_dir(), "projectconftpl.ini"),
|
||||
join(project_dir, "platformio.ini"))
|
||||
if util.is_platformio_project(project_dir):
|
||||
return
|
||||
|
||||
copyfile(
|
||||
join(util.get_source_dir(), "projectconftpl.ini"),
|
||||
join(project_dir, "platformio.ini"))
|
||||
|
||||
with util.cd(project_dir):
|
||||
lib_dir = util.get_projectlib_dir()
|
||||
src_dir = util.get_projectsrc_dir()
|
||||
for d in (src_dir, lib_dir):
|
||||
if not isdir(d):
|
||||
makedirs(d)
|
||||
dir_to_readme = [
|
||||
(util.get_projectsrc_dir(), None),
|
||||
(util.get_projectinclude_dir(), init_include_readme),
|
||||
(util.get_projectlib_dir(), init_lib_readme),
|
||||
(util.get_projecttest_dir(), init_test_readme),
|
||||
]
|
||||
for (path, cb) in dir_to_readme:
|
||||
if isdir(path):
|
||||
continue
|
||||
makedirs(path)
|
||||
if cb:
|
||||
cb(path)
|
||||
|
||||
init_lib_readme(lib_dir)
|
||||
init_ci_conf(project_dir)
|
||||
init_cvs_ignore(project_dir)
|
||||
|
||||
def init_include_readme(include_dir):
|
||||
with open(join(include_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for project header files.
|
||||
|
||||
A header file is a file containing C declarations and macro definitions
|
||||
to be shared between several project source files. You request the use of a
|
||||
header file in your project source file (C, C++, etc) located in `src` folder
|
||||
by including it, with the C preprocessing directive `#include'.
|
||||
|
||||
```src/main.c
|
||||
|
||||
#include "header.h"
|
||||
|
||||
int main (void)
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
Including a header file produces the same results as copying the header file
|
||||
into each source file that needs it. Such copying would be time-consuming
|
||||
and error-prone. With a header file, the related declarations appear
|
||||
in only one place. If they need to be changed, they can be changed in one
|
||||
place, and programs that include the header file will automatically use the
|
||||
new version when next recompiled. The header file eliminates the labor of
|
||||
finding and changing all the copies as well as the risk that a failure to
|
||||
find one copy will result in inconsistencies within a program.
|
||||
|
||||
In C, the usual convention is to give header files names that end with `.h'.
|
||||
It is most portable to use only letters, digits, dashes, and underscores in
|
||||
header file names, and at most one dot.
|
||||
|
||||
Read more about using header files in official GCC documentation:
|
||||
|
||||
* Include Syntax
|
||||
* Include Operation
|
||||
* Once-Only Headers
|
||||
* Computed Includes
|
||||
|
||||
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
|
||||
""")
|
||||
|
||||
|
||||
def init_lib_readme(lib_dir):
|
||||
if isfile(join(lib_dir, "readme.txt")):
|
||||
return
|
||||
with open(join(lib_dir, "readme.txt"), "w") as f:
|
||||
with open(join(lib_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for the project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link to executable file.
|
||||
This directory is intended for project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link into executable file.
|
||||
|
||||
The source code of each library should be placed in separate directory, like
|
||||
"lib/private_lib/[here are source files]".
|
||||
The source code of each library should be placed in a an own separate directory
|
||||
("lib/your_library_name/[here are source files]").
|
||||
|
||||
For example, see how can be organized `Foo` and `Bar` libraries:
|
||||
For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
|
||||
|--lib
|
||||
| |
|
||||
@@ -172,40 +232,62 @@ For example, see how can be organized `Foo` and `Bar` libraries:
|
||||
| | |--src
|
||||
| | |- Bar.c
|
||||
| | |- Bar.h
|
||||
| | |- library.json (optional, custom build options, etc) http://docs.platformio.org/page/librarymanager/config.html
|
||||
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| |
|
||||
| |--Foo
|
||||
| | |- Foo.c
|
||||
| | |- Foo.h
|
||||
| |
|
||||
| |- readme.txt --> THIS FILE
|
||||
| |- README --> THIS FILE
|
||||
|
|
||||
|- platformio.ini
|
||||
|--src
|
||||
|- main.c
|
||||
|
||||
Then in `src/main.c` you should use:
|
||||
|
||||
and a contents of `src/main.c`:
|
||||
```
|
||||
#include <Foo.h>
|
||||
#include <Bar.h>
|
||||
|
||||
// rest H/C/CPP code
|
||||
int main (void)
|
||||
{
|
||||
...
|
||||
}
|
||||
|
||||
PlatformIO will find your libraries automatically, configure preprocessor's
|
||||
include paths and build them.
|
||||
```
|
||||
|
||||
PlatformIO Library Dependency Finder will find automatically dependent
|
||||
libraries scanning project source files.
|
||||
|
||||
More information about PlatformIO Library Dependency Finder
|
||||
- http://docs.platformio.org/page/librarymanager/ldf.html
|
||||
- https://docs.platformio.org/page/librarymanager/ldf.html
|
||||
""")
|
||||
|
||||
|
||||
def init_test_readme(test_dir):
|
||||
with open(join(test_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for PIO Unit Testing and project tests.
|
||||
|
||||
Unit Testing is a software testing method by which individual units of
|
||||
source code, sets of one or more MCU program modules together with associated
|
||||
control data, usage procedures, and operating procedures, are tested to
|
||||
determine whether they are fit for use. Unit testing finds problems early
|
||||
in the development cycle.
|
||||
|
||||
More information about PIO Unit Testing:
|
||||
- https://docs.platformio.org/page/plus/unit-testing.html
|
||||
""")
|
||||
|
||||
|
||||
def init_ci_conf(project_dir):
|
||||
if isfile(join(project_dir, ".travis.yml")):
|
||||
conf_path = join(project_dir, ".travis.yml")
|
||||
if isfile(conf_path):
|
||||
return
|
||||
with open(join(project_dir, ".travis.yml"), "w") as f:
|
||||
with open(conf_path, "w") as f:
|
||||
f.write("""# Continuous Integration (CI) is the practice, in software
|
||||
# engineering, of merging all developer working copies with a shared mainline
|
||||
# several times a day < http://docs.platformio.org/page/ci/index.html >
|
||||
# several times a day < https://docs.platformio.org/page/ci/index.html >
|
||||
#
|
||||
# Documentation:
|
||||
#
|
||||
@@ -213,13 +295,13 @@ def init_ci_conf(project_dir):
|
||||
# < https://docs.travis-ci.com/user/integration/platformio/ >
|
||||
#
|
||||
# * PlatformIO integration with Travis CI
|
||||
# < http://docs.platformio.org/page/ci/travis.html >
|
||||
# < https://docs.platformio.org/page/ci/travis.html >
|
||||
#
|
||||
# * User Guide for `platformio ci` command
|
||||
# < http://docs.platformio.org/page/userguide/cmd_ci.html >
|
||||
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
|
||||
#
|
||||
#
|
||||
# Please choice one of the following templates (proposed below) and uncomment
|
||||
# Please choose one of the following templates (proposed below) and uncomment
|
||||
# it (remove "# " before each line) or use own configuration according to the
|
||||
# Travis CI documentation (see above).
|
||||
#
|
||||
@@ -247,7 +329,7 @@ def init_ci_conf(project_dir):
|
||||
|
||||
|
||||
#
|
||||
# Template #2: The project is intended to by used as a library with examples
|
||||
# Template #2: The project is intended to be used as a library with examples.
|
||||
#
|
||||
|
||||
# language: python
|
||||
@@ -274,23 +356,11 @@ def init_ci_conf(project_dir):


def init_cvs_ignore(project_dir):
ignore_path = join(project_dir, ".gitignore")
default = [".pioenvs\n", ".piolibdeps\n"]
current = []
modified = False
if isfile(ignore_path):
with open(ignore_path) as fp:
current = fp.readlines()
if current and not current[-1].endswith("\n"):
current[-1] += "\n"
for d in default:
if d not in current:
modified = True
current.append(d)
if not modified:
conf_path = join(project_dir, ".gitignore")
if isfile(conf_path):
return
with open(ignore_path, "w") as fp:
fp.writelines(current)
with open(conf_path, "w") as fp:
fp.writelines([".pio\n", ".pioenvs\n", ".piolibdeps\n"])

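For reference, the kind of project `.gitignore` the reworked init_cvs_ignore above ends up writing; this is a representative example assembled from entries visible in this changeset, not a verbatim file from the repository.

```
# project .gitignore written by `platformio init` (representative example)
.pio
.pioenvs
.piolibdeps
```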
def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
|
||||
|
||||
@@ -203,7 +203,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
if not isinstance(query, list):
|
||||
query = list(query)
|
||||
|
||||
for key, values in filters.iteritems():
|
||||
for key, values in filters.items():
|
||||
for value in values:
|
||||
query.append('%s:"%s"' % (key, value))
|
||||
|
||||
@@ -228,7 +228,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
click.echo("For more examples and advanced search syntax, "
|
||||
"please use documentation:")
|
||||
click.secho(
|
||||
"http://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
fg="cyan")
|
||||
return
|
||||
|
||||
@@ -307,13 +307,12 @@ def lib_builtin(storage, json_output):
|
||||
def lib_show(library, json_output):
|
||||
lm = LibraryManager()
|
||||
name, requirements, _ = lm.parse_pkg_uri(library)
|
||||
lib_id = lm.search_lib_id(
|
||||
{
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=json_output,
|
||||
interactive=not json_output)
|
||||
lib_id = lm.search_lib_id({
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=json_output,
|
||||
interactive=not json_output)
|
||||
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
if json_output:
|
||||
return click.echo(json.dumps(lib))
|
||||
@@ -423,16 +422,16 @@ def lib_stats(json_output):
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
def _print_lib_item(item):
|
||||
click.echo((printitemdate_tpl
|
||||
if "date" in item else printitem_tpl).format(
|
||||
name=click.style(item['name'], fg="cyan"),
|
||||
date=str(
|
||||
time.strftime("%c", util.parse_date(item['date']))
|
||||
if "date" in item else ""),
|
||||
url=click.style(
|
||||
"https://platformio.org/lib/show/%s/%s" %
|
||||
(item['id'], quote(item['name'])),
|
||||
fg="blue")))
|
||||
date = str(
|
||||
time.strftime("%c", util.parse_date(item['date'])) if "date" in
|
||||
item else "")
|
||||
url = click.style(
|
||||
"https://platformio.org/lib/show/%s/%s" % (item['id'],
|
||||
quote(item['name'])),
|
||||
fg="blue")
|
||||
click.echo(
|
||||
(printitemdate_tpl if "date" in item else printitem_tpl).format(
|
||||
name=click.style(item['name'], fg="cyan"), date=date, url=url))
|
||||
|
||||
def _print_tag_item(name):
|
||||
click.echo(
|
||||
|
||||
@@ -273,8 +273,8 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
if item['type']:
|
||||
click.echo("Type: %s" % item['type'])
|
||||
click.echo("Requirements: %s" % item['requirements'])
|
||||
click.echo("Installed: %s" % ("Yes" if item.get("version") else
|
||||
"No (optional)"))
|
||||
click.echo("Installed: %s" %
|
||||
("Yes" if item.get("version") else "No (optional)"))
|
||||
if "version" in item:
|
||||
click.echo("Version: %s" % item['version'])
|
||||
if "originalVersion" in item:
|
||||
@@ -365,8 +365,8 @@ def platform_update(platforms, only_packages, only_check, json_output):
|
||||
if not pkg_dir:
|
||||
continue
|
||||
latest = pm.outdated(pkg_dir, requirements)
|
||||
if (not latest and not PlatformFactory.newPlatform(pkg_dir)
|
||||
.are_outdated_packages()):
|
||||
if (not latest and not PlatformFactory.newPlatform(
|
||||
pkg_dir).are_outdated_packages()):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg_dir, with_boards=False, expose_packages=False)
|
||||
|
||||
@@ -108,7 +108,9 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
|
||||
results.append(result)
|
||||
if result[1] and "monitor" in ep.get_build_targets() and \
|
||||
"nobuild" not in ep.get_build_targets():
|
||||
ctx.invoke(cmd_device_monitor)
|
||||
ctx.invoke(
|
||||
cmd_device_monitor,
|
||||
environment=environment[0] if environment else None)
|
||||
|
||||
found_error = any(status is False for (_, status) in results)
|
||||
|
||||
@@ -125,32 +127,33 @@ class EnvironmentProcessor(object):
|
||||
|
||||
DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")
|
||||
|
||||
KNOWN_PLATFORMIO_OPTIONS = ("description", "env_default", "home_dir",
|
||||
"lib_dir", "libdeps_dir", "include_dir",
|
||||
"src_dir", "build_dir", "data_dir", "test_dir",
|
||||
"boards_dir", "lib_extra_dirs")
|
||||
KNOWN_PLATFORMIO_OPTIONS = [
|
||||
"description", "env_default", "home_dir", "lib_dir", "libdeps_dir",
|
||||
"include_dir", "src_dir", "build_dir", "data_dir", "test_dir",
|
||||
"boards_dir", "lib_extra_dirs"
|
||||
]
|
||||
|
||||
KNOWN_ENV_OPTIONS = ("platform", "framework", "board", "build_flags",
|
||||
"src_build_flags", "build_unflags", "src_filter",
|
||||
"extra_scripts", "targets", "upload_port",
|
||||
"upload_protocol", "upload_speed", "upload_flags",
|
||||
"upload_resetmethod", "lib_deps", "lib_ignore",
|
||||
"lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode",
|
||||
"lib_archive", "piotest", "test_transport",
|
||||
"test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "debug_tool", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds", "debug_server",
|
||||
"debug_init_break", "debug_load_cmd",
|
||||
"debug_load_mode", "debug_svd_path", "monitor_port",
|
||||
"monitor_speed", "monitor_rts", "monitor_dtr")
|
||||
KNOWN_ENV_OPTIONS = [
|
||||
"platform", "framework", "board", "build_flags", "src_build_flags",
|
||||
"build_unflags", "src_filter", "extra_scripts", "targets",
|
||||
"upload_port", "upload_protocol", "upload_speed", "upload_flags",
|
||||
"upload_resetmethod", "lib_deps", "lib_ignore", "lib_extra_dirs",
|
||||
"lib_ldf_mode", "lib_compat_mode", "lib_archive", "piotest",
|
||||
"test_transport", "test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "test_build_project_src", "debug_tool", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds", "debug_server",
|
||||
"debug_init_break", "debug_load_cmd", "debug_load_mode",
|
||||
"debug_svd_path", "monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr"
|
||||
]
|
||||
|
||||
IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore",
|
||||
"test_port", "test_speed", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds",
|
||||
"debug_server", "debug_init_break",
|
||||
"debug_load_cmd", "debug_load_mode",
|
||||
"monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr")
|
||||
IGNORE_BUILD_OPTIONS = [
|
||||
"test_transport", "test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "debug_port", "debug_init_cmds", "debug_extra_cmds",
|
||||
"debug_server", "debug_init_break", "debug_load_cmd",
|
||||
"debug_load_mode", "monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr"
|
||||
]
|
||||
|
||||
REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}
|
||||
|
||||
@@ -209,10 +212,10 @@ class EnvironmentProcessor(object):
|
||||
|
||||
if is_error or "piotest_processor" not in self.cmd_ctx.meta:
|
||||
print_header(
|
||||
"[%s] Took %.2f seconds" %
|
||||
((click.style("ERROR", fg="red", bold=True)
|
||||
if is_error else click.style(
|
||||
"SUCCESS", fg="green", bold=True)), time() - start_time),
|
||||
"[%s] Took %.2f seconds" % (
|
||||
(click.style("ERROR", fg="red", bold=True) if is_error else
|
||||
click.style("SUCCESS", fg="green", bold=True)),
|
||||
time() - start_time),
|
||||
is_error=is_error)
|
||||
|
||||
return not is_error
|
||||
@@ -383,9 +386,8 @@ def print_summary(results, start_time):
|
||||
|
||||
print_header(
|
||||
"[%s] Took %.2f seconds" % (
|
||||
(click.style("SUCCESS", fg="green", bold=True)
|
||||
if successed else click.style("ERROR", fg="red", bold=True)),
|
||||
time() - start_time),
|
||||
(click.style("SUCCESS", fg="green", bold=True) if successed else
|
||||
click.style("ERROR", fg="red", bold=True)), time() - start_time),
|
||||
is_error=not successed)
|
||||
|
||||
|
||||
|
||||
@@ -32,14 +32,14 @@ from platformio.managers.lib import LibraryManager
|
||||
help="Do not update, only check for new version")
|
||||
@click.pass_context
|
||||
def cli(ctx, core_packages, only_check):
|
||||
# cleanup lib search results, cached board and platform lists
|
||||
app.clean_cache()
|
||||
|
||||
update_core_packages(only_check)
|
||||
|
||||
if core_packages:
|
||||
return
|
||||
|
||||
# cleanup lib search results, cached board and platform lists
|
||||
app.clean_cache()
|
||||
|
||||
click.echo()
|
||||
click.echo("Platform Manager")
|
||||
click.echo("================")
|
||||
|
||||
@@ -20,7 +20,7 @@ import click
|
||||
import requests
|
||||
|
||||
from platformio import VERSION, __version__, exception, util
|
||||
from platformio.commands.home import shutdown_servers
|
||||
from platformio.managers.core import shutdown_piohome_servers
|
||||
|
||||
|
||||
@click.command(
|
||||
@@ -36,7 +36,7 @@ def cli(dev):
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
|
||||
# kill all PIO Home servers, they block `pioplus` binary
|
||||
shutdown_servers()
|
||||
shutdown_piohome_servers()
|
||||
|
||||
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
|
||||
cmds = (["pip", "install", "--upgrade",
|
||||
@@ -63,7 +63,7 @@ def cli(dev):
|
||||
fg="green")
|
||||
click.echo("Release notes: ", nl=False)
|
||||
click.secho(
|
||||
"http://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
"https://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
|
||||
@@ -102,8 +102,9 @@ def get_pip_package(to_develop):
|
||||
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
|
||||
try:
|
||||
with open(pkg_name, "w") as fp:
|
||||
r = util.exec_command(
|
||||
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True)
|
||||
r = util.exec_command(["curl", "-fsSL", dl_url],
|
||||
stdout=fp,
|
||||
universal_newlines=True)
|
||||
assert r['returncode'] == 0
|
||||
# check ZIP structure
|
||||
with ZipFile(pkg_name) as zp:
|
||||
@@ -150,7 +151,7 @@ def get_develop_latest_version():
|
||||
|
||||
def get_pypi_latest_version():
|
||||
r = requests.get(
|
||||
"https://pypi.python.org/pypi/platformio/json",
|
||||
"https://pypi.org/pypi/platformio/json",
|
||||
headers=util.get_request_defheaders())
|
||||
r.raise_for_status()
|
||||
return r.json()['info']['version']
|
||||
|
||||
@@ -28,6 +28,10 @@ class ReturnErrorCode(PlatformioException):
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class LockFileTimeoutError(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class MinitermException(PlatformioException):
|
||||
pass
|
||||
|
||||
@@ -102,6 +106,13 @@ class PackageInstallError(PlatformioException):
|
||||
"Please try this solution -> http://bit.ly/faq-package-manager")
|
||||
|
||||
|
||||
class ExtractArchiveItemError(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
|
||||
"tool or check this solution -> http://bit.ly/faq-package-manager")
|
||||
|
||||
|
||||
class FDUnrecognizedStatusCode(PlatformioException):
|
||||
|
||||
MESSAGE = "Got an unrecognized status code '{0}' when downloaded {1}"
|
||||
@@ -167,7 +178,7 @@ class InternetIsOffline(PlatformioException):
|
||||
MESSAGE = (
|
||||
"You are not connected to the Internet.\n"
|
||||
"If you build a project first time, we need Internet connection "
|
||||
"to install all dependencies and toolchain.")
|
||||
"to install all dependencies and toolchains.")
|
||||
|
||||
|
||||
class LibNotFound(PlatformioException):
|
||||
@@ -224,13 +235,32 @@ class CIBuildEnvsEmpty(PlatformioException):
|
||||
"predefined environments using `--project-conf` option")
|
||||
|
||||
|
||||
class InvalidUdevRules(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class MissedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Please install `99-platformio-udev.rules`. \nMode details: "
|
||||
"https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules")
|
||||
|
||||
|
||||
class OutdatedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Your `{0}` are outdated. Please update or reinstall them."
|
||||
"\n Mode details: https://docs.platformio.org"
|
||||
"/en/latest/faq.html#platformio-udev-rules")
|
||||
|
||||
|
||||
class UpgradeError(PlatformioException):
|
||||
|
||||
MESSAGE = """{0}
|
||||
|
||||
* Upgrade using `pip install -U platformio`
|
||||
* Try different installation/upgrading steps:
|
||||
http://docs.platformio.org/page/installation.html
|
||||
https://docs.platformio.org/page/installation.html
|
||||
"""
|
||||
|
||||
|
||||
@@ -254,7 +284,7 @@ class DebugSupportError(PlatformioException):
|
||||
|
||||
MESSAGE = ("Currently, PlatformIO does not support debugging for `{0}`.\n"
|
||||
"Please contact support@pioplus.com or visit "
|
||||
"< http://docs.platformio.org/page/plus/debugging.html >")
|
||||
"< https://docs.platformio.org/page/plus/debugging.html >")
|
||||
|
||||
|
||||
class DebugInvalidOptions(PlatformioException):
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath
|
||||
|
||||
import bottle
|
||||
@@ -127,22 +128,8 @@ class ProjectGenerator(object):
|
||||
|
||||
@staticmethod
|
||||
def _merge_contents(dst_path, contents):
|
||||
file_name = basename(dst_path)
|
||||
|
||||
# merge .gitignore
|
||||
if file_name == ".gitignore" and isfile(dst_path):
|
||||
modified = False
|
||||
default = [l.strip() for l in contents.split("\n")]
|
||||
with open(dst_path) as fp:
|
||||
current = [l.strip() for l in fp.readlines()]
|
||||
for d in default:
|
||||
if d and d not in current:
|
||||
modified = True
|
||||
current.append(d)
|
||||
if not modified:
|
||||
return
|
||||
contents = "\n".join(current) + "\n"
|
||||
|
||||
if basename(dst_path) == ".gitignore" and isfile(dst_path):
|
||||
return
|
||||
with open(dst_path, "w") as f:
|
||||
f.write(contents)
|
||||
|
||||
@@ -160,7 +147,8 @@ class ProjectGenerator(object):
|
||||
"project_libdeps_dir": util.get_projectlibdeps_dir(),
|
||||
"systype": util.get_systype(),
|
||||
"platformio_path": self._fix_os_path(
|
||||
util.where_is_program("platformio")),
|
||||
sys.argv[0] if isfile(sys.argv[0])
|
||||
else util.where_is_program("platformio")),
|
||||
"env_pathsep": os.pathsep,
|
||||
"env_path": self._fix_os_path(os.getenv("PATH"))
|
||||
}) # yapf: disable
|
||||
|
||||
@@ -3,4 +3,4 @@
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
||||
% end
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
% _defines = " ".join(["-D%s" % d for d in defines])
|
||||
{
|
||||
"execPath": "{{ cxx_path.replace("\\", "/") }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
|
||||
"gccSuppressWarnings": false
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
.pio
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.clang_complete
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
.pio
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
CMakeListsPrivate.txt
|
||||
|
||||
3  platformio/ide/tpls/clion/.idea/misc.xml.tpl  generated
@@ -9,6 +9,9 @@
|
||||
<file path="$PROJECT_DIR$/lib" />
|
||||
<file path="$PROJECT_DIR$/.piolibdeps" />
|
||||
</libraryRoots>
|
||||
<excludeRoots>
|
||||
<file path="$PROJECT_DIR$/.pio" />
|
||||
</excludeRoots>
|
||||
<excludeRoots>
|
||||
<file path="$PROJECT_DIR$/.pioenvs" />
|
||||
</excludeRoots>
|
||||
|
||||
@@ -9,6 +9,12 @@ add_custom_target(
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
|
||||
add_custom_target(
|
||||
PLATFORMIO_BUILD_VERBOSE ALL
|
||||
COMMAND ${PLATFORMIO_CMD} -f -c clion run --verbose
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
|
||||
add_custom_target(
|
||||
PLATFORMIO_UPLOAD ALL
|
||||
COMMAND ${PLATFORMIO_CMD} -f -c clion run --target upload
|
||||
@@ -21,6 +27,12 @@ add_custom_target(
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
|
||||
add_custom_target(
|
||||
PLATFORMIO_MONITOR ALL
|
||||
COMMAND ${PLATFORMIO_CMD} -f -c clion device monitor
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
)
|
||||
|
||||
add_custom_target(
|
||||
PLATFORMIO_TEST ALL
|
||||
COMMAND ${PLATFORMIO_CMD} -f -c clion test
|
||||
|
||||
@@ -1,8 +1,24 @@
|
||||
set(ENV{PATH} "{{env_path}}")
|
||||
set(PLATFORMIO_CMD "{{platformio_path}}")
|
||||
# !!! WARNING !!!
|
||||
# PLEASE DO NOT MODIFY THIS FILE!
|
||||
# USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
|
||||
|
||||
SET(CMAKE_C_COMPILER "{{cc_path.replace("\\", "/")}}")
|
||||
SET(CMAKE_CXX_COMPILER "{{cxx_path.replace("\\", "/")}}")
|
||||
% def _normalize_path(path):
|
||||
% if project_dir in path:
|
||||
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
|
||||
% elif user_home_dir in path:
|
||||
% if "windows" in systype:
|
||||
% path = path.replace(user_home_dir, "$ENV{HOMEDRIVE}$ENV{HOMEPATH}")
|
||||
% else:
|
||||
% path = path.replace(user_home_dir, "$ENV{HOME}")
|
||||
% end
|
||||
% end
|
||||
% return path.replace("\\", "/")
|
||||
% end
|
||||
|
||||
set(PLATFORMIO_CMD "{{ _normalize_path(platformio_path) }}")
|
||||
|
||||
SET(CMAKE_C_COMPILER "{{ _normalize_path(cc_path) }}")
|
||||
SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
|
||||
SET(CMAKE_CXX_FLAGS_DISTRIBUTION "{{cxx_flags}}")
|
||||
SET(CMAKE_C_FLAGS_DISTRIBUTION "{{cc_flags}}")
|
||||
set(CMAKE_CXX_STANDARD 11)
|
||||
@@ -13,15 +29,7 @@ add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
|
||||
% end
|
||||
|
||||
% for include in includes:
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
include_directories("$ENV{HOMEDRIVE}$ENV{HOMEPATH}{{include.replace(user_home_dir, '').replace("\\", "/")}}")
|
||||
% else:
|
||||
include_directories("$ENV{HOME}{{include.replace(user_home_dir, '').replace("\\", "/")}}")
|
||||
% end
|
||||
% else:
|
||||
include_directories("{{include.replace("\\", "/")}}")
|
||||
% end
|
||||
include_directories("{{ _normalize_path(include) }}")
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE SRC_LIST "{{project_src_dir.replace("\\", "/")}}/*.*" "{{project_lib_dir.replace("\\", "/")}}/*.*" "{{project_libdeps_dir.replace("\\", "/")}}/*.*")
|
||||
FILE(GLOB_RECURSE SRC_LIST "{{ _normalize_path(project_src_dir) }}/*.*" "{{ _normalize_path(project_lib_dir) }}/*.*" "{{ _normalize_path(project_libdeps_dir) }}/*.*")
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
% import re
|
||||
% STD_RE = re.compile(r"(\-std=[a-z\+]+\d+)")
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
% cxx_std = cxx_stds[-1] if cxx_stds else ""
|
||||
%
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<project>
|
||||
<configuration id="0.910961921" name="Default">
|
||||
@@ -6,9 +11,9 @@
|
||||
<provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
|
||||
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
|
||||
% if "windows" in systype:
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
% else:
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
% end
|
||||
<language-scope id="org.eclipse.cdt.core.gcc"/>
|
||||
<language-scope id="org.eclipse.cdt.core.g++"/>
|
||||
@@ -21,9 +26,9 @@
|
||||
<provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
|
||||
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
|
||||
% if "windows" in systype:
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
% else:
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
% end
|
||||
<language-scope id="org.eclipse.cdt.core.gcc"/>
|
||||
<language-scope id="org.eclipse.cdt.core.g++"/>
|
||||
|
||||
@@ -3,4 +3,4 @@
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
||||
% end
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
.pio
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.clang_complete
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<itemPath>nbproject/private/launcher.properties</itemPath>
|
||||
</logicalFolder>
|
||||
</logicalFolder>
|
||||
<sourceFolderFilter>^(nbproject|.pioenvs)$</sourceFolderFilter>
|
||||
<sourceFolderFilter>^(nbproject|.pio|.pioenvs)$</sourceFolderFilter>
|
||||
<sourceRootList>
|
||||
<Elem>.</Elem>
|
||||
</sourceRootList>
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run"
|
||||
],
|
||||
@@ -14,7 +14,7 @@
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run"
|
||||
],
|
||||
@@ -23,27 +23,7 @@
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
"clean"
|
||||
],
|
||||
"name": "Clean"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"-f", "-c", "sublimetext",
|
||||
"test"
|
||||
],
|
||||
"name": "Test"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
@@ -54,7 +34,27 @@
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
"clean"
|
||||
],
|
||||
"name": "Clean"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"test"
|
||||
],
|
||||
"name": "Test"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
@@ -65,7 +65,7 @@
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"run",
|
||||
"--target",
|
||||
@@ -76,16 +76,24 @@
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"platformio",
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"update"
|
||||
],
|
||||
"name": "Update platforms and libraries"
|
||||
},
|
||||
{
|
||||
"cmd":
|
||||
[
|
||||
"{{ platformio_path }}",
|
||||
"-f", "-c", "sublimetext",
|
||||
"upgrade"
|
||||
],
|
||||
"name": "Upgrade PlatformIO Core"
|
||||
}
|
||||
],
|
||||
"working_dir": "${project_path:${folder}}",
|
||||
"selector": "source.c, source.c++",
|
||||
"path": "{{env_path}}"
|
||||
"selector": "source.c, source.c++"
|
||||
}
|
||||
],
|
||||
"folders":
|
||||
@@ -97,8 +105,8 @@
|
||||
"settings":
|
||||
{
|
||||
"sublimegdb_workingdir": "{{project_dir}}",
|
||||
"sublimegdb_exec_cmd": "-exec-continue",
|
||||
"sublimegdb_commandline": "{{platformio_path}} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"
|
||||
"sublimegdb_exec_cmd": "",
|
||||
"sublimegdb_commandline": "{{ platformio_path }} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
% for include in includes:
|
||||
-I{{include}}
|
||||
-I"{{include}}"
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
||||
% end
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
% _defines = " ".join(["-D%s" % d for d in defines])
|
||||
{
|
||||
"execPath": "{{ cxx_path.replace("\\", "/") }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
|
||||
"gccIncludePaths": "{{! ','.join("'{}'".format(w.replace("\\", '/')) for w in includes)}}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
.pio
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.clang_complete
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
.pio
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.vscode/.browse.c_cpp.db*
|
||||
|
||||
@@ -1,16 +1,22 @@
|
||||
{
|
||||
"!!! WARNING !!!": "PLEASE DO NOT MODIFY THIS FILE! USE http://docs.platformio.org/page/projectconf/section_env_build.html#build-flags",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "!!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags"
|
||||
},
|
||||
{
|
||||
% import platform
|
||||
% from os.path import commonprefix, dirname
|
||||
% from os.path import commonprefix, dirname, isdir
|
||||
%
|
||||
% systype = platform.system().lower()
|
||||
%
|
||||
% def _escape(text):
|
||||
% return text.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
|
||||
% end
|
||||
%
|
||||
% cleaned_includes = []
|
||||
% for include in includes:
|
||||
% if "toolchain-" not in dirname(commonprefix([include, cc_path])):
|
||||
% cleaned_includes.append(include)
|
||||
% if "toolchain-" not in dirname(commonprefix([include, cc_path])) and isdir(include):
|
||||
% cleaned_includes.append(include)
|
||||
% end
|
||||
% end
|
||||
%
|
||||
@@ -24,7 +30,7 @@
|
||||
% end
|
||||
"includePath": [
|
||||
% for include in cleaned_includes:
|
||||
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
|
||||
"{{! _escape(include) }}",
|
||||
% end
|
||||
""
|
||||
],
|
||||
@@ -33,14 +39,14 @@
|
||||
"databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db",
|
||||
"path": [
|
||||
% for include in cleaned_includes:
|
||||
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
|
||||
"{{! _escape(include) }}",
|
||||
% end
|
||||
""
|
||||
]
|
||||
},
|
||||
"defines": [
|
||||
% for define in defines:
|
||||
"{{!define.replace('"', '\\"')}}",
|
||||
"{{! _escape(define) }}",
|
||||
% end
|
||||
""
|
||||
],
|
||||
@@ -50,13 +56,17 @@
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
%
|
||||
% # pass only architecture specific flags
|
||||
% cc_m_flags = " ".join([f.strip() for f in cc_flags.split(" ") if f.strip().startswith("-m")])
|
||||
%
|
||||
% if cc_stds:
|
||||
"cStandard": "c{{ cc_stds[-1] }}",
|
||||
% end
|
||||
% if cxx_stds:
|
||||
"cppStandard": "c++{{ cxx_stds[-1] }}",
|
||||
% end
|
||||
"compilerPath": "{{ cc_path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"') }}"
|
||||
"compilerPath": "\"{{! _escape(cc_path) }}\" {{! _escape(cc_m_flags) }}"
|
||||
}
|
||||
]
|
||||
],
|
||||
"version": 4
|
||||
}
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
// PIO Unified Debugger
|
||||
//
|
||||
// Documentation: http://docs.platformio.org/page/plus/debugging.html
|
||||
// Configuration: http://docs.platformio.org/page/projectconf/section_env_debug.html
|
||||
// Documentation: https://docs.platformio.org/page/plus/debugging.html
|
||||
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html
|
||||
|
||||
% from os.path import dirname, join
|
||||
%
|
||||
@@ -17,19 +17,22 @@
|
||||
{
|
||||
"type": "platformio-debug",
|
||||
"request": "launch",
|
||||
"name": "PlatformIO Debugger",
|
||||
"name": "PIO Debug",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
% end
|
||||
"preLaunchTask": "PlatformIO: Pre-Debug",
|
||||
"preLaunchTask": {
|
||||
"type": "PlatformIO",
|
||||
"task": "Pre-Debug"
|
||||
},
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
},
|
||||
{
|
||||
"type": "platformio-debug",
|
||||
"request": "launch",
|
||||
"name": "PlatformIO Debugger (Skip Pre-Debug)",
|
||||
"name": "PIO Debug (skip Pre-Debug)",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
|
||||
108  platformio/lockfile.py  Normal file
@@ -0,0 +1,108 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os import remove
|
||||
from os.path import abspath, exists, getmtime
|
||||
from time import sleep, time
|
||||
|
||||
from platformio import exception
|
||||
|
||||
LOCKFILE_TIMEOUT = 3600 # in seconds, 1 hour
|
||||
LOCKFILE_DELAY = 0.2
|
||||
|
||||
LOCKFILE_INTERFACE_FCNTL = 1
|
||||
LOCKFILE_INTERFACE_MSVCRT = 2
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_FCNTL
|
||||
except ImportError:
|
||||
try:
|
||||
import msvcrt
|
||||
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_MSVCRT
|
||||
except ImportError:
|
||||
LOCKFILE_CURRENT_INTERFACE = None
|
||||
|
||||
|
||||
class LockFileExists(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class LockFile(object):
|
||||
|
||||
def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
|
||||
self.timeout = timeout
|
||||
self.delay = delay
|
||||
self._lock_path = abspath(path) + ".lock"
|
||||
self._fp = None
|
||||
|
||||
def _lock(self):
|
||||
if not LOCKFILE_CURRENT_INTERFACE and exists(self._lock_path):
|
||||
# remove stale lock
|
||||
if time() - getmtime(self._lock_path) > 10:
|
||||
try:
|
||||
remove(self._lock_path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise LockFileExists
|
||||
|
||||
self._fp = open(self._lock_path, "w")
|
||||
try:
|
||||
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
|
||||
fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
|
||||
msvcrt.locking(self._fp.fileno(), msvcrt.LK_NBLCK, 1)
|
||||
except IOError:
|
||||
self._fp = None
|
||||
raise LockFileExists
|
||||
return True
|
||||
|
||||
def _unlock(self):
|
||||
if not self._fp:
|
||||
return
|
||||
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
|
||||
fcntl.flock(self._fp.fileno(), fcntl.LOCK_UN)
|
||||
elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
|
||||
msvcrt.locking(self._fp.fileno(), msvcrt.LK_UNLCK, 1)
|
||||
self._fp.close()
|
||||
self._fp = None
|
||||
|
||||
def acquire(self):
|
||||
elapsed = 0
|
||||
while elapsed < self.timeout:
|
||||
try:
|
||||
return self._lock()
|
||||
except LockFileExists:
|
||||
sleep(self.delay)
|
||||
elapsed += self.delay
|
||||
|
||||
raise exception.LockFileTimeoutError()
|
||||
|
||||
def release(self):
|
||||
self._unlock()
|
||||
if exists(self._lock_path):
|
||||
try:
|
||||
remove(self._lock_path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.acquire()
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
self.release()
|
||||
|
||||
def __del__(self):
|
||||
self.release()
|
||||
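The new `platformio/lockfile.py` module above provides an interprocess lock built on `fcntl`/`msvcrt` where available, with a stale-lock fallback otherwise. A minimal usage sketch; the guarded directory and the shortened timeout/delay values are hypothetical:

```python
from platformio.lockfile import LockFile

packages_dir = "/tmp/pio-packages"  # hypothetical directory to serialize access to
# __enter__ does not return the lock object, so no "as" target is used here
with LockFile(packages_dir, timeout=10, delay=0.1):
    # only one PlatformIO process at a time runs this block; on platforms
    # without fcntl/msvcrt, a stale "<path>.lock" older than ~10 s is removed
    pass  # e.g. install or uninstall packages
```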
@@ -66,11 +66,12 @@ def on_platformio_exception(e):
|
||||
|
||||
def in_silence(ctx=None):
|
||||
ctx = ctx or app.get_session_var("command_ctx")
|
||||
assert ctx
|
||||
ctx_args = ctx.args or []
|
||||
return ctx_args and any([
|
||||
ctx.args[0] == "upgrade", "--json-output" in ctx_args,
|
||||
"--version" in ctx_args
|
||||
if not ctx:
|
||||
return True
|
||||
return ctx.args and any([
|
||||
ctx.args[0] == "debug" and "--interpreter" in " ".join(ctx.args),
|
||||
ctx.args[0] == "upgrade", "--json-output" in ctx.args,
|
||||
"--version" in ctx.args
|
||||
])
|
||||
|
||||
|
||||
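The reworked `in_silence()` above no longer asserts on a missing context and adds `pio debug --interpreter` to the invocations that should suppress interactive output. A behavioural sketch with a stand-in context object (illustration only, not the real Click context):

```python
class FakeCtx(object):
    # stand-in for click.Context, carrying only the parsed args
    def __init__(self, args):
        self.args = args

def in_silence(ctx):
    # mirrors the rewritten logic shown in the hunk above
    if not ctx:
        return True
    return ctx.args and any([
        ctx.args[0] == "debug" and "--interpreter" in " ".join(ctx.args),
        ctx.args[0] == "upgrade",
        "--json-output" in ctx.args,
        "--version" in ctx.args,
    ])

print(in_silence(FakeCtx(["debug", "--interpreter=GDB"])))  # True
print(in_silence(FakeCtx(["run", "-e", "uno"])))            # False
```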
@@ -179,7 +180,7 @@ def after_upgrade(ctx):
|
||||
click.secho(
|
||||
"Please remove multiple PIO Cores from a system:", fg="yellow")
|
||||
click.secho(
|
||||
"http://docs.platformio.org/page/faq.html"
|
||||
"https://docs.platformio.org/page/faq.html"
|
||||
"#multiple-pio-cores-in-a-system",
|
||||
fg="cyan")
|
||||
click.secho("*" * terminal_width, fg="yellow")
|
||||
@@ -274,7 +275,8 @@ def check_platformio_upgrade():
|
||||
click.secho("pip install -U platformio", fg="cyan", nl=False)
|
||||
click.secho("` command.", fg="yellow")
|
||||
click.secho("Changes: ", fg="yellow", nl=False)
|
||||
click.secho("http://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
click.secho(
|
||||
"https://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
click.echo("*" * terminal_width)
|
||||
click.echo("")
|
||||
|
||||
@@ -318,8 +320,8 @@ def check_internal_updates(ctx, what):
|
||||
if not app.get_setting("auto_update_" + what):
|
||||
click.secho("Please update them via ", fg="yellow", nl=False)
|
||||
click.secho(
|
||||
"`platformio %s update`" % ("lib --global" if what == "libraries"
|
||||
else "platform"),
|
||||
"`platformio %s update`" %
|
||||
("lib --global" if what == "libraries" else "platform"),
|
||||
fg="cyan",
|
||||
nl=False)
|
||||
click.secho(" command.\n", fg="yellow")
|
||||
|
||||
@@ -16,16 +16,20 @@ import os
|
||||
import subprocess
|
||||
import sys
|
||||
from os.path import dirname, join
|
||||
from time import sleep
|
||||
|
||||
import requests
|
||||
|
||||
from platformio import __version__, exception, util
|
||||
from platformio.managers.package import PackageManager
|
||||
|
||||
CORE_PACKAGES = {
|
||||
"contrib-piohome": ">=0.9.5,<2",
|
||||
"contrib-pysite": ">=0.2.0,<2",
|
||||
"tool-pioplus": ">=1.3.1,<2",
|
||||
"contrib-piohome": "^2.0.1",
|
||||
"contrib-pysite":
|
||||
"~2.%d%d.190418" % (sys.version_info[0], sys.version_info[1]),
|
||||
"tool-pioplus": "^2.1.4",
|
||||
"tool-unity": "~1.20403.0",
|
||||
"tool-scons": "~2.20501.4"
|
||||
"tool-scons": "~2.20501.7"
|
||||
}
|
||||
|
||||
PIOPLUS_AUTO_UPDATES_MAX = 100
|
||||
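The `contrib-pysite` requirement in the CORE_PACKAGES dict above is now derived from the running interpreter, so each Python major/minor series resolves to its own prebuilt package. How that requirement string is composed ("190418" is the date stamp used in this change; the printed value depends on the interpreter):

```python
import sys

# contrib-pysite requirement as built in CORE_PACKAGES above
req = "~2.%d%d.190418" % (sys.version_info[0], sys.version_info[1])
print(req)  # e.g. "~2.27.190418" on Python 2.7, "~2.36.190418" on Python 3.6
```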
@@ -36,11 +40,12 @@ PIOPLUS_AUTO_UPDATES_MAX = 100
|
||||
class CorePackageManager(PackageManager):
|
||||
|
||||
def __init__(self):
|
||||
PackageManager.__init__(self, join(util.get_home_dir(), "packages"), [
|
||||
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
|
||||
"http%s://dl.platformio.org/packages/manifest.json" %
|
||||
("" if sys.version_info < (2, 7, 9) else "s")
|
||||
])
|
||||
super(CorePackageManager, self).__init__(
|
||||
join(util.get_home_dir(), "packages"), [
|
||||
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
|
||||
"http%s://dl.platformio.org/packages/manifest.json" %
|
||||
("" if sys.version_info < (2, 7, 9) else "s")
|
||||
])
|
||||
|
||||
def install( # pylint: disable=keyword-arg-before-vararg
|
||||
self,
|
||||
@@ -92,10 +97,25 @@ def update_core_packages(only_check=False, silent=False):
|
||||
if not pkg_dir:
|
||||
continue
|
||||
if not silent or pm.outdated(pkg_dir, requirements):
|
||||
if name == "tool-pioplus" and not only_check:
|
||||
shutdown_piohome_servers()
|
||||
if "windows" in util.get_systype():
|
||||
sleep(1)
|
||||
pm.update(name, requirements, only_check=only_check)
|
||||
return True
|
||||
|
||||
|
||||
def shutdown_piohome_servers():
|
||||
port = 8010
|
||||
while port < 8050:
|
||||
try:
|
||||
requests.get(
|
||||
"http://127.0.0.1:%d?__shutdown__=1" % port, timeout=0.01)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
port += 1
|
||||
|
||||
|
||||
def pioplus_call(args, **kwargs):
|
||||
if "windows" in util.get_systype() and sys.version_info < (2, 7, 6):
|
||||
raise exception.PlatformioException(
|
||||
|
||||
@@ -32,7 +32,7 @@ class LibraryManager(BasePkgManager):
|
||||
def __init__(self, package_dir=None):
|
||||
if not package_dir:
|
||||
package_dir = join(util.get_home_dir(), "lib")
|
||||
BasePkgManager.__init__(self, package_dir)
|
||||
super(LibraryManager, self).__init__(package_dir)
|
||||
|
||||
@property
|
||||
def manifest_names(self):
|
||||
@@ -237,9 +237,8 @@ class LibraryManager(BasePkgManager):
|
||||
if not isinstance(values, list):
|
||||
values = [v.strip() for v in values.split(",") if v]
|
||||
for value in values:
|
||||
query.append(
|
||||
'%s:"%s"' % (key[:-1]
|
||||
if key.endswith("s") else key, value))
|
||||
query.append('%s:"%s"' %
|
||||
(key[:-1] if key.endswith("s") else key, value))
|
||||
|
||||
lib_info = None
|
||||
result = util.get_api_result(
|
||||
@@ -337,13 +336,12 @@ class LibraryManager(BasePkgManager):
|
||||
force=False):
|
||||
_name, _requirements, _url = self.parse_pkg_uri(name, requirements)
|
||||
if not _url:
|
||||
name = "id=%d" % self.search_lib_id(
|
||||
{
|
||||
"name": _name,
|
||||
"requirements": _requirements
|
||||
},
|
||||
silent=silent,
|
||||
interactive=interactive)
|
||||
name = "id=%d" % self.search_lib_id({
|
||||
"name": _name,
|
||||
"requirements": _requirements
|
||||
},
|
||||
silent=silent,
|
||||
interactive=interactive)
|
||||
requirements = _requirements
|
||||
pkg_dir = BasePkgManager.install(
|
||||
self,
|
||||
|
||||
@@ -27,6 +27,7 @@ import semantic_version
|
||||
|
||||
from platformio import __version__, app, exception, telemetry, util
|
||||
from platformio.downloader import FileDownloader
|
||||
from platformio.lockfile import LockFile
|
||||
from platformio.unpacker import FileUnpacker
|
||||
from platformio.vcsclient import VCSClientFactory
|
||||
|
||||
@@ -258,6 +259,69 @@ class PkgInstallerMixin(object):
|
||||
raise e
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def parse_pkg_uri( # pylint: disable=too-many-branches
|
||||
text, requirements=None):
|
||||
text = str(text)
|
||||
name, url = None, None
|
||||
|
||||
# Parse requirements
|
||||
req_conditions = [
|
||||
"@" in text, not requirements, ":" not in text
|
||||
or text.rfind("/") < text.rfind("@")
|
||||
]
|
||||
if all(req_conditions):
|
||||
text, requirements = text.rsplit("@", 1)
|
||||
|
||||
# Handle PIO Library Registry ID
|
||||
if text.isdigit():
|
||||
text = "id=" + text
|
||||
# Parse custom name
|
||||
elif "=" in text and not text.startswith("id="):
|
||||
name, text = text.split("=", 1)
|
||||
|
||||
# Parse URL
|
||||
# if valid URL with scheme vcs+protocol://
|
||||
if "+" in text and text.find("+") < text.find("://"):
|
||||
url = text
|
||||
elif "/" in text or "\\" in text:
|
||||
git_conditions = [
|
||||
# Handle GitHub URL (https://github.com/user/package)
|
||||
text.startswith("https://github.com/") and not text.endswith(
|
||||
(".zip", ".tar.gz")),
|
||||
(text.split("#", 1)[0]
|
||||
if "#" in text else text).endswith(".git")
|
||||
]
|
||||
hg_conditions = [
|
||||
# Handle Developer Mbed URL
|
||||
# (https://developer.mbed.org/users/user/code/package/)
|
||||
# (https://os.mbed.com/users/user/code/package/)
|
||||
text.startswith("https://developer.mbed.org"),
|
||||
text.startswith("https://os.mbed.com")
|
||||
]
|
||||
if any(git_conditions):
|
||||
url = "git+" + text
|
||||
elif any(hg_conditions):
|
||||
url = "hg+" + text
|
||||
elif "://" not in text and (isfile(text) or isdir(text)):
|
||||
url = "file://" + text
|
||||
elif "://" in text:
|
||||
url = text
|
||||
# Handle short version of GitHub URL
|
||||
elif text.count("/") == 1:
|
||||
url = "git+https://github.com/" + text
|
||||
|
||||
# Parse name from URL
|
||||
if url and not name:
|
||||
_url = url.split("#", 1)[0] if "#" in url else url
|
||||
if _url.endswith(("\\", "/")):
|
||||
_url = _url[:-1]
|
||||
name = basename(_url)
|
||||
if "." in name and not name.startswith("."):
|
||||
name = name.rsplit(".", 1)[0]
|
||||
|
||||
return (name or text, requirements, url)
|
||||
|
||||
@staticmethod
|
||||
def get_install_dirname(manifest):
|
||||
name = re.sub(r"[^\da-z\_\-\. ]", "_", manifest['name'], flags=re.I)
|
||||
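`parse_pkg_uri()` above moves from `BasePkgManager` into `PkgInstallerMixin` (the old copy is removed further below). A few illustrative inputs and the tuples they should produce, assuming the mixin is importable from `platformio.managers.package` and that none of the example strings happens to match a local file or directory; the package names and URLs are made up:

```python
from platformio.managers.package import PkgInstallerMixin

parse = PkgInstallerMixin.parse_pkg_uri
assert parse("ArduinoJson@5.8.2") == ("ArduinoJson", "5.8.2", None)
assert parse("64") == ("id=64", None, None)                  # registry ID
assert parse("user/repo") == (
    "repo", None, "git+https://github.com/user/repo")        # short GitHub form
assert parse("https://github.com/user/repo.git") == (
    "repo", None, "git+https://github.com/user/repo.git")
```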
@@ -315,11 +379,13 @@ class PkgInstallerMixin(object):
|
||||
manifest[key.strip()] = value.strip()
|
||||
|
||||
if src_manifest:
|
||||
if "name" not in manifest:
|
||||
manifest['name'] = src_manifest['name']
|
||||
if "version" in src_manifest:
|
||||
manifest['version'] = src_manifest['version']
|
||||
manifest['__src_url'] = src_manifest['url']
|
||||
# handle a custom package name
|
||||
autogen_name = self.parse_pkg_uri(manifest['__src_url'])[0]
|
||||
if "name" not in manifest or autogen_name != src_manifest['name']:
|
||||
manifest['name'] = src_manifest['name']
|
||||
|
||||
if "name" not in manifest:
|
||||
manifest['name'] = basename(pkg_dir)
|
||||
@@ -413,6 +479,7 @@ class PkgInstallerMixin(object):
|
||||
click.secho("Looking for another mirror...", fg="yellow")
|
||||
|
||||
if versions is None:
|
||||
util.internet_on(raise_exception=True)
|
||||
raise exception.UnknownPackage(name)
|
||||
elif not pkgdata:
|
||||
raise exception.UndefinedPackageVersion(requirements or "latest",
|
||||
@@ -561,73 +628,10 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
def print_message(self, message, nl=True):
|
||||
click.echo("%s: %s" % (self.__class__.__name__, message), nl=nl)
|
||||
|
||||
@staticmethod
|
||||
def parse_pkg_uri( # pylint: disable=too-many-branches
|
||||
text, requirements=None):
|
||||
text = str(text)
|
||||
name, url = None, None
|
||||
|
||||
# Parse requirements
|
||||
req_conditions = [
|
||||
"@" in text, not requirements, ":" not in text
|
||||
or text.rfind("/") < text.rfind("@")
|
||||
]
|
||||
if all(req_conditions):
|
||||
text, requirements = text.rsplit("@", 1)
|
||||
|
||||
# Handle PIO Library Registry ID
|
||||
if text.isdigit():
|
||||
text = "id=" + text
|
||||
# Parse custom name
|
||||
elif "=" in text and not text.startswith("id="):
|
||||
name, text = text.split("=", 1)
|
||||
|
||||
# Parse URL
|
||||
# if valid URL with scheme vcs+protocol://
|
||||
if "+" in text and text.find("+") < text.find("://"):
|
||||
url = text
|
||||
elif "/" in text or "\\" in text:
|
||||
git_conditions = [
|
||||
# Handle GitHub URL (https://github.com/user/package)
|
||||
text.startswith("https://github.com/") and not text.endswith(
|
||||
(".zip", ".tar.gz")),
|
||||
(text.split("#", 1)[0]
|
||||
if "#" in text else text).endswith(".git")
|
||||
]
|
||||
hg_conditions = [
|
||||
# Handle Developer Mbed URL
|
||||
# (https://developer.mbed.org/users/user/code/package/)
|
||||
# (https://os.mbed.com/users/user/code/package/)
|
||||
text.startswith("https://developer.mbed.org"),
|
||||
text.startswith("https://os.mbed.com")
|
||||
]
|
||||
if any(git_conditions):
|
||||
url = "git+" + text
|
||||
elif any(hg_conditions):
|
||||
url = "hg+" + text
|
||||
elif "://" not in text and (isfile(text) or isdir(text)):
|
||||
url = "file://" + text
|
||||
elif "://" in text:
|
||||
url = text
|
||||
# Handle short version of GitHub URL
|
||||
elif text.count("/") == 1:
|
||||
url = "git+https://github.com/" + text
|
||||
|
||||
# Parse name from URL
|
||||
if url and not name:
|
||||
_url = url.split("#", 1)[0] if "#" in url else url
|
||||
if _url.endswith(("\\", "/")):
|
||||
_url = _url[:-1]
|
||||
name = basename(_url)
|
||||
if "." in name and not name.startswith("."):
|
||||
name = name.rsplit(".", 1)[0]
|
||||
|
||||
return (name or text, requirements, url)
|
||||
|
||||
def outdated(self, pkg_dir, requirements=None):
|
||||
"""
|
||||
Has 3 different results:
|
||||
`None` - unknown package, VCS is fixed to commit
|
||||
`None` - unknown package, VCS is detached to commit
|
||||
`False` - package is up-to-date
|
||||
`String` - a found latest version
|
||||
"""
|
||||
@@ -635,7 +639,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
return None
|
||||
latest = None
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
# skip fixed package to a specific version
|
||||
# skip detached package to a specific version
|
||||
if "@" in pkg_dir and "__src_url" not in manifest and not requirements:
|
||||
return None
|
||||
|
||||
@@ -679,99 +683,110 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
silent=False,
|
||||
after_update=False,
|
||||
force=False):
|
||||
name, requirements, url = self.parse_pkg_uri(name, requirements)
|
||||
package_dir = self.get_package_dir(name, requirements, url)
|
||||
pkg_dir = None
|
||||
# interprocess lock
|
||||
with LockFile(self.package_dir):
|
||||
self.cache_reset()
|
||||
|
||||
# avoid circle dependencies
|
||||
if not self.INSTALL_HISTORY:
|
||||
self.INSTALL_HISTORY = []
|
||||
history_key = "%s-%s-%s" % (name, requirements or "", url or "")
|
||||
if history_key in self.INSTALL_HISTORY:
|
||||
return package_dir
|
||||
self.INSTALL_HISTORY.append(history_key)
|
||||
name, requirements, url = self.parse_pkg_uri(name, requirements)
|
||||
package_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if package_dir and force:
|
||||
self.uninstall(package_dir)
|
||||
package_dir = None
|
||||
# avoid circle dependencies
|
||||
if not self.INSTALL_HISTORY:
|
||||
self.INSTALL_HISTORY = []
|
||||
history_key = "%s-%s-%s" % (name, requirements or "", url or "")
|
||||
if history_key in self.INSTALL_HISTORY:
|
||||
return package_dir
|
||||
self.INSTALL_HISTORY.append(history_key)
|
||||
|
||||
if package_dir and force:
|
||||
self.uninstall(package_dir)
|
||||
package_dir = None
|
||||
|
||||
if not package_dir or not silent:
|
||||
msg = "Installing " + click.style(name, fg="cyan")
|
||||
if requirements:
|
||||
msg += " @ " + requirements
|
||||
self.print_message(msg)
|
||||
if package_dir:
|
||||
if not silent:
|
||||
click.secho(
|
||||
"{name} @ {version} is already installed".format(
|
||||
**self.load_manifest(package_dir)),
|
||||
fg="yellow")
|
||||
return package_dir
|
||||
|
||||
if url:
|
||||
pkg_dir = self._install_from_url(
|
||||
name, url, requirements, track=True)
|
||||
else:
|
||||
pkg_dir = self._install_from_piorepo(name, requirements)
|
||||
|
||||
if not pkg_dir or not self.manifest_exists(pkg_dir):
|
||||
raise exception.PackageInstallError(name, requirements or "*",
|
||||
util.get_systype())
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
assert manifest
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Install",
|
||||
label=manifest['name'])
|
||||
|
||||
if not package_dir or not silent:
|
||||
msg = "Installing " + click.style(name, fg="cyan")
|
||||
if requirements:
|
||||
msg += " @ " + requirements
|
||||
self.print_message(msg)
|
||||
if package_dir:
|
||||
if not silent:
|
||||
click.secho(
|
||||
"{name} @ {version} is already installed".format(
|
||||
**self.load_manifest(package_dir)),
|
||||
fg="yellow")
|
||||
return package_dir
|
||||
|
||||
if url:
|
||||
pkg_dir = self._install_from_url(
|
||||
name, url, requirements, track=True)
|
||||
else:
|
||||
pkg_dir = self._install_from_piorepo(name, requirements)
|
||||
|
||||
if not pkg_dir or not self.manifest_exists(pkg_dir):
|
||||
raise exception.PackageInstallError(name, requirements or "*",
|
||||
util.get_systype())
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
assert manifest
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Install",
|
||||
label=manifest['name'])
|
||||
|
||||
if not silent:
|
||||
click.secho(
|
||||
"{name} @ {version} has been successfully installed!".format(
|
||||
**manifest),
|
||||
fg="green")
|
||||
"{name} @ {version} has been successfully installed!".
|
||||
format(**manifest),
|
||||
fg="green")
|
||||
|
||||
return pkg_dir
|
||||
|
||||
def uninstall(self, package, requirements=None, after_update=False):
|
||||
if isdir(package) and self.get_package_by_dir(package):
|
||||
pkg_dir = package
|
||||
else:
|
||||
name, requirements, url = self.parse_pkg_uri(package, requirements)
|
||||
pkg_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if not pkg_dir:
|
||||
raise exception.UnknownPackage(
|
||||
"%s @ %s" % (package, requirements or "*"))
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
click.echo(
|
||||
"Uninstalling %s @ %s: \t" % (click.style(
|
||||
manifest['name'], fg="cyan"), manifest['version']),
|
||||
nl=False)
|
||||
|
||||
if islink(pkg_dir):
|
||||
os.unlink(pkg_dir)
|
||||
else:
|
||||
util.rmtree_(pkg_dir)
|
||||
self.cache_reset()
|
||||
|
||||
# unfix package with the same name
|
||||
pkg_dir = self.get_package_dir(manifest['name'])
|
||||
if pkg_dir and "@" in pkg_dir:
|
||||
shutil.move(
|
||||
pkg_dir,
|
||||
join(self.package_dir, self.get_install_dirname(manifest)))
|
||||
# interprocess lock
|
||||
with LockFile(self.package_dir):
|
||||
self.cache_reset()
|
||||
|
||||
click.echo("[%s]" % click.style("OK", fg="green"))
|
||||
if isdir(package) and self.get_package_by_dir(package):
|
||||
pkg_dir = package
|
||||
else:
|
||||
name, requirements, url = self.parse_pkg_uri(
|
||||
package, requirements)
|
||||
pkg_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if not pkg_dir:
|
||||
raise exception.UnknownPackage(
|
||||
"%s @ %s" % (package, requirements or "*"))
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
click.echo(
|
||||
"Uninstalling %s @ %s: \t" % (click.style(
|
||||
manifest['name'], fg="cyan"), manifest['version']),
|
||||
nl=False)
|
||||
|
||||
if islink(pkg_dir):
|
||||
os.unlink(pkg_dir)
|
||||
else:
|
||||
util.rmtree_(pkg_dir)
|
||||
self.cache_reset()
|
||||
|
||||
# unfix package with the same name
|
||||
pkg_dir = self.get_package_dir(manifest['name'])
|
||||
if pkg_dir and "@" in pkg_dir:
|
||||
shutil.move(
|
||||
pkg_dir,
|
||||
join(self.package_dir, self.get_install_dirname(manifest)))
|
||||
self.cache_reset()
|
||||
|
||||
click.echo("[%s]" % click.style("OK", fg="green"))
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Uninstall",
|
||||
label=manifest['name'])
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Uninstall",
|
||||
label=manifest['name'])
|
||||
return True
|
||||
|
||||
def update(self, package, requirements=None, only_check=False):
|
||||
@@ -802,7 +817,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
elif latest is False:
|
||||
click.echo("[%s]" % (click.style("Up-to-date", fg="green")))
|
||||
else:
|
||||
click.echo("[%s]" % (click.style("Fixed", fg="yellow")))
|
||||
click.echo("[%s]" % (click.style("Detached", fg="yellow")))
|
||||
|
||||
if only_check or not latest:
|
||||
return True
|
||||
|
||||
@@ -235,8 +235,8 @@ class PlatformFactory(object):
|
||||
name = pm.load_manifest(platform_dir)['name']
|
||||
|
||||
if not platform_dir:
|
||||
raise exception.UnknownPlatform(name if not requirements else
|
||||
"%s@%s" % (name, requirements))
|
||||
raise exception.UnknownPlatform(
|
||||
name if not requirements else "%s@%s" % (name, requirements))
|
||||
|
||||
platform_cls = None
|
||||
if isfile(join(platform_dir, "platform.py")):
|
||||
@@ -460,8 +460,7 @@ class PlatformBase( # pylint: disable=too-many-public-methods
|
||||
self._manifest = util.load_json(manifest_path)
|
||||
|
||||
self.pm = PackageManager(
|
||||
join(util.get_home_dir(), "packages"),
|
||||
self._manifest.get("packageRepositories"))
|
||||
join(util.get_home_dir(), "packages"), self.package_repositories)
|
||||
|
||||
self.silent = False
|
||||
self.verbose = False
|
||||
@@ -516,6 +515,10 @@ class PlatformBase( # pylint: disable=too-many-public-methods
|
||||
def engines(self):
|
||||
return self._manifest.get("engines")
|
||||
|
||||
@property
|
||||
def package_repositories(self):
|
||||
return self._manifest.get("packageRepositories")
|
||||
|
||||
@property
|
||||
def manifest(self):
|
||||
return self._manifest
|
||||
@@ -604,12 +607,13 @@ class PlatformBase( # pylint: disable=too-many-public-methods
|
||||
|
||||
# enable upload tools for upload targets
|
||||
if any(["upload" in t for t in targets] + ["program" in targets]):
|
||||
for _name, _opts in self.packages.iteritems():
|
||||
if _opts.get("type") == "uploader":
|
||||
self.packages[_name]['optional'] = False
|
||||
elif "nobuild" in targets:
|
||||
# skip all packages, allow only upload tools
|
||||
self.packages[_name]['optional'] = True
|
||||
for name, opts in self.packages.items():
|
||||
if opts.get("type") == "uploader":
|
||||
self.packages[name]['optional'] = False
|
||||
# skip all packages in "nobuild" mode
|
||||
# allow only upload tools and frameworks
|
||||
elif "nobuild" in targets and opts.get("type") != "framework":
|
||||
self.packages[name]['optional'] = True
|
||||
|
||||
def get_lib_storages(self):
|
||||
storages = []
|
||||
@@ -629,10 +633,8 @@ class PlatformBase( # pylint: disable=too-many-public-methods
|
||||
if not isdir(libcore_dir):
|
||||
continue
|
||||
storages.append({
|
||||
"name":
|
||||
"%s-core-%s" % (opts['package'], item),
|
||||
"path":
|
||||
libcore_dir
|
||||
"name": "%s-core-%s" % (opts['package'], item),
|
||||
"path": libcore_dir
|
||||
})
|
||||
|
||||
return storages
|
||||
|
||||
@@ -6,4 +6,4 @@
|
||||
; Advanced options: extra scripting
|
||||
;
|
||||
; Please visit documentation for the other options and examples
|
||||
; http://docs.platformio.org/page/projectconf.html
|
||||
; https://docs.platformio.org/page/projectconf.html
|
||||
|
||||
@@ -61,12 +61,16 @@ class MeasurementProtocol(TelemetryBase):
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
TelemetryBase.__init__(self)
|
||||
super(MeasurementProtocol, self).__init__()
|
||||
self['v'] = 1
|
||||
self['tid'] = self.TID
|
||||
self['cid'] = app.get_cid()
|
||||
|
||||
self['sr'] = "%dx%d" % click.get_terminal_size()
|
||||
try:
|
||||
self['sr'] = "%dx%d" % click.get_terminal_size()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
self._prefill_screen_name()
|
||||
self._prefill_appinfo()
|
||||
self._prefill_custom_data()
|
||||
@@ -112,8 +116,8 @@ class MeasurementProtocol(TelemetryBase):
|
||||
self['cd2'] = "Python/%s %s" % (platform.python_version(),
|
||||
platform.platform())
|
||||
# self['cd3'] = " ".join(_filter_args(sys.argv[1:]))
|
||||
self['cd4'] = 1 if (not util.is_ci()
|
||||
and (caller_id or not util.is_container())) else 0
|
||||
self['cd4'] = 1 if (not util.is_ci() and
|
||||
(caller_id or not util.is_container())) else 0
|
||||
if caller_id:
|
||||
self['cd5'] = caller_id.lower()
|
||||
|
||||
@@ -291,7 +295,7 @@ def measure_ci():
|
||||
}
|
||||
}
|
||||
|
||||
for key, value in envmap.iteritems():
|
||||
for key, value in envmap.items():
|
||||
if getenv(key, "").lower() != "true":
|
||||
continue
|
||||
event.update({"action": key, "label": value['label']})
|
||||
|
||||
@@ -13,15 +13,14 @@
|
||||
# limitations under the License.
|
||||
|
||||
from os import chmod
|
||||
from os.path import join
|
||||
from os.path import exists, join
|
||||
from tarfile import open as tarfile_open
|
||||
from time import mktime
|
||||
from zipfile import ZipFile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import util
|
||||
from platformio.exception import UnsupportedArchiveType
|
||||
from platformio import exception, util
|
||||
|
||||
|
||||
class ArchiveBase(object):
|
||||
@@ -32,6 +31,9 @@ class ArchiveBase(object):
|
||||
def get_items(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
raise NotImplementedError()
|
||||
|
||||
def extract_item(self, item, dest_dir):
|
||||
self._afo.extract(item, dest_dir)
|
||||
self.after_extract(item, dest_dir)
|
||||
@@ -46,16 +48,23 @@ class ArchiveBase(object):
|
||||
class TARArchive(ArchiveBase):
|
||||
|
||||
def __init__(self, archpath):
|
||||
ArchiveBase.__init__(self, tarfile_open(archpath))
|
||||
super(TARArchive, self).__init__(tarfile_open(archpath))
|
||||
|
||||
def get_items(self):
|
||||
return self._afo.getmembers()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
return item.name
|
||||
|
||||
@staticmethod
|
||||
def islink(item):
|
||||
return item.islnk() or item.issym()
|
||||
|
||||
|
||||
class ZIPArchive(ArchiveBase):
|
||||
|
||||
def __init__(self, archpath):
|
||||
ArchiveBase.__init__(self, ZipFile(archpath))
|
||||
super(ZIPArchive, self).__init__(ZipFile(archpath))
|
||||
|
||||
@staticmethod
|
||||
def preserve_permissions(item, dest_dir):
|
||||
@@ -72,6 +81,12 @@ class ZIPArchive(ArchiveBase):
|
||||
def get_items(self):
|
||||
return self._afo.infolist()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
return item.filename
|
||||
|
||||
def islink(self, item):
|
||||
raise NotImplementedError()
|
||||
|
||||
def after_extract(self, item, dest_dir):
|
||||
self.preserve_permissions(item, dest_dir)
|
||||
self.preserve_mtime(item, dest_dir)
|
||||
@@ -89,7 +104,7 @@ class FileUnpacker(object):
|
||||
elif self.archpath.lower().endswith(".zip"):
|
||||
self._unpacker = ZIPArchive(self.archpath)
|
||||
if not self._unpacker:
|
||||
raise UnsupportedArchiveType(self.archpath)
|
||||
raise exception.UnsupportedArchiveType(self.archpath)
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
@@ -107,4 +122,14 @@ class FileUnpacker(object):
|
||||
with click.progressbar(items, label="Unpacking") as pb:
|
||||
for item in pb:
|
||||
self._unpacker.extract_item(item, dest_dir)
|
||||
|
||||
# check on disk
|
||||
for item in self._unpacker.get_items():
|
||||
filename = self._unpacker.get_item_filename(item)
|
||||
item_path = join(dest_dir, filename)
|
||||
try:
|
||||
if not self._unpacker.islink(item) and not exists(item_path):
|
||||
raise exception.ExtractArchiveItemError(filename, dest_dir)
|
||||
except NotImplementedError:
|
||||
pass
|
||||
return True
|
||||
|
||||
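After unpacking, the `FileUnpacker` above now verifies that every non-link archive member actually exists on disk and raises `ExtractArchiveItemError` otherwise. A hedged usage sketch; the archive and destination paths are made up, and the `unpack()` destination argument is assumed from the hunk above:

```python
from platformio import exception
from platformio.unpacker import FileUnpacker

try:
    with FileUnpacker("/tmp/tool-example.tar.gz") as unpacker:  # hypothetical archive
        unpacker.unpack("/tmp/tool-example")                    # assumed dest_dir argument
except exception.ExtractArchiveItemError as e:
    print("Archive member missing after extraction: %s" % e)
```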
@@ -23,6 +23,7 @@ import sys
|
||||
import time
|
||||
from functools import wraps
|
||||
from glob import glob
|
||||
from hashlib import sha1
|
||||
from os.path import (abspath, basename, dirname, expanduser, isdir, isfile,
|
||||
join, normpath, splitdrive)
|
||||
from shutil import rmtree
|
||||
@@ -77,7 +78,7 @@ class ProjectConfig(ConfigParser.ConfigParser):
|
||||
class AsyncPipe(Thread):
|
||||
|
||||
def __init__(self, outcallback=None):
|
||||
Thread.__init__(self)
|
||||
super(AsyncPipe, self).__init__()
|
||||
self.outcallback = outcallback
|
||||
|
||||
self._fd_read, self._fd_write = os.pipe()
|
||||
@@ -99,7 +100,7 @@ class AsyncPipe(Thread):
|
||||
if self.outcallback:
|
||||
self.outcallback(line)
|
||||
else:
|
||||
print line
|
||||
print(line)
|
||||
self._pipe_reader.close()
|
||||
|
||||
def close(self):
|
||||
@@ -137,8 +138,12 @@ class memoized(object):
|
||||
self.cache[key] = (time.time(), func(*args, **kwargs))
|
||||
return self.cache[key][1]
|
||||
|
||||
wrapper.reset = self._reset
|
||||
return wrapper
|
||||
|
||||
def _reset(self):
|
||||
self.cache = {}
|
||||
|
||||
|
||||
class throttle(object):
|
||||
|
||||
@@ -198,28 +203,34 @@ def pioversion_to_intstr():
|
||||
|
||||
|
||||
def get_project_optional_dir(name, default=None):
|
||||
data = None
|
||||
paths = None
|
||||
var_name = "PLATFORMIO_%s" % name.upper()
|
||||
if var_name in os.environ:
|
||||
data = os.getenv(var_name)
|
||||
paths = os.getenv(var_name)
|
||||
else:
|
||||
try:
|
||||
config = load_project_config()
|
||||
if (config.has_section("platformio")
|
||||
and config.has_option("platformio", name)):
|
||||
data = config.get("platformio", name)
|
||||
paths = config.get("platformio", name)
|
||||
except exception.NotPlatformIOProject:
|
||||
pass
|
||||
|
||||
if not data:
|
||||
if not paths:
|
||||
return default
|
||||
|
||||
items = []
|
||||
for item in data.split(", "):
|
||||
for item in paths.split(", "):
|
||||
if item.startswith("~"):
|
||||
item = expanduser(item)
|
||||
items.append(abspath(item))
|
||||
return ", ".join(items)
|
||||
paths = ", ".join(items)
|
||||
|
||||
while "$PROJECT_HASH" in paths:
|
||||
paths = paths.replace("$PROJECT_HASH",
|
||||
sha1(get_project_dir()).hexdigest()[:10])
|
||||
|
||||
return paths
|
||||
|
||||
|
||||
def get_home_dir():
|
||||
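`get_project_optional_dir()` above gains support for a `$PROJECT_HASH` placeholder, which expands to the first ten hex characters of the SHA-1 of the project directory path. A small sketch of that expansion; the paths are made-up examples, and it mirrors the Python 2 code where `sha1()` accepts a `str` path directly:

```python
from hashlib import sha1

project_dir = "/home/user/my-project"        # hypothetical project location
build_dir = "/tmp/pio-builds/$PROJECT_HASH"  # e.g. value of PLATFORMIO_BUILD_DIR
while "$PROJECT_HASH" in build_dir:
    build_dir = build_dir.replace("$PROJECT_HASH",
                                  sha1(project_dir).hexdigest()[:10])
print(build_dir)  # /tmp/pio-builds/<10 hex chars>
```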
@@ -317,7 +328,7 @@ def get_projectbuild_dir(force=False):
|
||||
with open(dontmod_path, "w") as fp:
|
||||
fp.write("""
|
||||
[InternetShortcut]
|
||||
URL=http://docs.platformio.org/page/projectconf/section_platformio.html#build-dir
|
||||
URL=https://docs.platformio.org/page/projectconf/section_platformio.html#build-dir
|
||||
""")
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if not force:
|
||||
@@ -349,12 +360,19 @@ def load_project_config(path=None):
|
||||
|
||||
|
||||
def parse_conf_multi_values(items):
|
||||
result = []
|
||||
if not items:
|
||||
return []
|
||||
return [
|
||||
item.strip() for item in items.split("\n" if "\n" in items else ", ")
|
||||
if item.strip()
|
||||
]
|
||||
return result
|
||||
inline_comment_re = re.compile(r"\s+;.*$")
|
||||
for item in items.split("\n" if "\n" in items else ", "):
|
||||
item = item.strip()
|
||||
# comment
|
||||
if not item or item.startswith((";", "#")):
|
||||
continue
|
||||
if ";" in item:
|
||||
item = inline_comment_re.sub("", item).strip()
|
||||
result.append(item)
|
||||
return result
|
||||
|
||||
|
||||
def change_filemtime(path, mtime):
|
||||
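The reworked `parse_conf_multi_values()` above now skips full-line `;`/`#` comments and strips inline `;` comments from multi-value options. A self-contained re-implementation of that logic for illustration, with a made-up input:

```python
import re

def parse_conf_multi_values(items):
    # illustration only; mirrors the rewritten helper shown above
    result = []
    if not items:
        return result
    inline_comment_re = re.compile(r"\s+;.*$")
    for item in items.split("\n" if "\n" in items else ", "):
        item = item.strip()
        if not item or item.startswith((";", "#")):
            continue
        if ";" in item:
            item = inline_comment_re.sub("", item).strip()
        result.append(item)
    return result

print(parse_conf_multi_values(
    "-DFOO=1\n; -DCOMMENTED_MACRO\n-DBAR=2 ; inline comment"))
# ['-DFOO=1', '-DBAR=2']
```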
@@ -398,7 +416,7 @@ def exec_command(*args, **kwargs):
|
||||
if isinstance(kwargs[s], AsyncPipe):
|
||||
result[s[3:]] = "\n".join(kwargs[s].get_buffer())
|
||||
|
||||
for k, v in result.iteritems():
|
||||
for k, v in result.items():
|
||||
if v and isinstance(v, basestring):
|
||||
result[k].strip()
|
||||
|
||||
@@ -550,16 +568,11 @@ def get_mdns_services():
|
||||
pass
|
||||
|
||||
items.append({
|
||||
"type":
|
||||
service.type,
|
||||
"name":
|
||||
service.name,
|
||||
"ip":
|
||||
".".join([str(ord(c)) for c in service.address]),
|
||||
"port":
|
||||
service.port,
|
||||
"properties":
|
||||
properties
|
||||
"type": service.type,
|
||||
"name": service.name,
|
||||
"ip": ".".join([str(ord(c)) for c in service.address]),
|
||||
"port": service.port,
|
||||
"properties": properties
|
||||
})
|
||||
return items
|
||||
|
||||
@@ -667,7 +680,6 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
|
||||
|
||||
PING_INTERNET_IPS = [
|
||||
"192.30.253.113", # github.com
|
||||
"159.122.18.156", # dl.bintray.com
|
||||
"193.222.52.25" # dl.platformio.org
|
||||
]
|
||||
|
||||
@@ -779,6 +791,46 @@ def merge_dicts(d1, d2, path=None):
|
||||
return d1
|
||||
|
||||
|
||||
def get_file_contents(path):
|
||||
try:
|
||||
with open(path) as f:
|
||||
return f.read()
|
||||
except UnicodeDecodeError:
|
||||
with open(path, encoding="latin-1") as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def ensure_udev_rules():
|
||||
|
||||
def _rules_to_set(rules_path):
|
||||
return set(l.strip() for l in get_file_contents(rules_path).split("\n")
|
||||
if l.strip() and not l.startswith("#"))
|
||||
|
||||
if "linux" not in get_systype():
|
||||
return None
|
||||
installed_rules = [
|
||||
"/etc/udev/rules.d/99-platformio-udev.rules",
|
||||
"/lib/udev/rules.d/99-platformio-udev.rules"
|
||||
]
|
||||
if not any(isfile(p) for p in installed_rules):
|
||||
raise exception.MissedUdevRules
|
||||
|
||||
origin_path = abspath(
|
||||
join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules"))
|
||||
if not isfile(origin_path):
|
||||
return None
|
||||
|
||||
origin_rules = _rules_to_set(origin_path)
|
||||
for rules_path in installed_rules:
|
||||
if not isfile(rules_path):
|
||||
continue
|
||||
current_rules = _rules_to_set(rules_path)
|
||||
if not origin_rules <= current_rules:
|
||||
raise exception.OutdatedUdevRules(rules_path)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def rmtree_(path):
|
||||
|
||||
def _onerror(_, name, __):
|
||||
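The new `ensure_udev_rules()` helper above compares the bundled `99-platformio-udev.rules` against the installed copies and raises dedicated exceptions when rules are missing or outdated. A hedged usage sketch, assuming the helper is exposed from `platformio.util`:

```python
from platformio import exception, util

try:
    util.ensure_udev_rules()  # returns None on non-Linux systems
except exception.MissedUdevRules:
    print("99-platformio-udev.rules is not installed")
except exception.OutdatedUdevRules as e:
    print("Installed udev rules are outdated: %s" % e)
```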
@@ -787,8 +839,9 @@ def rmtree_(path):
|
||||
os.remove(name)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
click.secho(
|
||||
"Please manually remove file `%s`" % name, fg="red", err=True)
|
||||
raise e
|
||||
"%s \nPlease manually remove the file `%s`" % (str(e), name),
|
||||
fg="red",
|
||||
err=True)
|
||||
|
||||
return rmtree(path, onerror=_onerror)
|
||||
|
||||
@@ -805,8 +858,7 @@ except ImportError:
|
||||
magic_check_bytes = re.compile(b'([*?[])')
|
||||
|
||||
def glob_escape(pathname):
|
||||
"""Escape all special characters.
|
||||
"""
|
||||
"""Escape all special characters."""
|
||||
# Escaping is done by wrapping any of "*?[" between square brackets.
|
||||
# Metacharacters do not work in the drive part and shouldn't be
|
||||
# escaped.
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
#
|
||||
# INSTALLATION
|
||||
#
|
||||
# Please visit > http://docs.platformio.org/en/latest/faq.html#platformio-udev-rules
|
||||
# Please visit > https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules
|
||||
#
|
||||
#####################################################################################
|
||||
|
||||
@@ -53,6 +53,9 @@ KERNEL=="ttyACM*", ATTRS{idVendor}=="16d0", ATTRS{idProduct}=="0753", MODE:="066
|
||||
# STM32 discovery boards, with onboard st/linkv2
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374?", MODE:="0666"
|
||||
|
||||
# Maple with DFU
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="1eaf", ATTRS{idProduct}=="000[34]", MODE:="0666"
|
||||
|
||||
# USBtiny
|
||||
SUBSYSTEMS=="usb", ATTRS{idProduct}=="0c9f", ATTRS{idVendor}=="1781", MODE="0666"
|
||||
|
||||
@@ -205,3 +208,52 @@ ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="660", GROUP="plugdev",
|
||||
|
||||
# CMSIS-DAP compatible adapters
|
||||
ATTRS{product}=="*CMSIS-DAP*", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# SEGGER J-Link
|
||||
ATTR{idProduct}=="1001", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1002", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1003", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1004", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1005", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1006", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1007", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1008", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1009", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100a", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100b", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100c", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100d", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100e", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100f", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1010", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1011", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1012", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1013", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1014", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1015", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1016", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1017", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1018", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1019", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101a", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101b", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101c", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101d", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101e", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101f", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1020", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1021", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1022", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1023", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1024", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1025", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1026", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1027", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1028", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1029", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102a", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102b", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102c", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102d", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102e", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102f", ATTR{idVendor}=="1366", MODE="666"
|
||||
|
||||
File diff suppressed because it is too large
@@ -15,6 +15,7 @@
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
from platformio import util
|
||||
|
||||
|
||||
def main():
|
||||
@@ -24,6 +25,14 @@ def main():
|
||||
for platform in platforms:
|
||||
if platform['forDesktop']:
|
||||
continue
|
||||
# RISC-V GAP does not support Windows x86
|
||||
if (util.get_systype() == "windows_x86"
|
||||
and platform['name'] == "riscv_gap"):
|
||||
continue
|
||||
# unknown issue on Linux
|
||||
if ("linux" in util.get_systype()
|
||||
and platform['name'] == "aceinna_imu"):
|
||||
continue
|
||||
subprocess.check_call(
|
||||
["platformio", "platform", "install", platform['repository']])
|
||||
|
||||
|
||||
setup.py (6 lines changed)
@@ -21,7 +21,6 @@ install_requires = [
|
||||
"bottle<0.13",
|
||||
"click>=5,<6",
|
||||
"colorama",
|
||||
"lockfile>=0.9.1,<0.13",
|
||||
"pyserial>=3,<4,!=3.3",
|
||||
"requests>=2.4.0,<3",
|
||||
"semantic_version>=2.5.0,<3"
|
||||
@@ -38,7 +37,7 @@ setup(
|
||||
license=__license__,
|
||||
python_requires='>=2.7, <3',
|
||||
install_requires=install_requires,
|
||||
packages=find_packages(),
|
||||
packages=find_packages() + ["scripts"],
|
||||
package_data={
|
||||
"platformio": [
|
||||
"projectconftpl.ini",
|
||||
@@ -46,6 +45,9 @@ setup(
|
||||
"ide/tpls/*/*.tpl",
|
||||
"ide/tpls/*/*/*.tpl",
|
||||
"ide/tpls/*/.*/*.tpl"
|
||||
],
|
||||
"scripts": [
|
||||
"99-platformio-udev.rules"
|
||||
]
|
||||
},
|
||||
entry_points={
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os.path import join
|
||||
from os.path import isfile, join
|
||||
|
||||
from platformio.commands.ci import cli as cmd_ci
|
||||
from platformio.commands.lib import cli as cmd_lib
|
||||
@@ -32,6 +32,36 @@ def test_ci_boards(clirunner, validate_cliresult):
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
def test_ci_build_dir(clirunner, tmpdir_factory, validate_cliresult):
|
||||
build_dir = str(tmpdir_factory.mktemp("ci_build_dir"))
|
||||
result = clirunner.invoke(cmd_ci, [
|
||||
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno",
|
||||
"--build-dir", build_dir
|
||||
])
|
||||
validate_cliresult(result)
|
||||
assert not isfile(join(build_dir, "platformio.ini"))
|
||||
|
||||
|
||||
def test_ci_keep_build_dir(clirunner, tmpdir_factory, validate_cliresult):
|
||||
build_dir = str(tmpdir_factory.mktemp("ci_build_dir"))
|
||||
result = clirunner.invoke(cmd_ci, [
|
||||
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno",
|
||||
"--build-dir", build_dir, "--keep-build-dir"
|
||||
])
|
||||
validate_cliresult(result)
|
||||
assert isfile(join(build_dir, "platformio.ini"))
|
||||
|
||||
# 2nd attempt
|
||||
result = clirunner.invoke(cmd_ci, [
|
||||
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "metro",
|
||||
"--build-dir", build_dir, "--keep-build-dir"
|
||||
])
|
||||
validate_cliresult(result)
|
||||
|
||||
assert "board: uno" in result.output
|
||||
assert "board: metro" in result.output
|
||||
|
||||
|
||||
def test_ci_project_conf(clirunner, validate_cliresult):
|
||||
project_dir = join("examples", "wiring-blink")
|
||||
result = clirunner.invoke(cmd_ci, [
|
||||
|
||||
@@ -61,7 +61,7 @@ def test_global_install_archive(clirunner, validate_cliresult,
|
||||
"http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
|
||||
"http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
|
||||
"SomeLib=http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
|
||||
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip"
|
||||
])
|
||||
validate_cliresult(result)
|
||||
@@ -75,7 +75,7 @@ def test_global_install_archive(clirunner, validate_cliresult,
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
|
||||
items2 = [
|
||||
"RadioHead-1.62", "ArduinoJson", "DallasTemperature_ID54",
|
||||
"RadioHead-1.62", "ArduinoJson", "SomeLib_ID54",
|
||||
"OneWire_ID1", "ESP32WebServer"
|
||||
]
|
||||
assert set(items1) >= set(items2)
|
||||
@@ -158,7 +158,7 @@ def test_global_lib_list(clirunner, validate_cliresult):
|
||||
items1 = [i['name'] for i in json.loads(result.output)]
|
||||
items2 = [
|
||||
"ESP32WebServer", "ArduinoJson", "ArduinoJson", "ArduinoJson",
|
||||
"ArduinoJson", "AsyncMqttClient", "AsyncTCP", "DallasTemperature",
|
||||
"ArduinoJson", "AsyncMqttClient", "AsyncTCP", "SomeLib",
|
||||
"ESPAsyncTCP", "NeoPixelBus", "OneWire", "PJON", "PJON",
|
||||
"PubSubClient", "RFcontrol", "RadioHead-1.62", "platformio-libmirror",
|
||||
"rs485-nodeproto"
|
||||
@@ -166,14 +166,12 @@ def test_global_lib_list(clirunner, validate_cliresult):
|
||||
assert sorted(items1) == sorted(items2)
|
||||
|
||||
versions1 = [
|
||||
"{name}@{version}".format(**item)
|
||||
for item in json.loads(result.output)
|
||||
"{name}@{version}".format(**item) for item in json.loads(result.output)
|
||||
]
|
||||
versions2 = [
|
||||
'ArduinoJson@5.8.2', 'ArduinoJson@5.10.1', 'AsyncMqttClient@0.8.2',
|
||||
'AsyncTCP@1.0.1', 'ESPAsyncTCP@1.1.3', 'NeoPixelBus@2.2.4',
|
||||
'PJON@07fe9aa', 'PJON@1fb26fd', 'PubSubClient@bef5814',
|
||||
'RFcontrol@77d4eb3f8a', 'RadioHead-1.62@0.0.0'
|
||||
'NeoPixelBus@2.2.4', 'PJON@07fe9aa', 'PJON@1fb26fd',
|
||||
'PubSubClient@bef5814', 'RFcontrol@77d4eb3f8a', 'RadioHead-1.62@0.0.0'
|
||||
]
|
||||
assert set(versions1) >= set(versions2)
|
||||
|
||||
@@ -204,7 +202,7 @@ def test_global_lib_update(clirunner, validate_cliresult):
|
||||
# update rest libraries
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "update"])
|
||||
validate_cliresult(result)
|
||||
assert result.output.count("[Fixed]") == 6
|
||||
assert result.output.count("[Detached]") == 6
|
||||
assert result.output.count("[Up-to-date]") == 11
|
||||
assert "Uninstalling RFcontrol @ 77d4eb3f8a" in result.output
|
||||
|
||||
@@ -236,7 +234,7 @@ def test_global_lib_uninstall(clirunner, validate_cliresult,
|
||||
items2 = [
|
||||
"RadioHead-1.62", "rs485-nodeproto", "platformio-libmirror",
|
||||
"PubSubClient", "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
|
||||
"ESPAsyncTCP_ID305", "DallasTemperature_ID54", "NeoPixelBus_ID547",
|
||||
"ESPAsyncTCP_ID305", "SomeLib_ID54", "NeoPixelBus_ID547",
|
||||
"PJON", "AsyncMqttClient_ID346", "ArduinoJson_ID64",
|
||||
"PJON@src-79de467ebe19de18287becff0a1fb42d", "ESP32WebServer"
|
||||
]
|
||||
|
||||
@@ -49,4 +49,12 @@ void fooCallback(){
|
||||
|
||||
}
|
||||
|
||||
extern "C" {
|
||||
void some_extern(const char *fmt, ...);
|
||||
};
|
||||
|
||||
void some_extern(const char *fmt, ...) {
|
||||
|
||||
}
|
||||
|
||||
// юнікод
|
||||
|
||||
@@ -25,7 +25,9 @@ def test_build_flags(clirunner, validate_cliresult, tmpdir):
|
||||
[env:native]
|
||||
platform = native
|
||||
extra_scripts = extra.py
|
||||
build_flags = %s
|
||||
build_flags =
|
||||
; -DCOMMENTED_MACRO
|
||||
%s ; inline comment
|
||||
""" % " ".join([f[0] for f in build_flags]))
|
||||
|
||||
tmpdir.join("extra.py").write("""
|
||||
@@ -47,6 +49,10 @@ projenv.Append(CPPDEFINES="POST_SCRIPT_MACRO")
|
||||
#error "POST_SCRIPT_MACRO"
|
||||
#endif
|
||||
|
||||
#ifdef COMMENTED_MACRO
|
||||
#error "COMMENTED_MACRO"
|
||||
#endif
|
||||
|
||||
int main() {
|
||||
}
|
||||
""")
|
||||
|
||||
@@ -36,16 +36,25 @@ def pytest_generate_tests(metafunc):
|
||||
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
|
||||
if not p.is_embedded():
|
||||
continue
|
||||
# issue with "version `CXXABI_1.3.9' not found (required by sdcc)"
|
||||
if "linux" in util.get_systype() and p.name in ("intel_mcs51",
|
||||
"ststm8"):
|
||||
continue
|
||||
examples_dir = join(p.get_dir(), "examples")
|
||||
assert isdir(examples_dir)
|
||||
examples_dirs.append(examples_dir)
|
||||
|
||||
project_dirs = []
|
||||
for examples_dir in examples_dirs:
|
||||
platform_examples = []
|
||||
for root, _, files in walk(examples_dir):
|
||||
if "platformio.ini" not in files or ".skiptest" in files:
|
||||
continue
|
||||
project_dirs.append(root)
|
||||
platform_examples.append(root)
|
||||
|
||||
# test random 3 examples
|
||||
random.shuffle(platform_examples)
|
||||
project_dirs.extend(platform_examples[:3])
|
||||
project_dirs.sort()
|
||||
metafunc.parametrize("pioproject_dir", project_dirs)
|
||||
|
||||
|
||||