Mirror of https://github.com/platformio/platformio-core.git (synced 2025-07-29 17:47:14 +02:00)

Merge branch 'release/v3.6.0'

28 .github/ISSUE_TEMPLATE.md (vendored)
@ -1,22 +1,28 @@
What kind of issue is this?

- [ ] Question. This issue tracker is not the place for questions. If you want to ask how to do
something, or to understand why something isn't working the way you expect it to, use
our Community Forums https://community.platformio.org
- [ ] **Question**.
This issue tracker is not the place for questions. If you want to ask how to do something,
or to understand why something isn't working the way you expect it to,
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

- [ ] PlatformIO IDE. All issues related to PlatformIO IDE should be reported to appropriate repository
https://github.com/platformio/platformio-atom-ide/issues
- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)

- [ ] Development Platform or Board. All issues related to Development Platforms or Embedded Boards
should be reported to appropriate repository.
See full list with repositories and search for "platform-xxx" repository related to your hardware
https://github.com/platformio?query=platform-
- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
should be reported to appropriate repository related to your hardware
https://github.com/topics/platformio-platform

- [ ] Feature Request. Start by telling us what problem you’re trying to solve. Often a solution
- [ ] **Feature Request**.
Start by telling us what problem you’re trying to solve. Often a solution
already exists! Don’t send pull requests to implement new features without first getting our
support. Sometimes we leave features out on purpose to keep the project small.

- [ ] PlatformIO Core. If you’ve found a bug, please provide an information below.
- [ ] **PlatformIO Core**.
If you’ve found a bug, please provide an information below.

*You can erase any parts of this template not applicable to your Issue.*
|
@ -1,3 +1,3 @@
[settings]
line_length=79
known_third_party=bottle,click,lockfile,python-dateutil,pytest,requests,SCons,semantic_version,serial
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial
@ -21,7 +21,7 @@ matrix:
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install tox; else pip install -U tox; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install "tox==3.0.0"; else pip install -U tox; fi

# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
@ -29,10 +29,6 @@ install:
script:
- tox -e $TOX_ENV

after_success:
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then tox -e coverage; fi
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then bash <(curl -s https://codecov.io/bash); fi

notifications:
email: false
29 HISTORY.rst
@ -4,6 +4,35 @@ Release Notes
PlatformIO 3.0
--------------

3.6.0 (2018-08-06)
~~~~~~~~~~~~~~~~~~

* `Program Memory Usage <http://docs.platformio.org/en/latest/faq.html#program-memory-usage>`_

- Print human-readable memory usage information after a build and before uploading
- Print detailed memory usage information with "sections" and "addresses"
in `verbose mode <http://docs.platformio.org/en/latest/userguide/cmd_run.html#cmdoption-platformio-run-v>`__
- Check maximum allowed "program" and "data" sizes before uploading/programming
(`issue #1412 <https://github.com/platformio/platformio-core/issues/1412>`_)

* `PIO Unit Testing <http://docs.platformio.org/page/plus/unit-testing.html>`__:

- Documented `Project Shared Code <http://docs.platformio.org/page/plus/unit-testing.html#shared-code>`__
- Force building of project source code using `test_build_project_src <http://docs.platformio.org/page/projectconf/section_env_test.html#test_build_project_src>`__ option
- Fixed missed ``UNIT_TEST`` macro for unit test components/libraries

* Check package structure after unpacking and raise error when antivirus tool
blocks PlatformIO package manager
(`issue #1462 <https://github.com/platformio/platformio-core/issues/1462>`_)
* Lock interprocess requests to PlatformIO Package Manager for
install/uninstall operations
(`issue #1594 <https://github.com/platformio/platformio-core/issues/1594>`_)
* Fixed an issue with `PIO Remote <http://docs.platformio.org/page/plus/pio-remote.html>`__
when upload process depends on the source code of a project framework
* Fixed an issue when ``srcFilter`` field in `library.json <http://docs.platformio.org/page/librarymanager/config.html>`__
breaks a library build
(`issue #1735 <https://github.com/platformio/platformio-core/issues/1735>`_)

3.5.4 (2018-07-03)
~~~~~~~~~~~~~~~~~~
44 README.rst
@ -7,9 +7,6 @@ PlatformIO
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
|
||||
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
|
||||
:alt: AppVeyor.CI Build Status
|
||||
.. image:: https://requires.io/github/platformio/platformio-core/requirements.svg?branch=develop
|
||||
:target: https://requires.io/github/platformio/platformio-core/requirements/?branch=develop
|
||||
:alt: Requirements Status
|
||||
.. image:: https://img.shields.io/pypi/v/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: Latest Version
|
||||
@ -19,12 +16,12 @@ PlatformIO
|
||||
.. image:: https://img.shields.io/PlatformIO/Community.png
|
||||
:alt: Community Forums
|
||||
:target: https://community.platformio.org?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/PlatformIO/Plus.png?color=orange
|
||||
:alt: PlatformIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
.. image:: https://img.shields.io/PIO/Plus.png?color=orange
|
||||
:alt: PIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
:target: https://platformio.org/pricing?utm_source=github&utm_medium=core
|
||||
|
||||
**Quick Links:** `Home Page <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
|
||||
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
|
||||
`Docs <http://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
@ -32,6 +29,7 @@ PlatformIO
|
||||
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
`Facebook <https://www.facebook.com/platformio>`_ |
|
||||
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
|
||||
`Bintray <https://bintray.com/platformio>`_ |
|
||||
@ -49,16 +47,25 @@ Get Started
|
||||
|
||||
* `What is PlatformIO? <http://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Products
|
||||
--------
|
||||
Open Source
|
||||
-----------
|
||||
|
||||
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO Core (CLI) <http://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Remote™ <http://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unified Debugger <http://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `Library Management <http://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <http://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <http://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Advanced Scripting API <http://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
PIO Plus
|
||||
--------
|
||||
|
||||
* `PIO Remote <http://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unified Debugger <http://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unit Testing <http://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Delivery™ <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-pio-delivery>`_
|
||||
* `Cloud Builder <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-cloud-builder>`_
|
||||
* `Cloud IDEs Integration <http://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
|
||||
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_
|
||||
|
||||
Registry
|
||||
--------
|
||||
@ -68,14 +75,6 @@ Registry
|
||||
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
|
||||
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Solutions
|
||||
---------
|
||||
|
||||
* `Library Manager <http://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Cloud IDEs Integration <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-cloud-ide>`_
|
||||
* `Standalone IDEs Integration <http://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#other-ide>`_
|
||||
* `Continuous Integration <http://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Development Platforms
|
||||
---------------------
|
||||
|
||||
@ -84,6 +83,7 @@ Development Platforms
|
||||
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
|
||||
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
|
||||
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
|
||||
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
|
||||
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
|
||||
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
|
||||
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
|
||||
@ -91,6 +91,8 @@ Development Platforms
|
||||
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
|
||||
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
|
||||
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
|
||||
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
|
||||
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
|
||||
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
|
||||
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
|
||||
|
2 docs
Submodule docs updated: 0b8ac5fbf7...21c1cf522c
2 examples
Submodule examples updated: 41f3396c58...40bdd9e1b4
@ -14,7 +14,7 @@

import sys

VERSION = (3, 5, 4)
VERSION = (3, 6, 0)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@ -19,13 +19,12 @@ import os
|
||||
import uuid
|
||||
from copy import deepcopy
|
||||
from os import environ, getenv, listdir, remove
|
||||
from os.path import abspath, dirname, expanduser, getmtime, isdir, isfile, join
|
||||
from os.path import abspath, dirname, expanduser, isdir, isfile, join
|
||||
from time import time
|
||||
|
||||
import requests
|
||||
from lockfile import LockFailed, LockFile
|
||||
|
||||
from platformio import __version__, exception, util
|
||||
from platformio import exception, lockfile, util
|
||||
|
||||
|
||||
def projects_dir_validate(projects_dir):
|
||||
@ -108,10 +107,7 @@ class State(object):
|
||||
if self._prev_state != self._state:
|
||||
try:
|
||||
with codecs.open(self.path, "w", encoding="utf8") as fp:
|
||||
if "dev" in __version__:
|
||||
json.dump(self._state, fp, indent=4)
|
||||
else:
|
||||
json.dump(self._state, fp)
|
||||
json.dump(self._state, fp)
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(util.get_home_dir())
|
||||
self._unlock_state_file()
|
||||
@ -119,21 +115,19 @@ class State(object):
|
||||
def _lock_state_file(self):
|
||||
if not self.lock:
|
||||
return
|
||||
self._lockfile = LockFile(self.path)
|
||||
|
||||
if self._lockfile.is_locked() and \
|
||||
(time() - getmtime(self._lockfile.lock_file)) > 10:
|
||||
self._lockfile.break_lock()
|
||||
|
||||
self._lockfile = lockfile.LockFile(self.path)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except LockFailed:
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(dirname(self.path))
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
|
||||
def __del__(self):
|
||||
self._unlock_state_file()
|
||||
|
||||
|
||||
class ContentCache(object):
|
||||
|
||||
@ -155,15 +149,10 @@ class ContentCache(object):
|
||||
def _lock_dbindex(self):
|
||||
if not self.cache_dir:
|
||||
os.makedirs(self.cache_dir)
|
||||
self._lockfile = LockFile(self.cache_dir)
|
||||
if self._lockfile.is_locked() and \
|
||||
isfile(self._lockfile.lock_file) and \
|
||||
(time() - getmtime(self._lockfile.lock_file)) > 10:
|
||||
self._lockfile.break_lock()
|
||||
|
||||
self._lockfile = lockfile.LockFile(self.cache_dir)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except LockFailed:
|
||||
except: # pylint: disable=bare-except
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@ -20,7 +20,7 @@ from os.path import expanduser, join
|
||||
from time import time
|
||||
|
||||
from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
|
||||
Action, AllowSubstExceptions, AlwaysBuild,
|
||||
AllowSubstExceptions, AlwaysBuild, Default,
|
||||
DefaultEnvironment, Variables)
|
||||
|
||||
from platformio import util
|
||||
@ -68,6 +68,9 @@ commonvars.AddVariables(
|
||||
("UPLOAD_FLAGS",),
|
||||
("UPLOAD_RESETMETHOD",),
|
||||
|
||||
# test options
|
||||
("TEST_BUILD_PROJECT_SRC",),
|
||||
|
||||
# debug options
|
||||
("DEBUG_TOOL",),
|
||||
("DEBUG_SVD_PATH",),
|
||||
@ -76,7 +79,8 @@ commonvars.AddVariables(
|
||||
|
||||
MULTILINE_VARS = [
|
||||
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
|
||||
"BUILD_UNFLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE", "LIB_EXTRA_DIRS"
|
||||
"BUILD_UNFLAGS", "UPLOAD_FLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE",
|
||||
"LIB_EXTRA_DIRS"
|
||||
]
|
||||
|
||||
DEFAULT_ENV_OPTIONS = dict(
|
||||
@ -164,15 +168,34 @@ for item in env.GetExtraScripts("pre"):
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS + ["size"]))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))
|
||||
|
||||
if "UPLOAD_FLAGS" in env:
|
||||
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Checking program size
|
||||
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.Depends(["upload", "program"], "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
Default(_new_targets)
|
||||
Default("checkprogsize")
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(
|
||||
["upload", "program"],
|
||||
env.VerboseAction(lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol..."))
|
||||
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
##############################################################################
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
print env.Dump()
|
||||
env.Exit(0)
|
||||
@ -189,7 +212,3 @@ if "idedata" in COMMAND_LINE_TARGETS:
|
||||
"See explanation in FAQ > Troubleshooting > Building\n"
|
||||
"http://docs.platformio.org/page/faq.html\n\n")
|
||||
env.Exit(1)
|
||||
|
||||
env.AddPreAction(["upload", "program"],
|
||||
Action(lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol..."))
|
||||
|
@ -152,6 +152,10 @@ def DumpIDEData(env):
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path":
env.subst("$PROG_PATH"),
"flash_extra_images": [{
"offset": item[0],
"path": env.subst(item[1])
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
"svd_path":
_get_svd_path(env),
"compiler_type":
@ -586,15 +586,6 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
def _is_arduino_manifest(self):
|
||||
return isfile(join(self.path, "library.properties"))
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if all([
|
||||
"srcFilter" in self._manifest.get("build", {})
|
||||
or self.env['SRC_FILTER'], not self._is_arduino_manifest()
|
||||
]):
|
||||
return self.path
|
||||
return LibBuilderBase.src_dir.fget(self)
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
if "srcFilter" in self._manifest.get("build", {}):
|
||||
|
@ -288,6 +288,7 @@ def PioClean(env, clean_dir):
def ProcessDebug(env):
if not env.subst("$PIODEBUGFLAGS"):
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
env.Append(PIODEBUGFLAGS=["-D__PLATFORMIO_DEBUG__"])
env.Append(
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []),
BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"])
@ -69,15 +69,22 @@ def LoadPioPlatform(env, variables):
|
||||
# Ensure real platform name
|
||||
env['PIOPLATFORM'] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for name in installed_packages:
|
||||
type_ = p.get_package_type(name)
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
continue
|
||||
path = p.get_package_dir(name)
|
||||
if isdir(join(path, "bin")):
|
||||
path = join(path, "bin")
|
||||
env.PrependENVPath("PATH", path)
|
||||
pkg_dir = p.get_package_dir(name)
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
|
||||
if ("windows" not in systype and isdir(join(pkg_dir, "lib"))
|
||||
and type_ != "toolchain"):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH"
|
||||
if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
join(pkg_dir, "lib"))
|
||||
|
||||
# Platform specific LD Scripts
|
||||
if isdir(join(p.get_dir(), "ldscripts")):
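The hunk above also prepends each package's `lib/` directory to the dynamic-linker search path on non-Windows hosts. A minimal sketch of that selection logic outside of SCons (the `systype` strings mirror `platformio.util.get_systype()` values; the helper name is illustrative):

    def library_path_var(systype):
        # Windows resolves shared libraries through PATH, so no extra variable is set.
        if "windows" in systype:
            return None
        # macOS uses DYLD_LIBRARY_PATH; other POSIX systems use LD_LIBRARY_PATH.
        return "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"

    assert library_path_var("darwin_x86_64") == "DYLD_LIBRARY_PATH"
    assert library_path_var("linux_armv7l") == "LD_LIBRARY_PATH"
    assert library_path_var("windows_amd64") is None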
|
||||
|
@ -14,6 +14,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
@ -21,11 +22,13 @@ from os.path import isfile, join
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Node.Alias import Alias
|
||||
from SCons.Script import ARGUMENTS
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import util
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
s = Serial(env.subst(port))
|
||||
@ -45,7 +48,7 @@ def TouchSerialPort(env, port, baudrate):
|
||||
s = Serial(port=port, baudrate=baudrate)
|
||||
s.setDTR(False)
|
||||
s.close()
|
||||
except: # pylint: disable=W0702
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
sleep(0.4) # DO NOT REMOVE THAT (required by SAM-BA based boards)
|
||||
|
||||
@ -88,7 +91,7 @@ def WaitForNewSerialPort(env, before):
|
||||
return new_port
|
||||
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
env = args[0]
|
||||
|
||||
def _get_pattern():
|
||||
@ -173,7 +176,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
def UploadToDisk(_, target, source, env):
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
@ -186,32 +189,87 @@ def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
"(Some boards may require manual hard reset)"
|
||||
|
||||
|
||||
def CheckUploadSize(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
if "BOARD" not in env:
|
||||
return
|
||||
max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
if max_size == 0 or "SIZETOOL" not in env:
|
||||
return
|
||||
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
cmd = [
|
||||
env.subst("$SIZETOOL"), "-B",
|
||||
str(source[0] if isinstance(target[0], Alias) else target[0])
|
||||
def CheckUploadSize(_, target, source, env):
|
||||
check_conditions = [
|
||||
env.get("BOARD"),
|
||||
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
|
||||
]
|
||||
result = util.exec_command(cmd, env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
if not all(check_conditions):
|
||||
return
|
||||
program_max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
data_max_size = int(env.BoardConfig().get("upload.maximum_ram_size", 0))
|
||||
if program_max_size == 0:
|
||||
return
|
||||
print result['out'].strip()
|
||||
|
||||
line = result['out'].strip().splitlines()[1]
|
||||
values = [v.strip() for v in line.split("\t")]
|
||||
used_size = int(values[0]) + int(values[1])
|
||||
def _configure_defaults():
|
||||
env.Replace(
|
||||
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
|
||||
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
|
||||
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
|
||||
|
||||
if used_size > max_size:
|
||||
sys.stderr.write(
|
||||
"Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (used_size, max_size))
|
||||
def _get_size_output():
|
||||
cmd = env.get("SIZECHECKCMD")
|
||||
if not cmd:
|
||||
return None
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command(env.subst(cmd), env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
return None
|
||||
return result['out'].strip()
|
||||
|
||||
def _calculate_size(output, pattern):
|
||||
if not output or not pattern:
|
||||
return -1
|
||||
size = 0
|
||||
regexp = re.compile(pattern)
|
||||
for line in output.split("\n"):
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
match = regexp.search(line)
|
||||
if not match:
|
||||
continue
|
||||
size += sum(int(value) for value in match.groups())
|
||||
return size
|
||||
|
||||
def _format_availale_bytes(value, total):
|
||||
percent_raw = float(value) / float(total)
|
||||
blocks_per_progress = 10
|
||||
used_blocks = int(round(blocks_per_progress * percent_raw))
|
||||
if used_blocks > blocks_per_progress:
|
||||
used_blocks = blocks_per_progress
|
||||
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
|
||||
|
||||
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
|
||||
_configure_defaults()
|
||||
output = _get_size_output()
|
||||
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
|
||||
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
|
||||
|
||||
print "Memory Usage -> http://bit.ly/pio-memory-usage"
|
||||
if data_max_size and data_size > -1:
|
||||
print "DATA: %s" % _format_availale_bytes(data_size, data_max_size)
|
||||
if program_size > -1:
|
||||
print "PROGRAM: %s" % _format_availale_bytes(program_size,
|
||||
program_max_size)
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print output
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write("Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" %
|
||||
(program_size, program_max_size))
|
||||
env.Exit(1)
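The rewritten `CheckUploadSize` above derives "program" and "data" usage by summing regex captures over the `$SIZETOOL -B -d` output and renders a ten-block progress bar. A self-contained sketch of that calculation with assumed sample values (the size output line and the maximum sizes are illustrative, not taken from a real board):

    import re

    # Assumed sample output of `size -B -d firmware.elf` (hypothetical numbers)
    SIZE_OUTPUT = """   text    data     bss     dec     hex filename
      12345     512    1024   13881    3639 firmware.elf"""

    SIZEPROGREGEXP = r"^(\d+)\s+(\d+)\s+\d+\s"      # text + data
    SIZEDATAREGEXP = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"  # data + bss

    def calculate_size(output, pattern):
        # Sum every captured group on every matching line, as the builder does.
        regexp = re.compile(pattern)
        total = 0
        for line in output.split("\n"):
            match = regexp.search(line.strip())
            if match:
                total += sum(int(value) for value in match.groups())
        return total

    def format_available_bytes(value, total, blocks=10):
        percent = float(value) / float(total)
        used = min(int(round(blocks * percent)), blocks)
        return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
            "=" * used, blocks, percent, value, total)

    program_size = calculate_size(SIZE_OUTPUT, SIZEPROGREGEXP)
    data_size = calculate_size(SIZE_OUTPUT, SIZEDATAREGEXP)
    print("PROGRAM: %s" % format_available_bytes(program_size, 32256))
    print("DATA:    %s" % format_available_bytes(data_size, 2048))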
|
||||
|
||||
|
||||
|
@ -54,21 +54,21 @@ def _build_project_deps(env):
|
||||
if project_lib_builder.env.get(key)
|
||||
})
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
projenv = env.Clone()
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
else:
|
||||
projenv = env.Clone()
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
|
||||
projenv = env.Clone()
|
||||
|
||||
# CPPPATH from dependencies
|
||||
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
|
||||
# extra build flags from `platformio.ini`
|
||||
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
is_test = "__test" in COMMAND_LINE_TARGETS
|
||||
if is_test:
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
if not is_test or env.get("TEST_BUILD_PROJECT_SRC") == "true":
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
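The block above decides which source trees get compiled: test sources always build under the `__test` target, while project sources are skipped during tests unless the new `test_build_project_src` option is enabled. A hedged sketch of that decision, detached from SCons (the "true"/"false" strings follow the option values used above):

    def sources_to_build(command_line_targets, test_build_project_src="false"):
        # Mirrors the is_test / TEST_BUILD_PROJECT_SRC branching above.
        is_test = "__test" in command_line_targets
        selected = []
        if is_test:
            selected.append("test")
        if not is_test or test_build_project_src == "true":
            selected.append("src")
        return selected

    assert sources_to_build(["__test"]) == ["test"]
    assert sources_to_build(["__test"], "true") == ["test", "src"]
    assert sources_to_build(["buildprog"]) == ["src"]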
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
@ -112,6 +112,9 @@ def BuildProgram(env):
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
|
||||
# build project with dependencies
|
||||
_build_project_deps(env)
|
||||
|
||||
@ -127,12 +130,13 @@ def BuildProgram(env):
|
||||
|
||||
program = env.Program(
|
||||
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
checksize_action = env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking program size")
|
||||
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
|
||||
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.AddPostAction(program, checksize_action)
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize", program,
|
||||
env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking size $PIOMAINPROG")))
|
||||
|
||||
return program
|
||||
|
||||
@ -303,7 +307,8 @@ def BuildFrameworks(env, frameworks):
if f in ("arduino", "energia"):
# Arduino IDE appends .o the end of filename
Builder.match_splitext = scons_patched_match_splitext
env.ConvertInoToCpp()
if "nobuild" not in COMMAND_LINE_TARGETS:
env.ConvertInoToCpp()

if f in board_frameworks:
SConscript(env.GetFrameworkScript(f), exports="env")
@ -125,32 +125,33 @@ class EnvironmentProcessor(object):
|
||||
|
||||
DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")
|
||||
|
||||
KNOWN_PLATFORMIO_OPTIONS = ("description", "env_default", "home_dir",
|
||||
"lib_dir", "libdeps_dir", "include_dir",
|
||||
"src_dir", "build_dir", "data_dir", "test_dir",
|
||||
"boards_dir", "lib_extra_dirs")
|
||||
KNOWN_PLATFORMIO_OPTIONS = [
|
||||
"description", "env_default", "home_dir", "lib_dir", "libdeps_dir",
|
||||
"include_dir", "src_dir", "build_dir", "data_dir", "test_dir",
|
||||
"boards_dir", "lib_extra_dirs"
|
||||
]
|
||||
|
||||
KNOWN_ENV_OPTIONS = ("platform", "framework", "board", "build_flags",
|
||||
"src_build_flags", "build_unflags", "src_filter",
|
||||
"extra_scripts", "targets", "upload_port",
|
||||
"upload_protocol", "upload_speed", "upload_flags",
|
||||
"upload_resetmethod", "lib_deps", "lib_ignore",
|
||||
"lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode",
|
||||
"lib_archive", "piotest", "test_transport",
|
||||
"test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "debug_tool", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds", "debug_server",
|
||||
"debug_init_break", "debug_load_cmd",
|
||||
"debug_load_mode", "debug_svd_path", "monitor_port",
|
||||
"monitor_speed", "monitor_rts", "monitor_dtr")
|
||||
KNOWN_ENV_OPTIONS = [
|
||||
"platform", "framework", "board", "build_flags", "src_build_flags",
|
||||
"build_unflags", "src_filter", "extra_scripts", "targets",
|
||||
"upload_port", "upload_protocol", "upload_speed", "upload_flags",
|
||||
"upload_resetmethod", "lib_deps", "lib_ignore", "lib_extra_dirs",
|
||||
"lib_ldf_mode", "lib_compat_mode", "lib_archive", "piotest",
|
||||
"test_transport", "test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "test_build_project_src", "debug_tool", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds", "debug_server",
|
||||
"debug_init_break", "debug_load_cmd", "debug_load_mode",
|
||||
"debug_svd_path", "monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr"
|
||||
]
|
||||
|
||||
IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore",
|
||||
"test_port", "test_speed", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds",
|
||||
"debug_server", "debug_init_break",
|
||||
"debug_load_cmd", "debug_load_mode",
|
||||
"monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr")
|
||||
IGNORE_BUILD_OPTIONS = [
|
||||
"test_transport", "test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "debug_port", "debug_init_cmds", "debug_extra_cmds",
|
||||
"debug_server", "debug_init_break", "debug_load_cmd",
|
||||
"debug_load_mode", "monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr"
|
||||
]
|
||||
|
||||
REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}
|
||||
|
||||
|
@ -150,7 +150,7 @@ def get_develop_latest_version():

def get_pypi_latest_version():
r = requests.get(
"https://pypi.python.org/pypi/platformio/json",
"https://pypi.org/pypi/platformio/json",
headers=util.get_request_defheaders())
r.raise_for_status()
return r.json()['info']['version']
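The change above points the update check at the pypi.org JSON API instead of the legacy pypi.python.org host. A standalone sketch of the same lookup (the default PlatformIO request headers are omitted here; the `requests` package and network access are assumed):

    import requests

    def get_pypi_latest_version():
        r = requests.get("https://pypi.org/pypi/platformio/json", timeout=10)
        r.raise_for_status()
        return r.json()["info"]["version"]

    print(get_pypi_latest_version())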
@ -28,6 +28,10 @@ class ReturnErrorCode(PlatformioException):
MESSAGE = "{0}"


class LockFileTimeoutError(PlatformioException):
pass


class MinitermException(PlatformioException):
pass

@ -102,6 +106,13 @@ class PackageInstallError(PlatformioException):
"Please try this solution -> http://bit.ly/faq-package-manager")


class ExtractArchiveItemError(PlatformioException):

MESSAGE = (
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
"tool or check this solution -> http://bit.ly/faq-package-manager")


class FDUnrecognizedStatusCode(PlatformioException):

MESSAGE = "Got an unrecognized status code '{0}' when downloaded {1}"
@ -97,7 +97,7 @@
"settings":
{
"sublimegdb_workingdir": "{{project_dir}}",
"sublimegdb_exec_cmd": "-exec-continue",
"sublimegdb_exec_cmd": "",
"sublimegdb_commandline": "{{platformio_path}} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"

}
@ -17,7 +17,7 @@
{
"type": "platformio-debug",
"request": "launch",
"name": "PlatformIO Debugger",
"name": "PIO Debug",
"executable": "{{ _escape_path(prog_path) }}",
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
% if svd_path:
@ -29,7 +29,7 @@
{
"type": "platformio-debug",
"request": "launch",
"name": "PlatformIO Debugger (Skip Pre-Debug)",
"name": "PIO Debug (Skip Pre-Debug)",
"executable": "{{ _escape_path(prog_path) }}",
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
% if svd_path:
108 platformio/lockfile.py (new file)
@ -0,0 +1,108 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os import remove
|
||||
from os.path import abspath, exists, getmtime
|
||||
from time import sleep, time
|
||||
|
||||
from platformio import exception
|
||||
|
||||
LOCKFILE_TIMEOUT = 3600 # in seconds, 1 hour
|
||||
LOCKFILE_DELAY = 0.2
|
||||
|
||||
LOCKFILE_INTERFACE_FCNTL = 1
|
||||
LOCKFILE_INTERFACE_MSVCRT = 2
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_FCNTL
|
||||
except ImportError:
|
||||
try:
|
||||
import msvcrt
|
||||
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_MSVCRT
|
||||
except ImportError:
|
||||
LOCKFILE_CURRENT_INTERFACE = None
|
||||
|
||||
|
||||
class LockFileExists(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class LockFile(object):
|
||||
|
||||
def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
|
||||
self.timeout = timeout
|
||||
self.delay = delay
|
||||
self._lock_path = abspath(path) + ".lock"
|
||||
self._fp = None
|
||||
|
||||
def _lock(self):
|
||||
if not LOCKFILE_CURRENT_INTERFACE and exists(self._lock_path):
|
||||
# remove stale lock
|
||||
if time() - getmtime(self._lock_path) > 10:
|
||||
try:
|
||||
remove(self._lock_path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise LockFileExists
|
||||
|
||||
self._fp = open(self._lock_path, "w")
|
||||
try:
|
||||
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
|
||||
fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
|
||||
msvcrt.locking(self._fp.fileno(), msvcrt.LK_NBLCK, 1)
|
||||
except IOError:
|
||||
self._fp = None
|
||||
raise LockFileExists
|
||||
return True
|
||||
|
||||
def _unlock(self):
|
||||
if not self._fp:
|
||||
return
|
||||
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
|
||||
fcntl.flock(self._fp.fileno(), fcntl.LOCK_UN)
|
||||
elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
|
||||
msvcrt.locking(self._fp.fileno(), msvcrt.LK_UNLCK, 1)
|
||||
self._fp.close()
|
||||
self._fp = None
|
||||
|
||||
def acquire(self):
|
||||
elapsed = 0
|
||||
while elapsed < self.timeout:
|
||||
try:
|
||||
return self._lock()
|
||||
except LockFileExists:
|
||||
sleep(self.delay)
|
||||
elapsed += self.delay
|
||||
|
||||
raise exception.LockFileTimeoutError()
|
||||
|
||||
def release(self):
|
||||
self._unlock()
|
||||
if exists(self._lock_path):
|
||||
try:
|
||||
remove(self._lock_path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.acquire()
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
self.release()
|
||||
|
||||
def __del__(self):
|
||||
self.release()
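The new `platformio/lockfile.py` module above backs the interprocess locking mentioned in the release notes: `fcntl` on POSIX, `msvcrt` on Windows, and an mtime-based stale-lock fallback elsewhere. A minimal usage sketch, assuming the module layout above (the directory path is illustrative):

    from os.path import expanduser

    from platformio.lockfile import LockFile

    packages_dir = expanduser("~/.platformio/packages")

    # Serialize package install/uninstall work across PlatformIO processes;
    # a "<path>.lock" file is created next to the guarded directory.
    with LockFile(packages_dir, timeout=60, delay=0.2):
        pass  # install / uninstall operations go here

`BasePkgManager.install()` and `uninstall()` in the diff below wrap their bodies in exactly this kind of `with LockFile(self.package_dir):` block.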
|
@ -21,9 +21,9 @@ from platformio import __version__, exception, util
from platformio.managers.package import PackageManager

CORE_PACKAGES = {
"contrib-piohome": ">=0.9.5,<2",
"contrib-pysite": ">=0.2.0,<2",
"tool-pioplus": ">=1.3.1,<2",
"contrib-piohome": "^1.0.2",
"contrib-pysite": ">=0.3.2,<2",
"tool-pioplus": "^1.4.5",
"tool-unity": "~1.20403.0",
"tool-scons": "~2.20501.4"
}
@ -27,6 +27,7 @@ import semantic_version
|
||||
|
||||
from platformio import __version__, app, exception, telemetry, util
|
||||
from platformio.downloader import FileDownloader
|
||||
from platformio.lockfile import LockFile
|
||||
from platformio.unpacker import FileUnpacker
|
||||
from platformio.vcsclient import VCSClientFactory
|
||||
|
||||
@ -679,99 +680,110 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
silent=False,
|
||||
after_update=False,
|
||||
force=False):
|
||||
name, requirements, url = self.parse_pkg_uri(name, requirements)
|
||||
package_dir = self.get_package_dir(name, requirements, url)
|
||||
pkg_dir = None
|
||||
# interprocess lock
|
||||
with LockFile(self.package_dir):
|
||||
self.cache_reset()
|
||||
|
||||
# avoid circle dependencies
|
||||
if not self.INSTALL_HISTORY:
|
||||
self.INSTALL_HISTORY = []
|
||||
history_key = "%s-%s-%s" % (name, requirements or "", url or "")
|
||||
if history_key in self.INSTALL_HISTORY:
|
||||
return package_dir
|
||||
self.INSTALL_HISTORY.append(history_key)
|
||||
name, requirements, url = self.parse_pkg_uri(name, requirements)
|
||||
package_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if package_dir and force:
|
||||
self.uninstall(package_dir)
|
||||
package_dir = None
|
||||
# avoid circle dependencies
|
||||
if not self.INSTALL_HISTORY:
|
||||
self.INSTALL_HISTORY = []
|
||||
history_key = "%s-%s-%s" % (name, requirements or "", url or "")
|
||||
if history_key in self.INSTALL_HISTORY:
|
||||
return package_dir
|
||||
self.INSTALL_HISTORY.append(history_key)
|
||||
|
||||
if package_dir and force:
|
||||
self.uninstall(package_dir)
|
||||
package_dir = None
|
||||
|
||||
if not package_dir or not silent:
|
||||
msg = "Installing " + click.style(name, fg="cyan")
|
||||
if requirements:
|
||||
msg += " @ " + requirements
|
||||
self.print_message(msg)
|
||||
if package_dir:
|
||||
if not silent:
|
||||
click.secho(
|
||||
"{name} @ {version} is already installed".format(
|
||||
**self.load_manifest(package_dir)),
|
||||
fg="yellow")
|
||||
return package_dir
|
||||
|
||||
if url:
|
||||
pkg_dir = self._install_from_url(
|
||||
name, url, requirements, track=True)
|
||||
else:
|
||||
pkg_dir = self._install_from_piorepo(name, requirements)
|
||||
|
||||
if not pkg_dir or not self.manifest_exists(pkg_dir):
|
||||
raise exception.PackageInstallError(name, requirements or "*",
|
||||
util.get_systype())
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
assert manifest
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Install",
|
||||
label=manifest['name'])
|
||||
|
||||
if not package_dir or not silent:
|
||||
msg = "Installing " + click.style(name, fg="cyan")
|
||||
if requirements:
|
||||
msg += " @ " + requirements
|
||||
self.print_message(msg)
|
||||
if package_dir:
|
||||
if not silent:
|
||||
click.secho(
|
||||
"{name} @ {version} is already installed".format(
|
||||
**self.load_manifest(package_dir)),
|
||||
fg="yellow")
|
||||
return package_dir
|
||||
|
||||
if url:
|
||||
pkg_dir = self._install_from_url(
|
||||
name, url, requirements, track=True)
|
||||
else:
|
||||
pkg_dir = self._install_from_piorepo(name, requirements)
|
||||
|
||||
if not pkg_dir or not self.manifest_exists(pkg_dir):
|
||||
raise exception.PackageInstallError(name, requirements or "*",
|
||||
util.get_systype())
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
assert manifest
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Install",
|
||||
label=manifest['name'])
|
||||
|
||||
if not silent:
|
||||
click.secho(
|
||||
"{name} @ {version} has been successfully installed!".format(
|
||||
**manifest),
|
||||
fg="green")
|
||||
"{name} @ {version} has been successfully installed!".
|
||||
format(**manifest),
|
||||
fg="green")
|
||||
|
||||
return pkg_dir
|
||||
|
||||
def uninstall(self, package, requirements=None, after_update=False):
|
||||
if isdir(package) and self.get_package_by_dir(package):
|
||||
pkg_dir = package
|
||||
else:
|
||||
name, requirements, url = self.parse_pkg_uri(package, requirements)
|
||||
pkg_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if not pkg_dir:
|
||||
raise exception.UnknownPackage(
|
||||
"%s @ %s" % (package, requirements or "*"))
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
click.echo(
|
||||
"Uninstalling %s @ %s: \t" % (click.style(
|
||||
manifest['name'], fg="cyan"), manifest['version']),
|
||||
nl=False)
|
||||
|
||||
if islink(pkg_dir):
|
||||
os.unlink(pkg_dir)
|
||||
else:
|
||||
util.rmtree_(pkg_dir)
|
||||
self.cache_reset()
|
||||
|
||||
# unfix package with the same name
|
||||
pkg_dir = self.get_package_dir(manifest['name'])
|
||||
if pkg_dir and "@" in pkg_dir:
|
||||
shutil.move(
|
||||
pkg_dir,
|
||||
join(self.package_dir, self.get_install_dirname(manifest)))
|
||||
# interprocess lock
|
||||
with LockFile(self.package_dir):
|
||||
self.cache_reset()
|
||||
|
||||
click.echo("[%s]" % click.style("OK", fg="green"))
|
||||
if isdir(package) and self.get_package_by_dir(package):
|
||||
pkg_dir = package
|
||||
else:
|
||||
name, requirements, url = self.parse_pkg_uri(
|
||||
package, requirements)
|
||||
pkg_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if not pkg_dir:
|
||||
raise exception.UnknownPackage(
|
||||
"%s @ %s" % (package, requirements or "*"))
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
click.echo(
|
||||
"Uninstalling %s @ %s: \t" % (click.style(
|
||||
manifest['name'], fg="cyan"), manifest['version']),
|
||||
nl=False)
|
||||
|
||||
if islink(pkg_dir):
|
||||
os.unlink(pkg_dir)
|
||||
else:
|
||||
util.rmtree_(pkg_dir)
|
||||
self.cache_reset()
|
||||
|
||||
# unfix package with the same name
|
||||
pkg_dir = self.get_package_dir(manifest['name'])
|
||||
if pkg_dir and "@" in pkg_dir:
|
||||
shutil.move(
|
||||
pkg_dir,
|
||||
join(self.package_dir, self.get_install_dirname(manifest)))
|
||||
self.cache_reset()
|
||||
|
||||
click.echo("[%s]" % click.style("OK", fg="green"))
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Uninstall",
|
||||
label=manifest['name'])
|
||||
|
||||
if not after_update:
|
||||
telemetry.on_event(
|
||||
category=self.__class__.__name__,
|
||||
action="Uninstall",
|
||||
label=manifest['name'])
|
||||
return True
|
||||
|
||||
def update(self, package, requirements=None, only_check=False):
|
||||
|
@ -604,12 +604,13 @@ class PlatformBase( # pylint: disable=too-many-public-methods

# enable upload tools for upload targets
if any(["upload" in t for t in targets] + ["program" in targets]):
for _name, _opts in self.packages.iteritems():
if _opts.get("type") == "uploader":
self.packages[_name]['optional'] = False
elif "nobuild" in targets:
# skip all packages, allow only upload tools
self.packages[_name]['optional'] = True
for name, opts in self.packages.iteritems():
if opts.get("type") == "uploader":
self.packages[name]['optional'] = False
# skip all packages in "nobuild" mode
# allow only upload tools and frameworks
elif "nobuild" in targets and opts.get("type") != "framework":
self.packages[name]['optional'] = True

def get_lib_storages(self):
storages = []
@ -13,15 +13,14 @@
|
||||
# limitations under the License.
|
||||
|
||||
from os import chmod
|
||||
from os.path import join
|
||||
from os.path import exists, islink, join
|
||||
from tarfile import open as tarfile_open
|
||||
from time import mktime
|
||||
from zipfile import ZipFile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import util
|
||||
from platformio.exception import UnsupportedArchiveType
|
||||
from platformio import exception, util
|
||||
|
||||
|
||||
class ArchiveBase(object):
|
||||
@ -32,6 +31,9 @@ class ArchiveBase(object):
|
||||
def get_items(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
raise NotImplementedError()
|
||||
|
||||
def extract_item(self, item, dest_dir):
|
||||
self._afo.extract(item, dest_dir)
|
||||
self.after_extract(item, dest_dir)
|
||||
@ -51,6 +53,9 @@ class TARArchive(ArchiveBase):
|
||||
def get_items(self):
|
||||
return self._afo.getmembers()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
return item.name
|
||||
|
||||
|
||||
class ZIPArchive(ArchiveBase):
|
||||
|
||||
@ -72,6 +77,9 @@ class ZIPArchive(ArchiveBase):
|
||||
def get_items(self):
|
||||
return self._afo.infolist()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
return item.filename
|
||||
|
||||
def after_extract(self, item, dest_dir):
|
||||
self.preserve_permissions(item, dest_dir)
|
||||
self.preserve_mtime(item, dest_dir)
|
||||
@ -89,7 +97,7 @@ class FileUnpacker(object):
|
||||
elif self.archpath.lower().endswith(".zip"):
|
||||
self._unpacker = ZIPArchive(self.archpath)
|
||||
if not self._unpacker:
|
||||
raise UnsupportedArchiveType(self.archpath)
|
||||
raise exception.UnsupportedArchiveType(self.archpath)
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
@ -107,4 +115,12 @@ class FileUnpacker(object):
|
||||
with click.progressbar(items, label="Unpacking") as pb:
|
||||
for item in pb:
|
||||
self._unpacker.extract_item(item, dest_dir)
|
||||
|
||||
# check on disk
|
||||
for item in self._unpacker.get_items():
|
||||
filename = self._unpacker.get_item_filename(item)
|
||||
item_path = join(dest_dir, filename)
|
||||
if not islink(item_path) and not exists(item_path):
|
||||
raise exception.ExtractArchiveItemError(filename, dest_dir)
|
||||
|
||||
return True
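The added loop above verifies that every archive member actually exists on disk after unpacking, which is what turns an antivirus-blocked extraction into the new `ExtractArchiveItemError`. A standard-library-only sketch of that verification (directory and file names are illustrative):

    from os.path import exists, islink, join

    def find_missing_items(dest_dir, filenames):
        # A member that is neither a regular path nor a symlink was dropped,
        # typically because an antivirus tool quarantined it during extraction.
        return [
            name for name in filenames
            if not islink(join(dest_dir, name)) and not exists(join(dest_dir, name))
        ]

    # e.g. find_missing_items("/tmp/pkg", ["package.json", "library.properties"])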
|
||||
|
@ -53,6 +53,9 @@ KERNEL=="ttyACM*", ATTRS{idVendor}=="16d0", ATTRS{idProduct}=="0753", MODE:="066
|
||||
# STM32 discovery boards, with onboard st/linkv2
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374?", MODE:="0666"
|
||||
|
||||
# Maple with DFU
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="1eaf", ATTRS{idProduct}=="000[34]", MODE:="0666"
|
||||
|
||||
# USBtiny
|
||||
SUBSYSTEMS=="usb", ATTRS{idProduct}=="0c9f", ATTRS{idVendor}=="1781", MODE="0666"
|
||||
|
||||
@ -205,3 +208,52 @@ ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="660", GROUP="plugdev",
|
||||
|
||||
# CMSIS-DAP compatible adapters
|
||||
ATTRS{product}=="*CMSIS-DAP*", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
#SEGGER J-LIK
|
||||
ATTR{idProduct}=="1001", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1002", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1003", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1004", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1005", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1006", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1007", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1008", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1009", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100a", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100b", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100c", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100d", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100e", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="100f", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1010", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1011", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1012", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1013", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1014", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1015", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1016", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1017", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1018", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1019", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101a", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101b", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101c", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101d", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101e", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="101f", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1020", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1021", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1022", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1023", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1024", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1025", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1026", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1027", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1028", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="1029", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102a", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102b", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102c", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102d", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102e", ATTR{idVendor}=="1366", MODE="666"
|
||||
ATTR{idProduct}=="102f", ATTR{idVendor}=="1366", MODE="666"
|
||||
|
@ -118,7 +118,7 @@ def generate_boards(boards, extend_debug=False, skip_columns=None):
|
||||
return lines
|
||||
|
||||
|
||||
def generate_debug_boards(boards, skip_columns=None):
|
||||
def generate_debug_contents(boards, skip_board_columns=None, extra_rst=None):
|
||||
lines = []
|
||||
onboard_debug = [
|
||||
b for b in boards if b['debug'] and any(
|
||||
@ -127,40 +127,61 @@ def generate_debug_boards(boards, skip_columns=None):
|
||||
external_debug = [
|
||||
b for b in boards if b['debug'] and b not in onboard_debug
|
||||
]
|
||||
if onboard_debug or external_debug:
|
||||
lines.append("""
|
||||
if not onboard_debug and not external_debug:
|
||||
return lines
|
||||
|
||||
lines.append("""
|
||||
Debugging
|
||||
---------
|
||||
|
||||
:ref:`piodebug` - "1-click" solution for debugging with a zero configuration.
|
||||
|
||||
.. contents::
|
||||
:local:
|
||||
""")
|
||||
if extra_rst:
|
||||
lines.append(".. include:: %s" % extra_rst)
|
||||
|
||||
lines.append("""
|
||||
Debug Tools
|
||||
~~~~~~~~~~~
|
||||
|
||||
Supported debugging tools are listed in "Debug" column. For more detailed
|
||||
information, please scroll table by horizontal.
|
||||
You can switch between debugging :ref:`debugging_tools` using
|
||||
:ref:`projectconf_debug_tool` options.
|
||||
|
||||
.. warning::
|
||||
You will need to install debug tool drivers depending on your system.
|
||||
Please click on compatible debug tool below for the further instructions.
|
||||
""")
|
||||
|
||||
if onboard_debug:
|
||||
lines.append("""
|
||||
On-Board tools
|
||||
~~~~~~~~~~~~~~
|
||||
On-Board Debug Tools
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Boards listed below have on-board debugging tools and **ARE READY** for debugging!
|
||||
You do not need to use/buy external debugger.
|
||||
Boards listed below have on-board debug tool and **ARE READY** for debugging!
|
||||
You do not need to use/buy external debug tool.
|
||||
""")
|
||||
lines.extend(
|
||||
generate_boards(
|
||||
onboard_debug, extend_debug=True, skip_columns=skip_columns))
|
||||
onboard_debug,
|
||||
extend_debug=True,
|
||||
skip_columns=skip_board_columns))
|
||||
if external_debug:
|
||||
lines.append("""
|
||||
External tools
|
||||
~~~~~~~~~~~~~~
|
||||
External Debug Tools
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Boards listed below are compatible with :ref:`piodebug` but depend on external
|
||||
debugging tools. See "Debug" column for compatible debugging tools.
|
||||
Boards listed below are compatible with :ref:`piodebug` but **DEPEND ON**
|
||||
external debug tool. See "Debug" column for compatible debug tools.
|
||||
""")
|
||||
lines.extend(
|
||||
generate_boards(
|
||||
external_debug, extend_debug=True, skip_columns=skip_columns))
|
||||
external_debug,
|
||||
extend_debug=True,
|
||||
skip_columns=skip_board_columns))
|
||||
return lines
|
||||
|
||||
|
||||
@ -218,7 +239,7 @@ Packages
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def generate_platform(name, has_extra=False):
|
||||
def generate_platform(name, rst_dir):
|
||||
print "Processing platform: %s" % name
|
||||
|
||||
compatible_boards = [
|
||||
@ -240,6 +261,8 @@ def generate_platform(name, has_extra=False):
|
||||
limitations under the License.
|
||||
""")
|
||||
p = PlatformFactory.newPlatform(name)
|
||||
assert p.repository_url.endswith(".git")
|
||||
github_url = p.repository_url[:-4]
|
||||
|
||||
lines.append(".. _platform_%s:" % p.name)
|
||||
lines.append("")
@ -261,7 +284,7 @@ For more detailed information please visit `vendor site <%s>`_.""" %
    #
    # Extra
    #
    if has_extra:
    if isfile(join(rst_dir, "%s_extra.rst" % name)):
        lines.append(".. include:: %s_extra.rst" % p.name)

    #
@ -272,17 +295,13 @@ Examples
--------

Examples are listed from `%s development platform repository <%s>`_:
""" % (p.title,
       campaign_url(
           "https://github.com/platformio/platform-%s/tree/develop/examples" %
           p.name)))
""" % (p.title, campaign_url("%s/tree/master/examples" % github_url)))
    examples_dir = join(p.get_dir(), "examples")
    if isdir(examples_dir):
        for eitem in os.listdir(examples_dir):
            if not isdir(join(examples_dir, eitem)):
                continue
            url = ("https://github.com/platformio/platform-%s"
                   "/tree/develop/examples/%s" % (p.name, eitem))
            url = "%s/tree/master/examples/%s" % (github_url, eitem)
            lines.append("* `%s <%s>`_" % (eitem, campaign_url(url)))

    #
@ -290,8 +309,11 @@ Examples are listed from `%s development platform repository <%s>`_:
    #
    if compatible_boards:
        lines.extend(
            generate_debug_boards(
                compatible_boards, skip_columns=["Platform"]))
            generate_debug_contents(
                compatible_boards,
                skip_board_columns=["Platform"],
                extra_rst="%s_debug.rst" % name
                if isfile(join(rst_dir, "%s_debug.rst" % name)) else None))

    #
    # Development version of dev/platform
@ -300,7 +322,7 @@ Examples are listed from `%s development platform repository <%s>`_:
Stable and upstream versions
----------------------------

You can switch between `stable releases <https://github.com/platformio/platform-{name}/releases>`__
You can switch between `stable releases <{github_url}/releases>`__
of {title} development platform and the latest upstream version using
:ref:`projectconf_env_platform` option in :ref:`projectconf` as described below.

@ -325,9 +347,9 @@ Upstream
.. code-block:: ini

    [env:upstream_develop]
    platform = https://github.com/platformio/platform-{name}.git
    platform = {github_url}.git
    board = ...
""".format(name=p.name, title=p.title))
""".format(name=p.name, title=p.title, github_url=github_url))

    #
    # Packages
@ -395,14 +417,10 @@ def update_platform_docs():
        dirname(realpath(__file__)), "..", "docs", "platforms")
    rst_path = join(platforms_dir, "%s.rst" % name)
    with open(rst_path, "w") as f:
        f.write(
            generate_platform(name,
                              isfile(
                                  join(platforms_dir,
                                       "%s_extra.rst" % name))))
        f.write(generate_platform(name, platforms_dir))


def generate_framework(type_, data, has_extra=False):
def generate_framework(type_, data, rst_dir=None):
    print "Processing framework: %s" % type_

    compatible_platforms = [
@ -446,14 +464,18 @@ For more detailed information please visit `vendor site <%s>`_.
    :depth: 1""")

    # Extra
    if has_extra:
    if isfile(join(rst_dir, "%s_extra.rst" % type_)):
        lines.append(".. include:: %s_extra.rst" % type_)

    #
    # Debugging
    #
    if compatible_boards:
        lines.extend(generate_debug_boards(compatible_boards))
        lines.extend(
            generate_debug_contents(
                compatible_boards,
                extra_rst="%s_debug.rst" % type_
                if isfile(join(rst_dir, "%s_debug.rst" % type_)) else None))

    if compatible_platforms:
        # examples
@ -462,11 +484,12 @@ Examples
--------
""")
        for manifest in compatible_platforms:
            lines.append("* `%s for %s <%s>`_" % (
                data['title'], manifest['title'],
                campaign_url(
                    "https://github.com/platformio/platform-%s/tree/develop/examples"
                    % manifest['name'])))
            p = PlatformFactory.newPlatform(manifest['name'])
            lines.append(
                "* `%s for %s <%s>`_" %
                (data['title'], manifest['title'],
                 campaign_url(
                     "%s/tree/master/examples" % p.repository_url[:-4])))

        # Platforms
        lines.append("""
@ -517,45 +540,7 @@ def update_framework_docs():
        dirname(realpath(__file__)), "..", "docs", "frameworks")
    rst_path = join(frameworks_dir, "%s.rst" % name)
    with open(rst_path, "w") as f:
        f.write(
            generate_framework(name, framework,
                               isfile(
                                   join(frameworks_dir,
                                        "%s_extra.rst" % name))))


def update_create_platform_doc():
    lines = []
    lines.append(""".. _platform_creating_packages:

Packages
--------

*PlatformIO* has pre-built packages for the most popular operation systems:
*Mac OS*, *Linux (+ARM)* and *Windows*.

.. list-table::
    :header-rows: 1

    * - Name
      - Description""")
    for name, items in sorted(API_PACKAGES.iteritems()):
        lines.append("""
    * - `{name} <{url}>`__
      - {description}""".format(
            name=name,
            url=API_PACKAGES[name]['url'],
            description=API_PACKAGES[name]['description']))

    with open(
            join(util.get_source_dir(), "..", "docs", "platforms",
                 "creating_platform.rst"), "r+") as fp:
        content = fp.read()
        fp.seek(0)
        fp.truncate()
        fp.write(content[:content.index(".. _platform_creating_packages:")] +
                 "\n".join(lines) + "\n\n" + content[content.index(
                     ".. _platform_creating_manifest_file:"):])
        f.write(generate_framework(name, framework, frameworks_dir))


def update_embedded_boards():
@ -617,12 +602,20 @@ popular embedded boards and IDE.


def update_debugging():
    tools_to_platforms = {}
    vendors = {}
    platforms = []
    frameworks = []
    for data in BOARDS:
        if not data['debug']:
            continue

        for tool in data['debug']['tools']:
            tool = str(tool)
            if tool not in tools_to_platforms:
                tools_to_platforms[tool] = []
            tools_to_platforms[tool].append(data['platform'])

        platforms.append(data['platform'])
        frameworks.extend(data['frameworks'])
        vendor = data['vendor']
@ -631,6 +624,24 @@ def update_debugging():
        else:
            vendors[vendor] = [data]

    def _update_tool_compat_platforms(content):
        begin_tpl = ".. begin_compatible_platforms_"
        end_tpl = ".. end_compatible_platforms_"
        for tool, platforms in tools_to_platforms.items():
            begin = begin_tpl + tool
            end = end_tpl + tool
            begin_index = content.index(begin)
            end_index = content.index(end)
            chunk = ["\n\n:Compatible Platforms:\n"]
            chunk.extend([
                " * :ref:`platform_%s`" % str(p)
                for p in sorted(set(platforms))
            ])
            chunk.extend(["\n"])
            content = content[:begin_index + len(begin)] + "\n".join(
                chunk) + content[end_index:]
        return content
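A toy illustration of what the new _update_tool_compat_platforms helper does (the tool and platform names are example values, not from this commit): everything between a tool's begin/end RST comment markers is replaced with a freshly generated "Compatible Platforms" list, and the markers themselves survive so the next run can splice again.

content = (".. begin_compatible_platforms_stlink\n"
           "old generated text\n"
           ".. end_compatible_platforms_stlink\n")
begin = ".. begin_compatible_platforms_stlink"
end = ".. end_compatible_platforms_stlink"
chunk = ["\n\n:Compatible Platforms:\n", " * :ref:`platform_ststm32`", "\n"]
content = content[:content.index(begin) + len(begin)] + "\n".join(
    chunk) + content[content.index(end):]
print content  # markers kept, text between them regenerated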

    lines = []
    # Platforms
    lines.append(""".. _debugging_platforms:
@ -684,27 +695,83 @@ Boards
    with open(
            join(util.get_source_dir(), "..", "docs", "plus", "debugging.rst"),
            "r+") as fp:
        content = fp.read()
        content = _update_tool_compat_platforms(fp.read())
        fp.seek(0)
        fp.truncate()
        fp.write(content[:content.index(".. _debugging_platforms:")] +
                 "\n".join(lines))


def update_examples_readme():
    examples_dir = join(util.get_source_dir(), "..", "examples")
def update_project_examples():
    platform_readme_tpl = """
# {title}: development platform for [PlatformIO](https://platformio.org)

    # Platforms
{description}

* [Home](https://platformio.org/platforms/{name}) (home page in PlatformIO Registry)
* [Documentation](http://docs.platformio.org/page/platforms/{name}.html) (advanced usage, packages, boards, frameworks, etc.)

# Examples

{examples}
"""
    framework_readme_tpl = """
# {title}: framework for [PlatformIO](https://platformio.org)

{description}

* [Home](https://platformio.org/frameworks/{name}) (home page in PlatformIO Registry)
* [Documentation](http://docs.platformio.org/page/frameworks/{name}.html)

# Examples

{examples}
"""

    project_examples_dir = join(util.get_source_dir(), "..", "examples")
    framework_examples_md_lines = {}
    embedded = []
    desktop = []

    for manifest in PLATFORM_MANIFESTS:
        p = PlatformFactory.newPlatform(manifest['name'])
        url = campaign_url(
            "http://docs.platformio.org/en/latest/platforms/%s.html#examples" %
            p.name,
            source="github",
            medium="examples")
        line = "* [%s](%s)" % (p.title, url)
        github_url = p.repository_url[:-4]

        # Platform README
        platform_examples_dir = join(p.get_dir(), "examples")
        examples_md_lines = []
        if isdir(platform_examples_dir):
            for item in os.listdir(platform_examples_dir):
                if not isdir(join(platform_examples_dir, item)):
                    continue
                url = "%s/tree/master/examples/%s" % (github_url, item)
                examples_md_lines.append("* [%s](%s)" % (item, url))

        readme_dir = join(project_examples_dir, "platforms", p.name)
        if not isdir(readme_dir):
            os.makedirs(readme_dir)
        with open(join(readme_dir, "README.md"), "w") as fp:
            fp.write(
                platform_readme_tpl.format(
                    name=p.name,
                    title=p.title,
                    description=p.description,
                    examples="\n".join(examples_md_lines)))

        # Framework README
        for framework in API_FRAMEWORKS:
            if not is_compat_platform_and_framework(p.name, framework['name']):
                continue
            if framework['name'] not in framework_examples_md_lines:
                framework_examples_md_lines[framework['name']] = []
            lines = []
            lines.append("- [%s](%s)" % (p.title, github_url))
            lines.extend(" %s" % l for l in examples_md_lines)
            lines.append("")
            framework_examples_md_lines[framework['name']].extend(lines)
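If this loop behaves as sketched, framework_examples_md_lines ends up mapping each compatible framework name to nested Markdown bullet lines: a platform link followed by that platform's example links indented beneath it. A rough sketch of the expected shape, with hypothetical entries:

framework_examples_md_lines = {
    "arduino": [
        "- [ST STM32](https://github.com/platformio/platform-ststm32)",
        " * [arduino-blink](https://github.com/platformio/platform-ststm32/tree/master/examples/arduino-blink)",
        "",
    ],
}
print "\n".join(framework_examples_md_lines["arduino"])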

        # Root README
        line = "* [%s](%s)" % (p.title, "%s/tree/master/examples" % github_url)
        if p.is_embedded():
            embedded.append(line)
        else:
@ -713,6 +780,18 @@ def update_examples_readme():
    # Frameworks
    frameworks = []
    for framework in API_FRAMEWORKS:
        readme_dir = join(project_examples_dir, "frameworks",
                          framework['name'])
        if not isdir(readme_dir):
            os.makedirs(readme_dir)
        with open(join(readme_dir, "README.md"), "w") as fp:
            fp.write(
                framework_readme_tpl.format(
                    name=framework['name'],
                    title=framework['title'],
                    description=framework['description'],
                    examples="\n".join(
                        framework_examples_md_lines[framework['name']])))
        url = campaign_url(
            "http://docs.platformio.org/en/latest/frameworks/%s.html#examples"
            % framework['name'],
@ -720,7 +799,7 @@ def update_examples_readme():
            medium="examples")
        frameworks.append("* [%s](%s)" % (framework['title'], url))

    with open(join(examples_dir, "README.md"), "w") as fp:
    with open(join(project_examples_dir, "README.md"), "w") as fp:
        fp.write("""# PlatformIO Project Examples

- [Development platforms](#development-platforms):
@ -745,12 +824,11 @@ def update_examples_readme():


def main():
    update_create_platform_doc()
    update_platform_docs()
    update_framework_docs()
    update_embedded_boards()
    update_debugging()
    update_examples_readme()
    update_project_examples()


if __name__ == "__main__":

1 setup.py
@ -21,7 +21,6 @@ install_requires = [
    "bottle<0.13",
    "click>=5,<6",
    "colorama",
    "lockfile>=0.9.1,<0.13",
    "pyserial>=3,<4,!=3.3",
    "requests>=2.4.0,<3",
    "semantic_version>=2.5.0,<3"

@ -166,14 +166,12 @@ def test_global_lib_list(clirunner, validate_cliresult):
    assert sorted(items1) == sorted(items2)

    versions1 = [
        "{name}@{version}".format(**item)
        for item in json.loads(result.output)
        "{name}@{version}".format(**item) for item in json.loads(result.output)
    ]
    versions2 = [
        'ArduinoJson@5.8.2', 'ArduinoJson@5.10.1', 'AsyncMqttClient@0.8.2',
        'AsyncTCP@1.0.1', 'ESPAsyncTCP@1.1.3', 'NeoPixelBus@2.2.4',
        'PJON@07fe9aa', 'PJON@1fb26fd', 'PubSubClient@bef5814',
        'RFcontrol@77d4eb3f8a', 'RadioHead-1.62@0.0.0'
        'AsyncTCP@1.0.1', 'NeoPixelBus@2.2.4', 'PJON@07fe9aa', 'PJON@1fb26fd',
        'PubSubClient@bef5814', 'RFcontrol@77d4eb3f8a', 'RadioHead-1.62@0.0.0'
    ]
    assert set(versions1) >= set(versions2)

@ -42,10 +42,15 @@ def pytest_generate_tests(metafunc):

    project_dirs = []
    for examples_dir in examples_dirs:
        platform_examples = []
        for root, _, files in walk(examples_dir):
            if "platformio.ini" not in files or ".skiptest" in files:
                continue
            project_dirs.append(root)
            platform_examples.append(root)

        # test random 3 examples
        random.shuffle(platform_examples)
        project_dirs.extend(platform_examples[:3])
    project_dirs.sort()
    metafunc.parametrize("pioproject_dir", project_dirs)
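The change above trims test time: instead of collecting every example project, each platform's examples are shuffled and at most three are kept. A self-contained sketch of that selection logic (directory names are made up):

import random

examples_by_platform = {
    "platform-a": ["blink", "http-client", "rtos", "uart", "wifi"],
    "platform-b": ["blink"],
}
project_dirs = []
for platform_examples in examples_by_platform.values():
    random.shuffle(platform_examples)
    project_dirs.extend(platform_examples[:3])
project_dirs.sort()
print project_dirs  # at most three randomly chosen examples per platform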