Merge branch 'release/v3.5.3'

This commit is contained in:
Ivan Kravets
2018-06-01 17:01:46 +03:00
37 changed files with 616 additions and 299 deletions

1
.gitignore vendored
View File

@ -9,3 +9,4 @@ build
coverage.xml coverage.xml
.coverage .coverage
htmlcov htmlcov
.pytest_cache

View File

@ -4,6 +4,38 @@ Release Notes
PlatformIO 3.0 PlatformIO 3.0
-------------- --------------
3.5.3 (2018-06-01)
~~~~~~~~~~~~~~~~~~
* `PlatformIO Home <http://docs.platformio.org/page/home/index.html>`__ -
interact with PlatformIO ecosystem using modern and cross-platform GUI:
- "Recent News" block on "Welcome" page
- Direct import of development platform's example
* Simplify configuration for `PIO Unit Testing <http://docs.platformio.org/page/plus/unit-testing.html>`__: separate main program from a test build process, drop
requirement for ``#ifdef UNIT_TEST`` guard
* Override any option from board manifest in `Project Configuration File "platformio.ini" <http://docs.platformio.org/page/projectconf/section_env_board.html#more-options>`__
(`issue #1612 <https://github.com/platformio/platformio-core/issues/1612>`_)
* Configure a custom path to SVD file using `debug_svd_path <http://docs.platformio.org/page/projectconf/section_env_debug.html#debug-svd-path>`__
option
* Custom project `description <http://docs.platformio.org/en/latest/projectconf/section_platformio.html#description>`_
which will be used by `PlatformIO Home <http://docs.platformio.org/page/home/index.html>`_
* Updated Unity tool to 2.4.3
* Improved support for Black Magic Probe in "uploader" mode
* Renamed "monitor_baud" option to "monitor_speed"
* Fixed issue when a custom `lib_dir <http://docs.platformio.org/page/projectconf/section_platformio.html#lib-dir>`__
was not handled correctly
(`issue #1473 <https://github.com/platformio/platformio-core/issues/1473>`_)
* Fixed issue with useless project rebuilding for case insensitive file
systems (Windows)
* Fixed issue with ``build_unflags`` option when a macro contains value
(e.g., ``-DNAME=VALUE``)
* Fixed issue which did not allow to override runtime build environment using
extra POST script
* Fixed "RuntimeError: maximum recursion depth exceeded" for library manager
(`issue #1528 <https://github.com/platformio/platformio-core/issues/1528>`_)
3.5.2 (2018-03-13) 3.5.2 (2018-03-13)
~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~

2
docs

Submodule docs updated: e9e78d043e...3ad76be8f7

View File

@ -14,7 +14,7 @@
import sys import sys
VERSION = (3, 5, 2) VERSION = (3, 5, 3)
__version__ = ".".join([str(s) for s in VERSION]) __version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio" __title__ = "platformio"

View File

@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import codecs
import hashlib import hashlib
import json import json
import os import os
@ -106,7 +107,7 @@ class State(object):
def __exit__(self, type_, value, traceback): def __exit__(self, type_, value, traceback):
if self._prev_state != self._state: if self._prev_state != self._state:
try: try:
with open(self.path, "w") as fp: with codecs.open(self.path, "w", encoding="utf8") as fp:
if "dev" in __version__: if "dev" in __version__:
json.dump(self._state, fp, indent=4) json.dump(self._state, fp, indent=4)
else: else:
@ -187,11 +188,8 @@ class ContentCache(object):
cache_path = self.get_cache_path(key) cache_path = self.get_cache_path(key)
if not isfile(cache_path): if not isfile(cache_path):
return None return None
with open(cache_path, "rb") as fp: with codecs.open(cache_path, "rb", encoding="utf8") as fp:
data = fp.read() return fp.read()
if data and data[0] in ("{", "["):
return json.loads(data)
return data
def set(self, key, data, valid): def set(self, key, data, valid):
if not get_setting("enable_cache"): if not get_setting("enable_cache"):
@ -212,13 +210,17 @@ class ContentCache(object):
if not isdir(dirname(cache_path)): if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path)) os.makedirs(dirname(cache_path))
with open(cache_path, "wb") as fp: try:
if isinstance(data, (dict, list)): with codecs.open(cache_path, "wb", encoding="utf8") as fp:
json.dump(data, fp) fp.write(data)
else: with open(self._db_path, "a") as fp:
fp.write(str(data)) fp.write("%s=%s\n" % (str(expire_time), cache_path))
with open(self._db_path, "a") as fp: except UnicodeError:
fp.write("%s=%s\n" % (str(expire_time), cache_path)) if isfile(cache_path):
try:
remove(cache_path)
except OSError:
pass
return self._unlock_dbindex() return self._unlock_dbindex()

View File

@ -54,10 +54,12 @@ commonvars.AddVariables(
# board options # board options
("BOARD",), ("BOARD",),
# deprecated options, use board_{object.path} instead
("BOARD_MCU",), ("BOARD_MCU",),
("BOARD_F_CPU",), ("BOARD_F_CPU",),
("BOARD_F_FLASH",), ("BOARD_F_FLASH",),
("BOARD_FLASH_MODE",), ("BOARD_FLASH_MODE",),
# end of deprecated options
# upload options # upload options
("UPLOAD_PORT",), ("UPLOAD_PORT",),
@ -68,7 +70,7 @@ commonvars.AddVariables(
# debug options # debug options
("DEBUG_TOOL",), ("DEBUG_TOOL",),
("DEBUG_SVD_PATH",),
) # yapf: disable ) # yapf: disable
@ -99,6 +101,7 @@ DEFAULT_ENV_OPTIONS = dict(
BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"), BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"),
BUILDSRC_DIR=join("$BUILD_DIR", "src"), BUILDSRC_DIR=join("$BUILD_DIR", "src"),
BUILDTEST_DIR=join("$BUILD_DIR", "test"), BUILDTEST_DIR=join("$BUILD_DIR", "test"),
LIBPATH=["$BUILD_DIR"],
LIBSOURCE_DIRS=[ LIBSOURCE_DIRS=[
util.get_projectlib_dir(), util.get_projectlib_dir(),
util.get_projectlibdeps_dir(), util.get_projectlibdeps_dir(),
@ -156,7 +159,7 @@ env.LoadPioPlatform(commonvars)
env.SConscriptChdir(0) env.SConscriptChdir(0)
env.SConsignFile(join("$PROJECTBUILD_DIR", ".sconsign.dblite")) env.SConsignFile(join("$PROJECTBUILD_DIR", ".sconsign.dblite"))
for item in env.GetPreExtraScripts(): for item in env.GetExtraScripts("pre"):
env.SConscript(item, exports="env") env.SConscript(item, exports="env")
env.SConscript("$BUILD_SCRIPT") env.SConscript("$BUILD_SCRIPT")
@ -165,9 +168,9 @@ AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS + ["size"]))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"])) AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))
if "UPLOAD_FLAGS" in env: if "UPLOAD_FLAGS" in env:
env.Append(UPLOADERFLAGS=["$UPLOAD_FLAGS"]) env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
for item in env.GetPostExtraScripts(): for item in env.GetExtraScripts("post"):
env.SConscript(item, exports="env") env.SConscript(item, exports="env")
if "envdump" in COMMAND_LINE_TARGETS: if "envdump" in COMMAND_LINE_TARGETS:

View File

@ -16,7 +16,7 @@ from __future__ import absolute_import
from glob import glob from glob import glob
from os import environ from os import environ
from os.path import join from os.path import abspath, isfile, join
from SCons.Defaults import processDefines from SCons.Defaults import processDefines
@ -53,11 +53,11 @@ def _dump_includes(env):
if unity_dir: if unity_dir:
includes.append(unity_dir) includes.append(unity_dir)
# remove dupicates # remove duplicates
result = [] result = []
for item in includes: for item in includes:
if item not in result: if item not in result:
result.append(item) result.append(abspath(item))
return result return result
@ -101,12 +101,34 @@ def _dump_defines(env):
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny"))) .replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
# built-in GCC marcos # built-in GCC marcos
if env.GetCompilerType() == "gcc": # if env.GetCompilerType() == "gcc":
defines.extend(_get_gcc_defines(env)) # defines.extend(_get_gcc_defines(env))
return defines return defines
def _get_svd_path(env):
svd_path = env.subst("$DEBUG_SVD_PATH")
if svd_path:
return abspath(svd_path)
if "BOARD" not in env:
return None
try:
svd_path = env.BoardConfig().get("debug.svd_path")
assert svd_path
except (AssertionError, KeyError):
return None
# custom path to SVD file
if isfile(svd_path):
return svd_path
# default file from ./platform/misc/svd folder
p = env.PioPlatform()
if isfile(join(p.get_dir(), "misc", "svd", svd_path)):
return abspath(join(p.get_dir(), "misc", "svd", svd_path))
return None
def DumpIDEData(env): def DumpIDEData(env):
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS" LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS" LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
@ -130,6 +152,8 @@ def DumpIDEData(env):
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")), util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path": "prog_path":
env.subst("$PROG_PATH"), env.subst("$PROG_PATH"),
"svd_path":
_get_svd_path(env),
"compiler_type": "compiler_type":
env.GetCompilerType() env.GetCompilerType()
} }

View File

@ -27,7 +27,7 @@ from os.path import (basename, commonprefix, dirname, isdir, isfile, join,
import SCons.Scanner import SCons.Scanner
from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment
from platformio import util from platformio import exception, util
from platformio.builder.tools import platformio as piotool from platformio.builder.tools import platformio as piotool
from platformio.managers.lib import LibraryManager from platformio.managers.lib import LibraryManager
from platformio.managers.package import PackageManager from platformio.managers.package import PackageManager
@ -86,8 +86,8 @@ class LibBuilderBase(object):
LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"] LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"]
LDF_MODE_DEFAULT = "chain" LDF_MODE_DEFAULT = "chain"
COMPAT_MODES = ["off", "light", "strict"] COMPAT_MODES = ["off", "soft", "strict"]
COMPAT_MODE_DEFAULT = "light" COMPAT_MODE_DEFAULT = "soft"
CLASSIC_SCANNER = SCons.Scanner.C.CScanner() CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner() CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
@ -758,7 +758,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
sys.stderr.write( sys.stderr.write(
"Platform incompatible library %s\n" % lb.path) "Platform incompatible library %s\n" % lb.path)
return False return False
if compat_mode == "light" and "PIOFRAMEWORK" in env and \ if compat_mode == "soft" and "PIOFRAMEWORK" in env and \
not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", [])): not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", [])):
if verbose: if verbose:
sys.stderr.write( sys.stderr.write(
@ -777,7 +777,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
try: try:
lb = LibBuilderFactory.new( lb = LibBuilderFactory.new(
env, join(libs_dir, item), verbose=verbose) env, join(libs_dir, item), verbose=verbose)
except ValueError: except exception.InvalidJSONFile:
if verbose: if verbose:
sys.stderr.write("Skip library with broken manifest: %s\n" sys.stderr.write("Skip library with broken manifest: %s\n"
% join(libs_dir, item)) % join(libs_dir, item))

View File

@ -18,7 +18,7 @@ import atexit
import re import re
import sys import sys
from os import environ, remove, walk from os import environ, remove, walk
from os.path import basename, isdir, isfile, join, relpath, sep from os.path import basename, isdir, isfile, join, realpath, relpath, sep
from tempfile import mkstemp from tempfile import mkstemp
from SCons.Action import Action from SCons.Action import Action
@ -199,7 +199,7 @@ def _delete_file(path):
pass pass
@util.memoized @util.memoized()
def _get_compiler_type(env): def _get_compiler_type(env):
try: try:
sysenv = environ.copy() sysenv = environ.copy()
@ -295,25 +295,21 @@ def ProcessTest(env):
src_filter.append("+<%s%s>" % (env['PIOTEST'], sep)) src_filter.append("+<%s%s>" % (env['PIOTEST'], sep))
env.Replace(PIOTEST_SRC_FILTER=src_filter) env.Replace(PIOTEST_SRC_FILTER=src_filter)
return env.CollectBuildFiles( return env.CollectBuildFiles("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
"$BUILDTEST_DIR", "$PIOTEST_SRC_FILTER")
"$PROJECTTEST_DIR",
"$PIOTEST_SRC_FILTER",
duplicate=False)
def GetPreExtraScripts(env): def GetExtraScripts(env, scope):
return [ items = []
item[4:] for item in env.get("EXTRA_SCRIPTS", []) for item in env.get("EXTRA_SCRIPTS", []):
if item.startswith("pre:") if scope == "post" and ":" not in item:
] items.append(item)
elif item.startswith("%s:" % scope):
items.append(item[len(scope) + 1:])
def GetPostExtraScripts(env): if not items:
return [ return items
item[5:] if item.startswith("post:") else item with util.cd(env.subst("$PROJECT_DIR")):
for item in env.get("EXTRA_SCRIPTS", []) if not item.startswith("pre:") return [realpath(item) for item in items]
]
def exists(_): def exists(_):
@ -328,6 +324,5 @@ def generate(env):
env.AddMethod(PioClean) env.AddMethod(PioClean)
env.AddMethod(ProcessDebug) env.AddMethod(ProcessDebug)
env.AddMethod(ProcessTest) env.AddMethod(ProcessTest)
env.AddMethod(GetPreExtraScripts) env.AddMethod(GetExtraScripts)
env.AddMethod(GetPostExtraScripts)
return env return env

View File

@ -14,6 +14,7 @@
from __future__ import absolute_import from __future__ import absolute_import
import base64
import sys import sys
from os.path import isdir, isfile, join from os.path import isdir, isfile, join
@ -22,8 +23,10 @@ from SCons.Script import COMMAND_LINE_TARGETS
from platformio import exception, util from platformio import exception, util
from platformio.managers.platform import PlatformFactory from platformio.managers.platform import PlatformFactory
# pylint: disable=too-many-branches
@util.memoized
@util.memoized()
def initPioPlatform(name): def initPioPlatform(name):
return PlatformFactory.newPlatform(name) return PlatformFactory.newPlatform(name)
@ -69,7 +72,7 @@ def LoadPioPlatform(env, variables):
# Add toolchains and uploaders to $PATH # Add toolchains and uploaders to $PATH
for name in installed_packages: for name in installed_packages:
type_ = p.get_package_type(name) type_ = p.get_package_type(name)
if type_ not in ("toolchain", "uploader"): if type_ not in ("toolchain", "uploader", "debugger"):
continue continue
path = p.get_package_dir(name) path = p.get_package_dir(name)
if isdir(join(path, "bin")): if isdir(join(path, "bin")):
@ -81,24 +84,37 @@ def LoadPioPlatform(env, variables):
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")]) env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
if "BOARD" not in env: if "BOARD" not in env:
# handle _MCU and _F_CPU variables for AVR native
for key, value in variables.UnknownVariables().items():
if not key.startswith("BOARD_"):
continue
env.Replace(
**{key.upper().replace("BUILD.", ""): base64.b64decode(value)})
return return
# update board manifest with a custom data
board_config = env.BoardConfig() board_config = env.BoardConfig()
for k in variables.keys(): for key, value in variables.UnknownVariables().items():
if k in env or \ if not key.startswith("BOARD_"):
not any([k.startswith("BOARD_"), k.startswith("UPLOAD_")]):
continue continue
_opt, _val = k.lower().split("_", 1) board_config.update(key.lower()[6:], base64.b64decode(value))
# update default environment variables
for key in variables.keys():
if key in env or \
not any([key.startswith("BOARD_"), key.startswith("UPLOAD_")]):
continue
_opt, _val = key.lower().split("_", 1)
if _opt == "board": if _opt == "board":
_opt = "build" _opt = "build"
if _val in board_config.get(_opt): if _val in board_config.get(_opt):
env.Replace(**{k: board_config.get("%s.%s" % (_opt, _val))}) env.Replace(**{key: board_config.get("%s.%s" % (_opt, _val))})
if "build.ldscript" in board_config: if "build.ldscript" in board_config:
env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript")) env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript"))
def PrintConfiguration(env): # pylint: disable=too-many-branches def PrintConfiguration(env):
platform_data = ["PLATFORM: %s >" % env.PioPlatform().title] platform_data = ["PLATFORM: %s >" % env.PioPlatform().title]
system_data = ["SYSTEM:"] system_data = ["SYSTEM:"]
mcu = env.subst("$BOARD_MCU") mcu = env.subst("$BOARD_MCU")

View File

@ -130,10 +130,12 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
if not _is_match_pattern(item['port']): if not _is_match_pattern(item['port']):
continue continue
port = item['port'] port = item['port']
if upload_protocol.startswith("blackmagic") \ if upload_protocol.startswith("blackmagic"):
and "GDB" in item['description']: if "windows" in util.get_systype() and \
return ("\\\\.\\%s" % port if "windows" in util.get_systype() port.startswith("COM") and len(port) > 4:
and port.startswith("COM") and len(port) > 4 else port) port = "\\\\.\\%s" % port
if "GDB" in item['description']:
return port
for hwid in board_hwids: for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "") hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item['hwid']: if hwid_str in item['hwid']:
@ -220,7 +222,7 @@ def PrintUploadInfo(env):
available.extend(env.BoardConfig().get("upload", {}).get( available.extend(env.BoardConfig().get("upload", {}).get(
"protocols", [])) "protocols", []))
if available: if available:
print "AVAILABLE: %s" % ", ".join(sorted(available)) print "AVAILABLE: %s" % ", ".join(sorted(set(available)))
if configured: if configured:
print "CURRENT: upload_protocol = %s" % configured print "CURRENT: upload_protocol = %s" % configured

View File

@ -20,7 +20,7 @@ from glob import glob
from os import sep, walk from os import sep, walk
from os.path import basename, dirname, isdir, join, realpath from os.path import basename, dirname, isdir, join, realpath
from SCons import Action, Builder, Util from SCons import Builder, Util
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild,
DefaultEnvironment, SConscript) DefaultEnvironment, SConscript)
@ -30,12 +30,11 @@ SRC_HEADER_EXT = ["h", "hpp"]
SRC_C_EXT = ["c", "cc", "cpp"] SRC_C_EXT = ["c", "cc", "cpp"]
SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"] SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep] SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
def scons_patched_match_splitext(path, suffixes=None): def scons_patched_match_splitext(path, suffixes=None):
""" """Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
Patch SCons Builder, append $OBJSUFFIX to the end of each target
"""
tokens = Util.splitext(path) tokens = Util.splitext(path)
if suffixes and tokens[1] and tokens[1] in suffixes: if suffixes and tokens[1] and tokens[1] in suffixes:
return (path, tokens[1]) return (path, tokens[1])
@ -63,8 +62,6 @@ def BuildProgram(env):
# process extra flags from board # process extra flags from board
if "BOARD" in env and "build.extra_flags" in env.BoardConfig(): if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
env.ProcessFlags(env.BoardConfig().get("build.extra_flags")) env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
# remove base flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
# apply user flags # apply user flags
env.ProcessFlags(env.get("BUILD_FLAGS")) env.ProcessFlags(env.get("BUILD_FLAGS"))
@ -74,6 +71,9 @@ def BuildProgram(env):
# restore PIO macros if it was deleted by framework # restore PIO macros if it was deleted by framework
_append_pio_macros() _append_pio_macros()
# remove specified flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
# build dependent libs; place them before built-in libs # build dependent libs; place them before built-in libs
env.Prepend(LIBS=env.BuildProjectLibraries()) env.Prepend(LIBS=env.BuildProjectLibraries())
@ -90,16 +90,14 @@ def BuildProgram(env):
# Handle SRC_BUILD_FLAGS # Handle SRC_BUILD_FLAGS
env.ProcessFlags(env.get("SRC_BUILD_FLAGS")) env.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
env.Append(
LIBPATH=["$BUILD_DIR"],
PIOBUILDFILES=env.CollectBuildFiles(
"$BUILDSRC_DIR",
"$PROJECTSRC_DIR",
src_filter=env.get("SRC_FILTER"),
duplicate=False))
if "__test" in COMMAND_LINE_TARGETS: if "__test" in COMMAND_LINE_TARGETS:
env.Append(PIOBUILDFILES=env.ProcessTest()) env.Append(PIOBUILDFILES=env.ProcessTest())
else:
env.Append(
PIOBUILDFILES=env.CollectBuildFiles(
"$BUILDSRC_DIR",
"$PROJECTSRC_DIR",
src_filter=env.get("SRC_FILTER")))
if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS: if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS:
sys.stderr.write( sys.stderr.write(
@ -110,8 +108,8 @@ def BuildProgram(env):
program = env.Program( program = env.Program(
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES']) join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
checksize_action = Action.Action(env.CheckUploadSize, checksize_action = env.VerboseAction(env.CheckUploadSize,
"Checking program size") "Checking program size")
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action)) AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS): if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
env.AddPostAction(program, checksize_action) env.AddPostAction(program, checksize_action)
@ -119,38 +117,47 @@ def BuildProgram(env):
return program return program
def ProcessFlags(env, flags): # pylint: disable=too-many-branches def ParseFlagsExtended(env, flags):
if not flags:
return
if isinstance(flags, list): if isinstance(flags, list):
flags = " ".join(flags) flags = " ".join(flags)
parsed_flags = env.ParseFlags(str(flags)) result = env.ParseFlags(str(flags))
for flag in parsed_flags.pop("CPPDEFINES"):
if not Util.is_Sequence(flag): cppdefines = []
env.Append(CPPDEFINES=flag) for item in result['CPPDEFINES']:
if not Util.is_Sequence(item):
cppdefines.append(item)
continue continue
_key, _value = flag[:2] name, value = item[:2]
if '\"' in _value: if '\"' in value:
_value = _value.replace('\"', '\\\"') value = value.replace('\"', '\\\"')
elif _value.isdigit(): elif value.isdigit():
_value = int(_value) value = int(value)
elif _value.replace(".", "", 1).isdigit(): elif value.replace(".", "", 1).isdigit():
_value = float(_value) value = float(value)
env.Append(CPPDEFINES=(_key, _value)) cppdefines.append((name, value))
env.Append(**parsed_flags) result['CPPDEFINES'] = cppdefines
# fix relative CPPPATH & LIBPATH # fix relative CPPPATH & LIBPATH
for k in ("CPPPATH", "LIBPATH"): for k in ("CPPPATH", "LIBPATH"):
for i, p in enumerate(env.get(k, [])): for i, p in enumerate(result.get(k, [])):
if isdir(p): if isdir(p):
env[k][i] = realpath(p) result[k][i] = realpath(p)
# fix relative path for "-include" # fix relative path for "-include"
for i, f in enumerate(env.get("CCFLAGS", [])): for i, f in enumerate(result.get("CCFLAGS", [])):
if isinstance(f, tuple) and f[0] == "-include": if isinstance(f, tuple) and f[0] == "-include":
env['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path()))) result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
return result
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
if not flags:
return
env.Append(**env.ParseFlagsExtended(flags))
# Cancel any previous definition of name, either built in or # Cancel any previous definition of name, either built in or
# provided with a -D option // Issue #191 # provided with a -U option // Issue #191
undefines = [ undefines = [
u for u in env.get("CCFLAGS", []) u for u in env.get("CCFLAGS", [])
if isinstance(u, basestring) and u.startswith("-U") if isinstance(u, basestring) and u.startswith("-U")
@ -164,19 +171,16 @@ def ProcessFlags(env, flags): # pylint: disable=too-many-branches
def ProcessUnFlags(env, flags): def ProcessUnFlags(env, flags):
if not flags: if not flags:
return return
if isinstance(flags, list): for key, unflags in env.ParseFlagsExtended(flags).items():
flags = " ".join(flags) for unflag in unflags:
parsed_flags = env.ParseFlags(str(flags)) for current in env.get(key, []):
all_flags = [] conditions = [
for items in parsed_flags.values(): unflag == current,
all_flags.extend(items) isinstance(current, (tuple, list))
all_flags = set(all_flags) and unflag[0] == current[0]
]
for key in parsed_flags: if any(conditions):
cur_flags = set(env.Flatten(env.get(key, []))) env[key].remove(current)
for item in cur_flags & all_flags:
while item in env[key]:
env[key].remove(item)
def IsFileWithExt(env, file_, ext): # pylint: disable=W0613 def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
@ -190,8 +194,6 @@ def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
def MatchSourceFiles(env, src_dir, src_filter=None): def MatchSourceFiles(env, src_dir, src_filter=None):
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
def _append_build_item(items, item, src_dir): def _append_build_item(items, item, src_dir):
if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT): if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
items.add(item.replace(src_dir + sep, "")) items.add(item.replace(src_dir + sep, ""))
@ -281,15 +283,14 @@ def BuildFrameworks(env, frameworks):
def BuildLibrary(env, variant_dir, src_dir, src_filter=None): def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
lib = env.Clone() return env.StaticLibrary(
return lib.StaticLibrary( env.subst(variant_dir),
lib.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter))
lib.CollectBuildFiles(variant_dir, src_dir, src_filter))
def BuildSources(env, variant_dir, src_dir, src_filter=None): def BuildSources(env, variant_dir, src_dir, src_filter=None):
DefaultEnvironment().Append(PIOBUILDFILES=env.Clone().CollectBuildFiles( DefaultEnvironment().Append(
variant_dir, src_dir, src_filter)) PIOBUILDFILES=env.CollectBuildFiles(variant_dir, src_dir, src_filter))
def exists(_): def exists(_):
@ -298,6 +299,7 @@ def exists(_):
def generate(env): def generate(env):
env.AddMethod(BuildProgram) env.AddMethod(BuildProgram)
env.AddMethod(ParseFlagsExtended)
env.AddMethod(ProcessFlags) env.AddMethod(ProcessFlags)
env.AddMethod(ProcessUnFlags) env.AddMethod(ProcessUnFlags)
env.AddMethod(IsFileWithExt) env.AddMethod(IsFileWithExt)

View File

@ -165,8 +165,10 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
kwargs['environment']) kwargs['environment'])
monitor_options = {k: v for k, v in project_options or []} monitor_options = {k: v for k, v in project_options or []}
if monitor_options: if monitor_options:
for k in ("port", "baud", "rts", "dtr"): for k in ("port", "baud", "speed", "rts", "dtr"):
k2 = "monitor_%s" % k k2 = "monitor_%s" % k
if k == "speed":
k = "baud"
if kwargs[k] is None and k2 in monitor_options: if kwargs[k] is None and k2 in monitor_options:
kwargs[k] = monitor_options[k2] kwargs[k] = monitor_options[k2]
if k != "port": if k != "port":

View File

@ -139,15 +139,12 @@ def init_base_project(project_dir):
join(util.get_source_dir(), "projectconftpl.ini"), join(util.get_source_dir(), "projectconftpl.ini"),
join(project_dir, "platformio.ini")) join(project_dir, "platformio.ini"))
lib_dir = join(project_dir, "lib") with util.cd(project_dir):
src_dir = join(project_dir, "src") lib_dir = util.get_projectlib_dir()
config = util.load_project_config(project_dir) src_dir = util.get_projectsrc_dir()
if config.has_option("platformio", "src_dir"): for d in (src_dir, lib_dir):
src_dir = join(project_dir, config.get("platformio", "src_dir")) if not isdir(d):
makedirs(d)
for d in (src_dir, lib_dir):
if not isdir(d):
makedirs(d)
init_lib_readme(lib_dir) init_lib_readme(lib_dir)
init_ci_conf(project_dir) init_ci_conf(project_dir)
@ -168,16 +165,21 @@ The source code of each library should be placed in separate directory, like
For example, see how can be organized `Foo` and `Bar` libraries: For example, see how can be organized `Foo` and `Bar` libraries:
|--lib |--lib
| |
| |--Bar | |--Bar
| | |--docs | | |--docs
| | |--examples | | |--examples
| | |--src | | |--src
| | |- Bar.c | | |- Bar.c
| | |- Bar.h | | |- Bar.h
| | |- library.json (optional, custom build options, etc) http://docs.platformio.org/page/librarymanager/config.html
| |
| |--Foo | |--Foo
| | |- Foo.c | | |- Foo.c
| | |- Foo.h | | |- Foo.h
| |
| |- readme.txt --> THIS FILE | |- readme.txt --> THIS FILE
|
|- platformio.ini |- platformio.ini
|--src |--src
|- main.c |- main.c

View File

@ -255,9 +255,10 @@ def lib_search(query, json_output, page, noninteractive, **filters):
elif not click.confirm("Show next libraries?"): elif not click.confirm("Show next libraries?"):
break break
result = get_api_result( result = get_api_result(
"/v2/lib/search", "/v2/lib/search", {
{"query": " ".join(query), "query": " ".join(query),
"page": int(result['page']) + 1}, "page": int(result['page']) + 1
},
cache_valid="1d") cache_valid="1d")

View File

@ -85,6 +85,7 @@ def _get_installed_platform_data(platform,
homepage=p.homepage, homepage=p.homepage,
repository=p.repository_url, repository=p.repository_url,
url=p.vendor_url, url=p.vendor_url,
docs=p.docs_url,
license=p.license, license=p.license,
forDesktop=not p.is_embedded(), forDesktop=not p.is_embedded(),
frameworks=sorted(p.frameworks.keys() if p.frameworks else []), frameworks=sorted(p.frameworks.keys() if p.frameworks else []),

View File

@ -126,32 +126,31 @@ class EnvironmentProcessor(object):
DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board") DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")
KNOWN_PLATFORMIO_OPTIONS = ("env_default", "home_dir", "lib_dir", KNOWN_PLATFORMIO_OPTIONS = ("description", "env_default", "home_dir",
"libdeps_dir", "include_dir", "src_dir", "lib_dir", "libdeps_dir", "include_dir",
"build_dir", "data_dir", "test_dir", "src_dir", "build_dir", "data_dir", "test_dir",
"boards_dir", "lib_extra_dirs") "boards_dir", "lib_extra_dirs")
KNOWN_ENV_OPTIONS = ("platform", "framework", "board", "board_mcu", KNOWN_ENV_OPTIONS = ("platform", "framework", "board", "build_flags",
"board_f_cpu", "board_f_flash", "board_flash_mode", "src_build_flags", "build_unflags", "src_filter",
"build_flags", "src_build_flags", "build_unflags", "extra_scripts", "targets", "upload_port",
"src_filter", "extra_scripts", "targets", "upload_protocol", "upload_speed", "upload_flags",
"upload_port", "upload_protocol", "upload_speed", "upload_resetmethod", "lib_deps", "lib_ignore",
"upload_flags", "upload_resetmethod", "lib_deps", "lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode",
"lib_ignore", "lib_extra_dirs", "lib_ldf_mode", "lib_archive", "piotest", "test_transport",
"lib_compat_mode", "lib_archive", "piotest", "test_filter", "test_ignore", "test_port",
"test_transport", "test_filter", "test_ignore", "test_speed", "debug_tool", "debug_port",
"test_port", "test_speed", "debug_tool", "debug_port",
"debug_init_cmds", "debug_extra_cmds", "debug_server", "debug_init_cmds", "debug_extra_cmds", "debug_server",
"debug_init_break", "debug_load_cmd", "debug_init_break", "debug_load_cmd",
"debug_load_mode", "monitor_port", "monitor_baud", "debug_load_mode", "debug_svd_path", "monitor_port",
"monitor_rts", "monitor_dtr") "monitor_speed", "monitor_rts", "monitor_dtr")
IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore", IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore",
"test_port", "test_speed", "debug_port", "test_port", "test_speed", "debug_port",
"debug_init_cmds", "debug_extra_cmds", "debug_init_cmds", "debug_extra_cmds",
"debug_server", "debug_init_break", "debug_server", "debug_init_break",
"debug_load_cmd", "debug_load_mode", "debug_load_cmd", "debug_load_mode",
"monitor_port", "monitor_baud", "monitor_rts", "monitor_port", "monitor_speed", "monitor_rts",
"monitor_dtr") "monitor_dtr")
REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"} REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}
@ -159,7 +158,12 @@ class EnvironmentProcessor(object):
RENAMED_OPTIONS = { RENAMED_OPTIONS = {
"lib_use": "lib_deps", "lib_use": "lib_deps",
"lib_force": "lib_deps", "lib_force": "lib_deps",
"extra_script": "extra_scripts" "extra_script": "extra_scripts",
"monitor_baud": "monitor_speed",
"board_mcu": "board_build.mcu",
"board_f_cpu": "board_build.f_cpu",
"board_f_flash": "board_build.f_flash",
"board_flash_mode": "board_build.flash_mode"
} }
RENAMED_PLATFORMS = {"espressif": "espressif8266"} RENAMED_PLATFORMS = {"espressif": "espressif8266"}
@ -237,7 +241,11 @@ class EnvironmentProcessor(object):
v = self.RENAMED_PLATFORMS[v] v = self.RENAMED_PLATFORMS[v]
# warn about unknown options # warn about unknown options
if k not in self.KNOWN_ENV_OPTIONS and not k.startswith("custom_"): unknown_conditions = [
k not in self.KNOWN_ENV_OPTIONS, not k.startswith("custom_"),
not k.startswith("board_")
]
if all(unknown_conditions):
click.secho( click.secho(
"Detected non-PlatformIO `%s` option in `[env:%s]` section" "Detected non-PlatformIO `%s` option in `[env:%s]` section"
% (k, self.name), % (k, self.name),
@ -411,7 +419,7 @@ def check_project_envs(config, environments=None):
def calculate_project_hash(): def calculate_project_hash():
check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S") check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
structure = [__version__] chunks = [__version__]
for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()): for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
if not isdir(d): if not isdir(d):
continue continue
@ -419,5 +427,10 @@ def calculate_project_hash():
for f in files: for f in files:
path = join(root, f) path = join(root, f)
if path.endswith(check_suffixes): if path.endswith(check_suffixes):
structure.append(path) chunks.append(path)
return sha1(",".join(sorted(structure))).hexdigest() chunks_to_str = ",".join(sorted(chunks))
if "windows" in util.get_systype():
# Fix issue with useless project rebuilding for case insensitive FS.
# A case of disk drive can differ...
chunks_to_str = chunks_to_str.lower()
return sha1(chunks_to_str).hexdigest()

View File

@ -21,7 +21,7 @@ from time import mktime
import click import click
import requests import requests
from platformio import app, util from platformio import util
from platformio.exception import (FDSHASumMismatch, FDSizeMismatch, from platformio.exception import (FDSHASumMismatch, FDSizeMismatch,
FDUnrecognizedStatusCode) FDUnrecognizedStatusCode)
@ -50,7 +50,6 @@ class FileDownloader(object):
else: else:
self._fname = [p for p in url.split("/") if p][-1] self._fname = [p for p in url.split("/") if p][-1]
self._progressbar = None
self._destination = self._fname self._destination = self._fname
if dest_dir: if dest_dir:
self.set_destination( self.set_destination(
@ -70,12 +69,12 @@ class FileDownloader(object):
return -1 return -1
return int(self._request.headers['content-length']) return int(self._request.headers['content-length'])
def start(self): def start(self, with_progress=True):
label = "Downloading" label = "Downloading"
itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE) itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE)
f = open(self._destination, "wb") f = open(self._destination, "wb")
try: try:
if app.is_disabled_progressbar() or self.get_size() == -1: if not with_progress or self.get_size() == -1:
click.echo("%s..." % label) click.echo("%s..." % label)
for chunk in itercontent: for chunk in itercontent:
if chunk: if chunk:
@ -85,12 +84,6 @@ class FileDownloader(object):
with click.progressbar(length=chunks, label=label) as pb: with click.progressbar(length=chunks, label=label) as pb:
for _ in pb: for _ in pb:
f.write(next(itercontent)) f.write(next(itercontent))
except IOError as e:
click.secho(
"Error: Please read http://bit.ly/package-manager-ioerror",
fg="red",
err=True)
raise e
finally: finally:
f.close() f.close()
self._request.close() self._request.close()
@ -98,6 +91,8 @@ class FileDownloader(object):
if self.get_lmtime(): if self.get_lmtime():
self._preserve_filemtime(self.get_lmtime()) self._preserve_filemtime(self.get_lmtime())
return True
def verify(self, sha1=None): def verify(self, sha1=None):
_dlsize = getsize(self._destination) _dlsize = getsize(self._destination)
if self.get_size() != -1 and _dlsize != self.get_size(): if self.get_size() != -1 and _dlsize != self.get_size():

View File

@ -207,6 +207,11 @@ class InvalidSettingValue(PlatformioException):
MESSAGE = "Invalid value '{0}' for the setting '{1}'" MESSAGE = "Invalid value '{0}' for the setting '{1}'"
class InvalidJSONFile(PlatformioException):
MESSAGE = "Could not load broken JSON: {0}"
class CIBuildEnvsEmpty(PlatformioException): class CIBuildEnvsEmpty(PlatformioException):
MESSAGE = ("Can't find PlatformIO build environments.\n" MESSAGE = ("Can't find PlatformIO build environments.\n"

View File

@ -40,7 +40,7 @@ class ProjectGenerator(object):
return sorted( return sorted(
[d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))]) [d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])
@util.memoized @util.memoized()
def get_project_env(self): def get_project_env(self):
data = {} data = {}
config = util.load_project_config(self.project_dir) config = util.load_project_config(self.project_dir)
@ -54,7 +54,6 @@ class ProjectGenerator(object):
data[k] = v data[k] = v
return data return data
@util.memoized
def get_project_build_data(self): def get_project_build_data(self):
data = { data = {
"defines": [], "defines": [],

View File

@ -17,8 +17,8 @@
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/> <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_REGISTER_GROUPS" value=""/> <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_REGISTER_GROUPS" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/> <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="true"/> <booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/> <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="{{prog_path}}"/> <stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="{{prog_path}}"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="{{project_name}}"/> <stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="{{project_name}}"/>
<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="false"/> <booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="false"/>

View File

@ -1,8 +1,19 @@
{ {
"!!! WARNING !!!": "PLEASE DO NOT MODIFY THIS FILE! USE http://docs.platformio.org/page/projectconf/section_env_build.html#build-flags",
"configurations": [ "configurations": [
{ {
% import platform % import platform
% from os.path import commonprefix, dirname
%
% systype = platform.system().lower() % systype = platform.system().lower()
%
% cleaned_includes = []
% for include in includes:
% if "toolchain-" not in dirname(commonprefix([include, cc_path])):
% cleaned_includes.append(include)
% end
% end
%
% if systype == "windows": % if systype == "windows":
"name": "Win32", "name": "Win32",
% elif systype == "darwin": % elif systype == "darwin":
@ -11,7 +22,7 @@
"name": "Linux", "name": "Linux",
% end % end
"includePath": [ "includePath": [
% for include in includes: % for include in cleaned_includes:
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}", "{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
% end % end
"" ""
@ -20,7 +31,7 @@
"limitSymbolsToIncludedHeaders": true, "limitSymbolsToIncludedHeaders": true,
"databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db", "databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db",
"path": [ "path": [
% for include in includes: % for include in cleaned_includes:
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}", "{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
% end % end
"" ""
@ -32,7 +43,19 @@
% end % end
"" ""
], ],
"intelliSenseMode": "clang-x64" "intelliSenseMode": "clang-x64",
% import re
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%
% if cc_stds:
"cStandard": "c{{ cc_stds[-1] }}",
% end
% if cxx_stds:
"cppStandard": "c++{{ cxx_stds[-1] }}",
% end
"compilerPath": "{{ cc_path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"') }}"
} }
] ]
} }

View File

@ -0,0 +1,7 @@
{
// See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"platformio.platformio-ide"
]
}

View File

@ -1,17 +1,41 @@
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
// PIO Unified Debugger
//
// Documentation: http://docs.platformio.org/page/plus/debugging.html
// Configuration: http://docs.platformio.org/page/projectconf/section_env_debug.html
% from os.path import dirname, join % from os.path import dirname, join
%
% def _escape_path(path):
% return path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
% end
%
{ {
"version": "0.2.0", "version": "0.2.0",
"configurations": [ "configurations": [
{ {
"type": "gdb", "type": "platformio-debug",
"request": "launch", "request": "launch",
"cwd": "${workspaceRoot}",
"name": "PlatformIO Debugger", "name": "PlatformIO Debugger",
"target": "{{prog_path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}", "executable": "{{ _escape_path(prog_path) }}",
"gdbpath": "{{join(dirname(platformio_path), "piodebuggdb").replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}", "toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
"autorun": [ "source .pioinit" ], % if svd_path:
"svdPath": "{{ _escape_path(svd_path) }}",
% end
"preLaunchTask": "PlatformIO: Pre-Debug", "preLaunchTask": "PlatformIO: Pre-Debug",
"internalConsoleOptions": "openOnSessionStart" "internalConsoleOptions": "openOnSessionStart"
},
{
"type": "platformio-debug",
"request": "launch",
"name": "PlatformIO Debugger (Skip Pre-Debug)",
"executable": "{{ _escape_path(prog_path) }}",
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
% if svd_path:
"svdPath": "{{ _escape_path(svd_path) }}",
% end
"internalConsoleOptions": "openOnSessionStart"
} }
] ]
} }

View File

@ -21,10 +21,10 @@ from platformio import __version__, exception, util
from platformio.managers.package import PackageManager from platformio.managers.package import PackageManager
CORE_PACKAGES = { CORE_PACKAGES = {
"contrib-piohome": ">=0.7.1,<2", "contrib-piohome": ">=0.9.5,<2",
"contrib-pysite": ">=0.1.5,<2", "contrib-pysite": ">=0.2.0,<2",
"tool-pioplus": ">=0.14.5,<2", "tool-pioplus": ">=1.3.1,<2",
"tool-unity": "~1.20302.1", "tool-unity": "~1.20403.0",
"tool-scons": "~2.20501.4" "tool-scons": "~2.20501.4"
} }
@ -69,7 +69,7 @@ class CorePackageManager(PackageManager):
if manifest['name'] not in best_pkg_versions: if manifest['name'] not in best_pkg_versions:
continue continue
if manifest['version'] != best_pkg_versions[manifest['name']]: if manifest['version'] != best_pkg_versions[manifest['name']]:
self.uninstall(manifest['__pkg_dir'], trigger_event=False) self.uninstall(manifest['__pkg_dir'], after_update=True)
self.cache_reset() self.cache_reset()
return True return True

View File

@ -332,7 +332,7 @@ class LibraryManager(BasePkgManager):
name, name,
requirements=None, requirements=None,
silent=False, silent=False,
trigger_event=True, after_update=False,
interactive=False, interactive=False,
force=False): force=False):
_name, _requirements, _url = self.parse_pkg_uri(name, requirements) _name, _requirements, _url = self.parse_pkg_uri(name, requirements)
@ -350,7 +350,7 @@ class LibraryManager(BasePkgManager):
name, name,
requirements, requirements,
silent=silent, silent=silent,
trigger_event=trigger_event, after_update=after_update,
force=force) force=force)
if not pkg_dir: if not pkg_dir:
@ -365,11 +365,20 @@ class LibraryManager(BasePkgManager):
for filters in self.normalize_dependencies(manifest['dependencies']): for filters in self.normalize_dependencies(manifest['dependencies']):
assert "name" in filters assert "name" in filters
# avoid circle dependencies
if not self.INSTALL_HISTORY:
self.INSTALL_HISTORY = []
history_key = str(filters)
if history_key in self.INSTALL_HISTORY:
continue
self.INSTALL_HISTORY.append(history_key)
if any(s in filters.get("version", "") for s in ("\\", "/")): if any(s in filters.get("version", "") for s in ("\\", "/")):
self.install( self.install(
"{name}={version}".format(**filters), "{name}={version}".format(**filters),
silent=silent, silent=silent,
trigger_event=trigger_event, after_update=after_update,
interactive=interactive, interactive=interactive,
force=force) force=force)
else: else:
@ -385,20 +394,20 @@ class LibraryManager(BasePkgManager):
lib_id, lib_id,
filters.get("version"), filters.get("version"),
silent=silent, silent=silent,
trigger_event=trigger_event, after_update=after_update,
interactive=interactive, interactive=interactive,
force=force) force=force)
else: else:
self.install( self.install(
lib_id, lib_id,
silent=silent, silent=silent,
trigger_event=trigger_event, after_update=after_update,
interactive=interactive, interactive=interactive,
force=force) force=force)
return pkg_dir return pkg_dir
@util.memoized @util.memoized()
def get_builtin_libs(storage_names=None): def get_builtin_libs(storage_names=None):
items = [] items = []
storage_names = storage_names or [] storage_names = storage_names or []
@ -417,7 +426,7 @@ def get_builtin_libs(storage_names=None):
return items return items
@util.memoized @util.memoized()
def is_builtin_lib(name): def is_builtin_lib(name):
for storage in get_builtin_libs(): for storage in get_builtin_libs():
if any(l.get("name") == name for l in storage['items']): if any(l.get("name") == name for l in storage['items']):

View File

@ -177,8 +177,25 @@ class PkgInstallerMixin(object):
shutil.copy(cache_path, dst_path) shutil.copy(cache_path, dst_path)
return dst_path return dst_path
fd = FileDownloader(url, dest_dir) with_progress = not app.is_disabled_progressbar()
fd.start() try:
fd = FileDownloader(url, dest_dir)
fd.start(with_progress=with_progress)
except IOError as e:
raise_error = not with_progress
if with_progress:
try:
fd = FileDownloader(url, dest_dir)
fd.start(with_progress=False)
except IOError:
raise_error = True
if raise_error:
click.secho(
"Error: Please read http://bit.ly/package-manager-ioerror",
fg="red",
err=True)
raise e
if sha1: if sha1:
fd.verify(sha1) fd.verify(sha1)
dst_path = fd.get_filepath() dst_path = fd.get_filepath()
@ -194,8 +211,15 @@ class PkgInstallerMixin(object):
@staticmethod @staticmethod
def unpack(source_path, dest_dir): def unpack(source_path, dest_dir):
with FileUnpacker(source_path) as fu: with_progress = not app.is_disabled_progressbar()
return fu.unpack(dest_dir) try:
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir, with_progress=with_progress)
except IOError as e:
if not with_progress:
raise e
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir, with_progress=False)
@staticmethod @staticmethod
def parse_semver_spec(value, raise_exception=False): def parse_semver_spec(value, raise_exception=False):
@ -478,7 +502,7 @@ class PkgInstallerMixin(object):
target_dirname = "%s@src-%s" % ( target_dirname = "%s@src-%s" % (
pkg_dirname, pkg_dirname,
hashlib.md5(cur_manifest['__src_url']).hexdigest()) hashlib.md5(cur_manifest['__src_url']).hexdigest())
os.rename(pkg_dir, join(self.package_dir, target_dirname)) shutil.move(pkg_dir, join(self.package_dir, target_dirname))
# fix to a version # fix to a version
elif action == 2: elif action == 2:
target_dirname = "%s@%s" % (pkg_dirname, target_dirname = "%s@%s" % (pkg_dirname,
@ -492,7 +516,7 @@ class PkgInstallerMixin(object):
# remove previous/not-satisfied package # remove previous/not-satisfied package
if isdir(pkg_dir): if isdir(pkg_dir):
util.rmtree_(pkg_dir) util.rmtree_(pkg_dir)
os.rename(tmp_dir, pkg_dir) shutil.move(tmp_dir, pkg_dir)
assert isdir(pkg_dir) assert isdir(pkg_dir)
self.cache_reset() self.cache_reset()
return pkg_dir return pkg_dir
@ -633,7 +657,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
name, name,
requirements=None, requirements=None,
silent=False, silent=False,
trigger_event=True, after_update=False,
force=False): force=False):
name, requirements, url = self.parse_pkg_uri(name, requirements) name, requirements, url = self.parse_pkg_uri(name, requirements)
package_dir = self.get_package_dir(name, requirements, url) package_dir = self.get_package_dir(name, requirements, url)
@ -676,7 +700,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
manifest = self.load_manifest(pkg_dir) manifest = self.load_manifest(pkg_dir)
assert manifest assert manifest
if trigger_event: if not after_update:
telemetry.on_event( telemetry.on_event(
category=self.__class__.__name__, category=self.__class__.__name__,
action="Install", action="Install",
@ -690,7 +714,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
return pkg_dir return pkg_dir
def uninstall(self, package, requirements=None, trigger_event=True): def uninstall(self, package, requirements=None, after_update=False):
if isdir(package): if isdir(package):
pkg_dir = package pkg_dir = package
else: else:
@ -716,14 +740,14 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
# unfix package with the same name # unfix package with the same name
pkg_dir = self.get_package_dir(manifest['name']) pkg_dir = self.get_package_dir(manifest['name'])
if pkg_dir and "@" in pkg_dir: if pkg_dir and "@" in pkg_dir:
os.rename(pkg_dir, shutil.move(pkg_dir,
join(self.package_dir, join(self.package_dir,
self.get_install_dirname(manifest))) self.get_install_dirname(manifest)))
self.cache_reset() self.cache_reset()
click.echo("[%s]" % click.style("OK", fg="green")) click.echo("[%s]" % click.style("OK", fg="green"))
if trigger_event: if not after_update:
telemetry.on_event( telemetry.on_event(
category=self.__class__.__name__, category=self.__class__.__name__,
action="Uninstall", action="Uninstall",
@ -769,8 +793,8 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
self._update_src_manifest( self._update_src_manifest(
dict(version=vcs.get_current_revision()), vcs.storage_dir) dict(version=vcs.get_current_revision()), vcs.storage_dir)
else: else:
self.uninstall(pkg_dir, trigger_event=False) self.uninstall(pkg_dir, after_update=True)
self.install(name, latest, trigger_event=False) self.install(name, latest, after_update=True)
telemetry.on_event( telemetry.on_event(
category=self.__class__.__name__, category=self.__class__.__name__,

View File

@ -30,7 +30,7 @@ from platformio.managers.package import BasePkgManager, PackageManager
class PlatformManager(BasePkgManager): class PlatformManager(BasePkgManager):
FILE_CACHE_VALID = None # disable platform caching FILE_CACHE_VALID = None # disable platform download caching
def __init__(self, package_dir=None, repositories=None): def __init__(self, package_dir=None, repositories=None):
if not repositories: if not repositories:
@ -62,7 +62,7 @@ class PlatformManager(BasePkgManager):
with_packages=None, with_packages=None,
without_packages=None, without_packages=None,
skip_default_package=False, skip_default_package=False,
trigger_event=True, after_update=False,
silent=False, silent=False,
force=False, force=False,
**_): # pylint: disable=too-many-arguments, arguments-differ **_): # pylint: disable=too-many-arguments, arguments-differ
@ -70,20 +70,20 @@ class PlatformManager(BasePkgManager):
self, name, requirements, silent=silent, force=force) self, name, requirements, silent=silent, force=force)
p = PlatformFactory.newPlatform(platform_dir) p = PlatformFactory.newPlatform(platform_dir)
# @Hook: when 'update' operation (trigger_event is False), # don't cleanup packages or install them after update
# don't cleanup packages or install them # we check packages for updates in def update()
if not trigger_event: if after_update:
return True return True
p.install_packages( p.install_packages(
with_packages, with_packages,
without_packages, without_packages,
skip_default_package, skip_default_package,
silent=silent, silent=silent,
force=force) force=force)
self.cleanup_packages(p.packages.keys()) return self.cleanup_packages(p.packages.keys())
return True
def uninstall(self, package, requirements=None, trigger_event=True): def uninstall(self, package, requirements=None, after_update=False):
if isdir(package): if isdir(package):
pkg_dir = package pkg_dir = package
else: else:
@ -96,13 +96,12 @@ class PlatformManager(BasePkgManager):
p = PlatformFactory.newPlatform(pkg_dir) p = PlatformFactory.newPlatform(pkg_dir)
BasePkgManager.uninstall(self, pkg_dir, requirements) BasePkgManager.uninstall(self, pkg_dir, requirements)
# @Hook: when 'update' operation (trigger_event is False), # don't cleanup packages or install them after update
# don't cleanup packages or install them # we check packages for updates in def update()
if not trigger_event: if after_update:
return True return True
self.cleanup_packages(p.packages.keys()) return self.cleanup_packages(p.packages.keys())
return True
def update( # pylint: disable=arguments-differ def update( # pylint: disable=arguments-differ
self, self,
@ -154,11 +153,15 @@ class PlatformManager(BasePkgManager):
continue continue
if (manifest['name'] not in deppkgs if (manifest['name'] not in deppkgs
or manifest['version'] not in deppkgs[manifest['name']]): or manifest['version'] not in deppkgs[manifest['name']]):
pm.uninstall(manifest['__pkg_dir'], trigger_event=False) try:
pm.uninstall(manifest['__pkg_dir'], after_update=True)
except exception.UnknownPackage:
pass
self.cache_reset() self.cache_reset()
return True return True
@util.memoized(expire=5000)
def get_installed_boards(self): def get_installed_boards(self):
boards = [] boards = []
for manifest in self.get_installed(): for manifest in self.get_installed():
@ -170,7 +173,7 @@ class PlatformManager(BasePkgManager):
return boards return boards
@staticmethod @staticmethod
@util.memoized @util.memoized()
def get_registered_boards(): def get_registered_boards():
return util.get_api_result("/boards", cache_valid="7d") return util.get_api_result("/boards", cache_valid="7d")
@ -280,21 +283,25 @@ class PlatformPackagesMixin(object):
return True return True
def find_pkg_names(self, items): def find_pkg_names(self, candidates):
result = [] result = []
for item in items: for candidate in candidates:
candidate = item found = False
# lookup by package types # lookup by package types
for _name, _opts in self.packages.items(): for _name, _opts in self.packages.items():
if _opts.get("type") == item: if _opts.get("type") == candidate:
candidate = _name result.append(_name)
found = True
if (self.frameworks and item.startswith("framework-") if (self.frameworks and candidate.startswith("framework-")
and item[10:] in self.frameworks): and candidate[10:] in self.frameworks):
candidate = self.frameworks[item[10:]]['package'] result.append(self.frameworks[candidate[10:]]['package'])
found = True
if not found:
result.append(candidate)
result.append(candidate)
return result return result
def update_packages(self, only_check=False): def update_packages(self, only_check=False):
@ -489,6 +496,10 @@ class PlatformBase( # pylint: disable=too-many-public-methods
def vendor_url(self): def vendor_url(self):
return self._manifest.get("url") return self._manifest.get("url")
@property
def docs_url(self):
return self._manifest.get("docs")
@property @property
def repository_url(self): def repository_url(self):
return self._manifest.get("repository", {}).get("url") return self._manifest.get("repository", {}).get("url")
@ -654,6 +665,15 @@ class PlatformBoardConfig(object):
else: else:
raise KeyError("Invalid board option '%s'" % path) raise KeyError("Invalid board option '%s'" % path)
def update(self, path, value):
newdict = None
for key in path.split(".")[::-1]:
if newdict is None:
newdict = {key: value}
else:
newdict = {key: newdict}
util.merge_dicts(self._manifest, newdict)
def __contains__(self, key): def __contains__(self, key):
try: try:
self.get(key) self.get(key)

View File

@ -16,6 +16,7 @@ import atexit
import platform import platform
import Queue import Queue
import re import re
import sys
import threading import threading
from collections import deque from collections import deque
from os import getenv, sep from os import getenv, sep
@ -152,16 +153,22 @@ class MeasurementProtocol(TelemetryBase):
cmd_path.append(sub_cmd) cmd_path.append(sub_cmd)
self['screen_name'] = " ".join([p.title() for p in cmd_path]) self['screen_name'] = " ".join([p.title() for p in cmd_path])
def send(self, hittype): @staticmethod
def _ignore_hit():
if not app.get_setting("enable_telemetry"): if not app.get_setting("enable_telemetry"):
return True
if app.get_session_var("caller_id") and \
all(c in sys.argv for c in ("run", "idedata")):
return True
return False
def send(self, hittype):
if self._ignore_hit():
return return
self['t'] = hittype self['t'] = hittype
# correct queue time # correct queue time
if "qt" in self._params and isinstance(self['qt'], float): if "qt" in self._params and isinstance(self['qt'], float):
self['qt'] = int((time() - self['qt']) * 1000) self['qt'] = int((time() - self['qt']) * 1000)
MPDataPusher().push(self._params) MPDataPusher().push(self._params)

View File

@ -20,7 +20,7 @@ from zipfile import ZipFile
import click import click
from platformio import app, util from platformio import util
from platformio.exception import UnsupportedArchiveType from platformio.exception import UnsupportedArchiveType
@ -96,9 +96,9 @@ class FileUnpacker(object):
if self._unpacker: if self._unpacker:
self._unpacker.close() self._unpacker.close()
def unpack(self, dest_dir="."): def unpack(self, dest_dir=".", with_progress=True):
assert self._unpacker assert self._unpacker
if app.is_disabled_progressbar(): if not with_progress:
click.echo("Unpacking...") click.echo("Unpacking...")
for item in self._unpacker.get_items(): for item in self._unpacker.get_items():
self._unpacker.extract_item(item, dest_dir) self._unpacker.extract_item(item, dest_dir)

View File

@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import collections
import functools
import json import json
import os import os
import platform import platform
@ -113,40 +111,23 @@ class cd(object):
class memoized(object): class memoized(object):
'''
Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
'''
def __init__(self, func): def __init__(self, expire=0):
self.func = func self.expire = expire / 1000 # milliseconds
self.cache = {} self.cache = {}
def __call__(self, *args): def __call__(self, func):
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
value = self.func(*args)
self.cache[args] = value
return value
def __repr__(self): @wraps(func)
'''Return the function's docstring.''' def wrapper(*args, **kwargs):
return self.func.__doc__ key = str(args) + str(kwargs)
if (key not in self.cache
or (self.expire > 0
and self.cache[key][0] < time.time() - self.expire)):
self.cache[key] = (time.time(), func(*args, **kwargs))
return self.cache[key][1]
def __get__(self, obj, objtype): return wrapper
'''Support instance methods.'''
fn = functools.partial(self.__call__, obj)
fn.reset = self._reset
return fn
def _reset(self):
self.cache = {}
class throttle(object): class throttle(object):
@ -155,15 +136,15 @@ class throttle(object):
self.threshhold = threshhold # milliseconds self.threshhold = threshhold # milliseconds
self.last = 0 self.last = 0
def __call__(self, fn): def __call__(self, func):
@wraps(fn) @wraps(func)
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
diff = int(round((time.time() - self.last) * 1000)) diff = int(round((time.time() - self.last) * 1000))
if diff < self.threshhold: if diff < self.threshhold:
time.sleep((self.threshhold - diff) * 0.001) time.sleep((self.threshhold - diff) * 0.001)
self.last = time.time() self.last = time.time()
return fn(*args, **kwargs) return func(*args, **kwargs)
return wrapper return wrapper
@ -189,8 +170,7 @@ def load_json(file_path):
with open(file_path, "r") as f: with open(file_path, "r") as f:
return json.load(f) return json.load(f)
except ValueError: except ValueError:
raise exception.PlatformioException( raise exception.InvalidJSONFile(file_path)
"Could not load broken JSON: %s" % file_path)
def get_systype(): def get_systype():
@ -548,6 +528,14 @@ def get_mdns_services():
with mDNSListener() as mdns: with mDNSListener() as mdns:
time.sleep(3) time.sleep(3)
for service in mdns.get_services(): for service in mdns.get_services():
properties = None
try:
if service.properties:
json.dumps(service.properties)
properties = service.properties
except UnicodeDecodeError:
pass
items.append({ items.append({
"type": "type":
service.type, service.type,
@ -558,7 +546,7 @@ def get_mdns_services():
"port": "port":
service.port, service.port,
"properties": "properties":
service.properties properties
}) })
return items return items
@ -568,7 +556,7 @@ def get_request_defheaders():
return {"User-Agent": "PlatformIO/%s CI/%d %s" % data} return {"User-Agent": "PlatformIO/%s CI/%d %s" % data}
@memoized @memoized(expire=10000)
def _api_request_session(): def _api_request_session():
return requests.Session() return requests.Session()
@ -609,6 +597,7 @@ def _get_api_result(
verify=verify_ssl) verify=verify_ssl)
result = r.json() result = r.json()
r.raise_for_status() r.raise_for_status()
return r.text
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
if result and "message" in result: if result and "message" in result:
raise exception.APIRequestError(result['message']) raise exception.APIRequestError(result['message'])
@ -622,7 +611,7 @@ def _get_api_result(
finally: finally:
if r: if r:
r.close() r.close()
return result return None
def get_api_result(url, params=None, data=None, auth=None, cache_valid=None): def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
@ -637,7 +626,7 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
if cache_key: if cache_key:
result = cc.get(cache_key) result = cc.get(cache_key)
if result is not None: if result is not None:
return result return json.loads(result)
# check internet before and resolve issue with 60 seconds timeout # check internet before and resolve issue with 60 seconds timeout
internet_on(raise_exception=True) internet_on(raise_exception=True)
@ -646,7 +635,7 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
if cache_valid: if cache_valid:
with ContentCache() as cc: with ContentCache() as cc:
cc.set(cache_key, result, cache_valid) cc.set(cache_key, result, cache_valid)
return result return json.loads(result)
except (requests.exceptions.ConnectionError, except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout) as e: requests.exceptions.Timeout) as e:
from platformio.maintenance import in_silence from platformio.maintenance import in_silence
@ -670,7 +659,7 @@ PING_INTERNET_IPS = [
] ]
@memoized @memoized(expire=5000)
def _internet_on(): def _internet_on():
timeout = 2 timeout = 2
socket.setdefaulttimeout(timeout) socket.setdefaulttimeout(timeout)
@ -765,6 +754,18 @@ def format_filesize(filesize):
return "%d%sB" % ((base * filesize / unit), suffix) return "%d%sB" % ((base * filesize / unit), suffix)
def merge_dicts(d1, d2, path=None):
    """Recursively merge ``d2`` into ``d1`` in place and return ``d1``.

    When a key maps to a dict in both arguments, the nested dicts are
    merged key-by-key; any other value from ``d2`` overwrites the one in
    ``d1``. Note that ``d1`` is mutated.

    :param d1: destination dict (modified in place)
    :param d2: source dict whose entries take precedence
    :param path: unused; kept only for backward compatibility with
        existing callers that may pass it positionally
    :return: ``d1``, for call-chaining convenience
    """
    # NOTE: the original implementation accumulated ``path + [str(key)]``
    # on every recursive call but never read it — dropped as dead work.
    for key, value in d2.items():
        if key in d1 and isinstance(d1[key], dict) and isinstance(value, dict):
            merge_dicts(d1[key], value)
        else:
            d1[key] = value
    return d1
def rmtree_(path): def rmtree_(path):
def _onerror(_, name, __): def _onerror(_, name, __):

View File

@ -302,21 +302,31 @@ Stable and upstream versions
You can switch between `stable releases <https://github.com/platformio/platform-{name}/releases>`__ You can switch between `stable releases <https://github.com/platformio/platform-{name}/releases>`__
of {title} development platform and the latest upstream version using of {title} development platform and the latest upstream version using
:ref:`projectconf_env_platform` option as described below: :ref:`projectconf_env_platform` option in :ref:`projectconf` as described below.
Stable
~~~~~~
.. code-block:: ini .. code-block:: ini
; Custom stable version ; Latest stable version
[env:stable] [env:latest_stable]
platform ={name}@x.y.z platform = {name}
board = ... board = ...
...
; The latest upstream/development version ; Custom stable version
[env:upstream] [env:custom_stable]
platform = {name}@x.y.z
board = ...
Upstream
~~~~~~~~
.. code-block:: ini
[env:upstream_develop]
platform = https://github.com/platformio/platform-{name}.git platform = https://github.com/platformio/platform-{name}.git
board = ... board = ...
...
""".format(name=p.name, title=p.title)) """.format(name=p.name, title=p.title))
# #

View File

@ -62,7 +62,7 @@ def test_global_install_archive(clirunner, validate_cliresult,
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip", "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2", "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
"http://dl.platformio.org/libraries/archives/0/9540.tar.gz", "http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
"https://github.com/adafruit/Adafruit-ST7735-Library/archive/master.zip" "https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip"
]) ])
validate_cliresult(result) validate_cliresult(result)
@ -76,7 +76,7 @@ def test_global_install_archive(clirunner, validate_cliresult,
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()] items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [ items2 = [
"RadioHead-1.62", "ArduinoJson", "DallasTemperature_ID54", "RadioHead-1.62", "ArduinoJson", "DallasTemperature_ID54",
"OneWire_ID1", "Adafruit ST7735 Library" "OneWire_ID1", "ESP32WebServer"
] ]
assert set(items1) >= set(items2) assert set(items1) >= set(items2)
@ -142,7 +142,7 @@ def test_global_lib_list(clirunner, validate_cliresult):
validate_cliresult(result) validate_cliresult(result)
assert all([ assert all([
n in result.output for n in n in result.output for n in
("Source: https://github.com/adafruit/Adafruit-ST7735-Library/archive/master.zip", ("Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
"Version: 5.10.1", "Version: 5.10.1",
"Source: git+https://github.com/gioblu/PJON.git#3.0", "Source: git+https://github.com/gioblu/PJON.git#3.0",
"Version: 1fb26fd", "RadioHead-1.62") "Version: 1fb26fd", "RadioHead-1.62")
@ -157,7 +157,7 @@ def test_global_lib_list(clirunner, validate_cliresult):
]) ])
items1 = [i['name'] for i in json.loads(result.output)] items1 = [i['name'] for i in json.loads(result.output)]
items2 = [ items2 = [
"Adafruit ST7735 Library", "ArduinoJson", "ArduinoJson", "ArduinoJson", "ESP32WebServer", "ArduinoJson", "ArduinoJson", "ArduinoJson",
"ArduinoJson", "AsyncMqttClient", "AsyncTCP", "DallasTemperature", "ArduinoJson", "AsyncMqttClient", "AsyncTCP", "DallasTemperature",
"ESPAsyncTCP", "NeoPixelBus", "OneWire", "PJON", "PJON", "ESPAsyncTCP", "NeoPixelBus", "OneWire", "PJON", "PJON",
"PubSubClient", "RFcontrol", "RadioHead-1.62", "platformio-libmirror", "PubSubClient", "RFcontrol", "RadioHead-1.62", "platformio-libmirror",
@ -221,9 +221,9 @@ def test_global_lib_uninstall(clirunner, validate_cliresult,
validate_cliresult(result) validate_cliresult(result)
items = json.loads(result.output) items = json.loads(result.output)
result = clirunner.invoke(cmd_lib, result = clirunner.invoke(cmd_lib,
["-g", "uninstall", items[0]['__pkg_dir']]) ["-g", "uninstall", items[5]['__pkg_dir']])
validate_cliresult(result) validate_cliresult(result)
assert "Uninstalling Adafruit ST7735 Library" in result.output assert "Uninstalling AsyncTCP" in result.output
# uninstall the rest libraries # uninstall the rest libraries
result = clirunner.invoke(cmd_lib, [ result = clirunner.invoke(cmd_lib, [
@ -238,7 +238,7 @@ def test_global_lib_uninstall(clirunner, validate_cliresult,
"PubSubClient", "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", "PubSubClient", "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
"ESPAsyncTCP_ID305", "DallasTemperature_ID54", "NeoPixelBus_ID547", "ESPAsyncTCP_ID305", "DallasTemperature_ID54", "NeoPixelBus_ID547",
"PJON", "AsyncMqttClient_ID346", "ArduinoJson_ID64", "PJON", "AsyncMqttClient_ID346", "ArduinoJson_ID64",
"PJON@src-79de467ebe19de18287becff0a1fb42d", "AsyncTCP_ID1826" "PJON@src-79de467ebe19de18287becff0a1fb42d", "ESP32WebServer"
] ]
assert set(items1) == set(items2) assert set(items1) == set(items2)

View File

@ -61,13 +61,14 @@ def test_install_known_version(clirunner, validate_cliresult,
assert len(isolated_pio_home.join("packages").listdir()) == 1 assert len(isolated_pio_home.join("packages").listdir()) == 1
def test_install_from_vcs(clirunner, validate_cliresult): def test_install_from_vcs(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_install, [ result = clirunner.invoke(cli_platform.platform_install, [
"https://github.com/platformio/" "https://github.com/platformio/"
"platform-espressif8266.git#feature/stage", "--skip-default-package" "platform-espressif8266.git#feature/stage", "--skip-default-package"
]) ])
validate_cliresult(result) validate_cliresult(result)
assert "espressif8266" in result.output assert "espressif8266" in result.output
assert len(isolated_pio_home.join("packages").listdir()) == 1
def test_list_json_output(clirunner, validate_cliresult): def test_list_json_output(clirunner, validate_cliresult):

96
tests/test_builder.py Normal file
View File

@ -0,0 +1,96 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.run import cli as cmd_run
def test_build_flags(clirunner, validate_cliresult, tmpdir):
    """Check that flags from ``build_flags`` and a post extra script all
    appear (normalized) in the verbose build output."""
    # Pairs of (flag as written in platformio.ini, form expected in output).
    flag_pairs = [
        ("-D TEST_INT=13", "-DTEST_INT=13"),
        ("-DTEST_SINGLE_MACRO", "-DTEST_SINGLE_MACRO"),
        ('-DTEST_STR_SPACE="Andrew Smith"',
         '"-DTEST_STR_SPACE=Andrew Smith"'),
    ]
    raw_flags = " ".join([pair[0] for pair in flag_pairs])
    tmpdir.join("platformio.ini").write("""
[env:native]
platform = native
extra_scripts = extra.py
build_flags = %s
""" % raw_flags)
    tmpdir.join("extra.py").write("""
Import("env")
env.Append(CPPDEFINES="POST_SCRIPT_MACRO")
""")
    # main.cpp fails to compile unless every expected macro reached it.
    tmpdir.mkdir("src").join("main.cpp").write("""
#if !defined(TEST_INT) || TEST_INT != 13
#error "TEST_INT"
#endif
#ifndef TEST_STR_SPACE
#error "TEST_STR_SPACE"
#endif
#ifndef POST_SCRIPT_MACRO
#error "POST_SCRIPT_MACRO"
#endif
int main() {
}
""")
    result = clirunner.invoke(
        cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
    validate_cliresult(result)
    marker = result.output.find("Scanning dependencies...")
    build_output = result.output[marker:]
    for pair in flag_pairs:
        assert pair[1] in build_output, pair
def test_build_unflags(clirunner, validate_cliresult, tmpdir):
    """Check that ``build_unflags`` strips macros, include paths, libs and
    optimization flags injected by a pre extra script."""
    project_dir = str(tmpdir)
    tmpdir.join("platformio.ini").write("""
[env:native]
platform = native
build_unflags = -DTMP_MACRO1=45 -I. -DNON_EXISTING_MACRO -lunknownLib -Os
extra_scripts = pre:extra.py
""")
    # The pre script adds everything that build_unflags must remove again.
    tmpdir.join("extra.py").write("""
Import("env")
env.Append(CPPPATH="%s")
env.Append(CPPDEFINES="TMP_MACRO1")
env.Append(CPPDEFINES=["TMP_MACRO2"])
env.Append(CPPDEFINES=("TMP_MACRO3", 13))
env.Append(CCFLAGS=["-Os"])
env.Append(LIBS=["unknownLib"])
""" % project_dir)
    # Compilation fails if TMP_MACRO1 survived the unflag pass.
    tmpdir.mkdir("src").join("main.c").write("""
#ifdef TMP_MACRO1
#error "TMP_MACRO1 should be removed"
#endif
int main() {
}
""")
    result = clirunner.invoke(
        cmd_run, ["--project-dir", project_dir, "--verbose"])
    validate_cliresult(result)
    marker = result.output.find("Scanning dependencies...")
    build_output = result.output[marker:]
    assert "-DTMP_MACRO1" not in build_output
    assert "-Os" not in build_output
    assert project_dir not in build_output

View File

@ -16,11 +16,11 @@ import pytest
import requests import requests
def validate_response(req): def validate_response(r):
assert req.status_code == 200 assert r.status_code == 200, r.url
assert int(req.headers['Content-Length']) > 0 assert int(r.headers['Content-Length']) > 0, r.url
assert req.headers['Content-Type'] in ("application/gzip", assert r.headers['Content-Type'] in ("application/gzip",
"application/octet-stream") "application/octet-stream")
def test_packages(): def test_packages():