Introduce Black to automate code formatting

Author: Ivan Kravets
Date: 2019-09-23 23:13:48 +03:00
parent 5e144a2c98
commit 7c41c7c2f3
90 changed files with 4064 additions and 3367 deletions

View File

@ -1,3 +1,3 @@
[settings]
line_length=79
line_length=88
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial,twisted,autobahn,jsonrpc,tabulate

View File

@ -1,5 +1,7 @@
[MESSAGES CONTROL]
disable=
bad-continuation,
bad-whitespace,
missing-docstring,
ungrouped-imports,
invalid-name,

View File

@ -5,13 +5,14 @@ isort:
isort -rc ./platformio
isort -rc ./tests
yapf:
yapf --recursive --in-place platformio/
black:
black --target-version py27 ./platformio
black --target-version py27 ./tests
test:
py.test --verbose --capture=no --exitfirst -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py
before-commit: isort yapf lint test
before-commit: isort black lint test
clean-docs:
rm -rf docs/_build
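
For orientation before the per-file hunks: below is a minimal, illustrative sketch (not part of this commit) of the style that the `black --target-version py27` Makefile target added above enforces, which accounts for most of the churn in the files that follow. The DEFAULT_SETTINGS fragment is borrowed from one of the reformatted hunks further down; the describe() helper and the __main__ guard are hypothetical additions so the sketch is runnable on its own.

# Illustrative sketch only -- Black's conventions at its default 88-column limit:
#   * strings and dict keys normalized to double quotes,
#   * multi-line literals and calls get one element per line plus a trailing comma,
#   * long calls and conditions are exploded by the tool instead of hand-wrapped.

DEFAULT_SETTINGS = {
    "auto_update_libraries": {
        "description": "Automatically update libraries (Yes/No)",
        "value": False,
    },
}


def describe(settings, name, default=None):
    # Hypothetical helper: return the human-readable description for a setting.
    return settings.get(name, {}).get("description", default)


if __name__ == "__main__":
    print(describe(DEFAULT_SETTINGS, "auto_update_libraries"))

The same default line length of 88 is what the isort configuration above switches to, so both tools wrap at the same column.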

View File

@ -21,7 +21,8 @@ __description__ = (
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO"

View File

@ -23,27 +23,31 @@ from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN
@click.command(cls=PlatformioCLI,
context_settings=dict(help_option_names=["-h", "--help"]))
@click.command(
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
)
@click.version_option(__version__, prog_name="PlatformIO")
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi",
is_flag=True,
help="Do not print ANSI control characters")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@click.pass_context
def cli(ctx, force, caller, no_ansi):
try:
if (no_ansi or str(
os.getenv(
"PLATFORMIO_NO_ANSI",
os.getenv("PLATFORMIO_DISABLE_COLOR"))).lower() == "true"):
if (
no_ansi
or str(
os.getenv("PLATFORMIO_NO_ANSI", os.getenv("PLATFORMIO_DISABLE_COLOR"))
).lower()
== "true"
):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: False
elif str(
os.getenv(
"PLATFORMIO_FORCE_ANSI",
os.getenv("PLATFORMIO_FORCE_COLOR"))).lower() == "true":
elif (
str(
os.getenv("PLATFORMIO_FORCE_ANSI", os.getenv("PLATFORMIO_FORCE_COLOR"))
).lower()
== "true"
):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
except: # pylint: disable=bare-except
@ -67,6 +71,7 @@ def configure():
# /en/latest/security.html#insecureplatformwarning
try:
import urllib3
urllib3.disable_warnings()
except (AttributeError, ImportError):
pass
@ -79,7 +84,8 @@ def configure():
click_echo_origin[origin](*args, **kwargs)
except IOError:
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
"%s\n" % (args[0] if args else ""))
"%s\n" % (args[0] if args else "")
)
click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)

View File

@ -23,11 +23,9 @@ from time import time
import requests
from platformio import exception, fs, lockfile
from platformio.compat import (WINDOWS, dump_json_to_unicode,
hashlib_encode_data)
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
from platformio.proc import is_ci
from platformio.project.helpers import (get_project_cache_dir,
get_project_core_dir)
from platformio.project.helpers import get_project_cache_dir, get_project_core_dir
def get_default_projects_dir():
@ -35,6 +33,7 @@ def get_default_projects_dir():
try:
assert WINDOWS
import ctypes.wintypes
buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
ctypes.windll.shell32.SHGetFolderPathW(None, 5, None, 0, buf)
docs_dir = buf.value
@ -51,45 +50,41 @@ def projects_dir_validate(projects_dir):
DEFAULT_SETTINGS = {
"auto_update_libraries": {
"description": "Automatically update libraries (Yes/No)",
"value": False
"value": False,
},
"auto_update_platforms": {
"description": "Automatically update platforms (Yes/No)",
"value": False
"value": False,
},
"check_libraries_interval": {
"description": "Check for the library updates interval (days)",
"value": 7
"value": 7,
},
"check_platformio_interval": {
"description": "Check for the new PlatformIO interval (days)",
"value": 3
"value": 3,
},
"check_platforms_interval": {
"description": "Check for the platform updates interval (days)",
"value": 7
"value": 7,
},
"enable_cache": {
"description": "Enable caching for API requests and Library Manager",
"value": True
},
"strict_ssl": {
"description": "Strict SSL for PlatformIO Services",
"value": False
"value": True,
},
"strict_ssl": {"description": "Strict SSL for PlatformIO Services", "value": False},
"enable_telemetry": {
"description":
("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"value": True
"description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"value": True,
},
"force_verbose": {
"description": "Force verbose output when processing environments",
"value": False
"value": False,
},
"projects_dir": {
"description": "Default location for PlatformIO projects (PIO Home)",
"value": get_default_projects_dir(),
"validator": projects_dir_validate
"validator": projects_dir_validate,
},
}
@ -97,7 +92,6 @@ SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}
class State(object):
def __init__(self, path=None, lock=False):
self.path = path
self.lock = lock
@ -113,8 +107,12 @@ class State(object):
if isfile(self.path):
self._storage = fs.load_json(self.path)
assert isinstance(self._storage, dict)
except (AssertionError, ValueError, UnicodeDecodeError,
exception.InvalidJSONFile):
except (
AssertionError,
ValueError,
UnicodeDecodeError,
exception.InvalidJSONFile,
):
self._storage = {}
return self
@ -174,7 +172,6 @@ class State(object):
class ContentCache(object):
def __init__(self, cache_dir=None):
self.cache_dir = None
self._db_path = None
@ -277,8 +274,11 @@ class ContentCache(object):
continue
expire, path = line.split("=")
try:
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
if (
time() < int(expire)
and isfile(path)
and path not in paths_for_delete
):
newlines.append(line)
continue
except ValueError:
@ -317,11 +317,11 @@ def sanitize_setting(name, value):
defdata = DEFAULT_SETTINGS[name]
try:
if "validator" in defdata:
value = defdata['validator'](value)
elif isinstance(defdata['value'], bool):
value = defdata["validator"](value)
elif isinstance(defdata["value"], bool):
if not isinstance(value, bool):
value = str(value).lower() in ("true", "yes", "y", "1")
elif isinstance(defdata['value'], int):
elif isinstance(defdata["value"], int):
value = int(value)
except Exception:
raise exception.InvalidSettingValue(value, name)
@ -351,24 +351,24 @@ def get_setting(name):
return sanitize_setting(name, getenv(_env_name))
with State() as state:
if "settings" in state and name in state['settings']:
return state['settings'][name]
if "settings" in state and name in state["settings"]:
return state["settings"][name]
return DEFAULT_SETTINGS[name]['value']
return DEFAULT_SETTINGS[name]["value"]
def set_setting(name, value):
with State(lock=True) as state:
if "settings" not in state:
state['settings'] = {}
state['settings'][name] = sanitize_setting(name, value)
state["settings"] = {}
state["settings"][name] = sanitize_setting(name, value)
state.modified = True
def reset_settings():
with State(lock=True) as state:
if "settings" in state:
del state['settings']
del state["settings"]
def get_session_var(name, default=None):
@ -381,11 +381,13 @@ def set_session_var(name, value):
def is_disabled_progressbar():
return any([
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
])
return any(
[
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
]
)
def get_cid():
@ -397,9 +399,16 @@ def get_cid():
uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
try:
uid = requests.get("{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"))).json().get("id")
uid = (
requests.get(
"{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"),
)
)
.json()
.get("id")
)
except: # pylint: disable=bare-except
pass
if not uid:

View File

@ -43,17 +43,27 @@ clivars.AddVariables(
("PROJECT_CONFIG",),
("PIOENV",),
("PIOTEST_RUNNING_NAME",),
("UPLOAD_PORT",)
("UPLOAD_PORT",),
) # yapf: disable
DEFAULT_ENV_OPTIONS = dict(
tools=[
"ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
"pioproject", "piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
"ar",
"gas",
"gcc",
"g++",
"gnulink",
"platformio",
"pioplatform",
"pioproject",
"piowinhooks",
"piolib",
"pioupload",
"piomisc",
"pioide",
],
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
variables=clivars,
# Propagating External Environment
ENV=environ,
UNIX_TIME=int(time()),
@ -75,16 +85,17 @@ DEFAULT_ENV_OPTIONS = dict(
LIBSOURCE_DIRS=[
project_helpers.get_project_lib_dir(),
join("$PROJECTLIBDEPS_DIR", "$PIOENV"),
project_helpers.get_project_global_lib_dir()
project_helpers.get_project_global_lib_dir(),
],
PROGNAME="program",
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PYTHONEXE=get_pythonexe_path())
PYTHONEXE=get_pythonexe_path(),
)
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
DEFAULT_ENV_OPTIONS['ARCOMSTR'] = "Archiving $TARGET"
DEFAULT_ENV_OPTIONS['LINKCOMSTR'] = "Linking $TARGET"
DEFAULT_ENV_OPTIONS['RANLIBCOMSTR'] = "Indexing $TARGET"
DEFAULT_ENV_OPTIONS["ARCOMSTR"] = "Archiving $TARGET"
DEFAULT_ENV_OPTIONS["LINKCOMSTR"] = "Linking $TARGET"
DEFAULT_ENV_OPTIONS["RANLIBCOMSTR"] = "Indexing $TARGET"
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
@ -94,8 +105,10 @@ env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
env.Replace(
**{
key: PlatformBase.decode_scons_arg(env[key])
for key in list(clivars.keys()) if key in env
})
for key in list(clivars.keys())
if key in env
}
)
if env.subst("$BUILDCACHE_DIR"):
if not isdir(env.subst("$BUILDCACHE_DIR")):
@ -106,18 +119,17 @@ if int(ARGUMENTS.get("ISATTY", 0)):
# pylint: disable=protected-access
click._compat.isatty = lambda stream: True
if env.GetOption('clean'):
if env.GetOption("clean"):
env.PioClean(env.subst("$BUILD_DIR"))
env.Exit(0)
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
print("Verbose mode can be enabled via `-v, --verbose` option")
print ("Verbose mode can be enabled via `-v, --verbose` option")
env.LoadProjectOptions()
env.LoadPioPlatform()
env.SConscriptChdir(0)
env.SConsignFile(
join("$BUILD_DIR", ".sconsign.dblite" if PY2 else ".sconsign3.dblite"))
env.SConsignFile(join("$BUILD_DIR", ".sconsign.dblite" if PY2 else ".sconsign3.dblite"))
for item in env.GetExtraScripts("pre"):
env.SConscript(item, exports="env")
@ -144,10 +156,13 @@ if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
Default("checkprogsize")
# Print configured protocols
env.AddPreAction(["upload", "program"],
env.VerboseAction(
lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol..."))
env.AddPreAction(
["upload", "program"],
env.VerboseAction(
lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol...",
),
)
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
@ -155,12 +170,15 @@ AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
##############################################################################
if "envdump" in COMMAND_LINE_TARGETS:
print(env.Dump())
print (env.Dump())
env.Exit(0)
if "idedata" in COMMAND_LINE_TARGETS:
Import("projenv")
print("\n%s\n" % dump_json_to_unicode(
env.DumpIDEData(projenv) # pylint: disable=undefined-variable
))
print (
"\n%s\n"
% dump_json_to_unicode(
env.DumpIDEData(projenv) # pylint: disable=undefined-variable
)
)
env.Exit(0)

View File

@ -45,7 +45,7 @@ def _dump_includes(env, projenv):
join(toolchain_dir, "*", "include*"),
join(toolchain_dir, "*", "include", "c++", "*"),
join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
join(toolchain_dir, "lib", "gcc", "*", "*", "include*")
join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
]
for g in toolchain_incglobs:
includes.extend(glob(g))
@ -54,9 +54,7 @@ def _dump_includes(env, projenv):
if unity_dir:
includes.append(unity_dir)
includes.extend(
[env.subst("$PROJECTINCLUDE_DIR"),
env.subst("$PROJECTSRC_DIR")])
includes.extend([env.subst("$PROJECTINCLUDE_DIR"), env.subst("$PROJECTSRC_DIR")])
# remove duplicates
result = []
@ -71,15 +69,15 @@ def _get_gcc_defines(env):
items = []
try:
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
result = exec_command("echo | %s -dM -E -" % env.subst("$CC"),
env=sysenv,
shell=True)
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command(
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True
)
except OSError:
return items
if result['returncode'] != 0:
if result["returncode"] != 0:
return items
for line in result['out'].split("\n"):
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
@ -94,17 +92,22 @@ def _dump_defines(env):
defines = []
# global symbols
for item in processDefines(env.get("CPPDEFINES", [])):
defines.append(env.subst(item).replace('\\', ''))
defines.append(env.subst(item).replace("\\", ""))
# special symbol for Atmel AVR MCU
if env['PIOPLATFORM'] == "atmelavr":
if env["PIOPLATFORM"] == "atmelavr":
board_mcu = env.get("BOARD_MCU")
if not board_mcu and "BOARD" in env:
board_mcu = env.BoardConfig().get("build.mcu")
if board_mcu:
defines.append(
str("__AVR_%s__" % board_mcu.upper().replace(
"ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
str(
"__AVR_%s__"
% board_mcu.upper()
.replace("ATMEGA", "ATmega")
.replace("ATTINY", "ATtiny")
)
)
# built-in GCC marcos
# if env.GetCompilerType() == "gcc":
@ -140,33 +143,22 @@ def DumpIDEData(env, projenv):
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS"
data = {
"env_name":
env['PIOENV'],
"env_name": env["PIOENV"],
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
"defines":
_dump_defines(env),
"includes":
_dump_includes(env, projenv),
"cc_flags":
env.subst(LINTCCOM),
"cxx_flags":
env.subst(LINTCXXCOM),
"cc_path":
where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
"cxx_path":
where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path":
where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path":
env.subst("$PROG_PATH"),
"flash_extra_images": [{
"offset": item[0],
"path": env.subst(item[1])
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
"svd_path":
_get_svd_path(env),
"compiler_type":
env.GetCompilerType()
"defines": _dump_defines(env),
"includes": _dump_includes(env, projenv),
"cc_flags": env.subst(LINTCCOM),
"cxx_flags": env.subst(LINTCXXCOM),
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path": env.subst("$PROG_PATH"),
"flash_extra_images": [
{"offset": item[0], "path": env.subst(item[1])}
for item in env.get("FLASH_EXTRA_IMAGES", [])
],
"svd_path": _get_svd_path(env),
"compiler_type": env.GetCompilerType(),
}
env_ = env.Clone()
@ -180,10 +172,7 @@ def DumpIDEData(env, projenv):
_new_defines.append(item)
env_.Replace(CPPDEFINES=_new_defines)
data.update({
"cc_flags": env_.subst(LINTCCOM),
"cxx_flags": env_.subst(LINTCXXCOM)
})
data.update({"cc_flags": env_.subst(LINTCCOM), "cxx_flags": env_.subst(LINTCXXCOM)})
return data

View File

@ -22,8 +22,16 @@ import hashlib
import os
import re
import sys
from os.path import (basename, commonprefix, expanduser, isdir, isfile, join,
realpath, sep)
from os.path import (
basename,
commonprefix,
expanduser,
isdir,
isfile,
join,
realpath,
sep,
)
import click
import SCons.Scanner # pylint: disable=import-error
@ -33,13 +41,16 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.builder.tools import platformio as piotool
from platformio.compat import (WINDOWS, get_file_contents, hashlib_encode_data,
string_types)
from platformio.compat import (
WINDOWS,
get_file_contents,
hashlib_encode_data,
string_types,
)
from platformio.managers.lib import LibraryManager
class LibBuilderFactory(object):
@staticmethod
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
clsname = "UnknownLibBuilder"
@ -47,31 +58,30 @@ class LibBuilderFactory(object):
clsname = "PlatformIOLibBuilder"
else:
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
common_frameworks = (set(env.get("PIOFRAMEWORK", []))
& set(used_frameworks))
common_frameworks = set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks)
if common_frameworks:
clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
elif used_frameworks:
clsname = "%sLibBuilder" % used_frameworks[0].title()
obj = getattr(sys.modules[__name__], clsname)(env,
path,
verbose=verbose)
obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose)
assert isinstance(obj, LibBuilderBase)
return obj
@staticmethod
def get_used_frameworks(env, path):
if any(
isfile(join(path, fname))
for fname in ("library.properties", "keywords.txt")):
isfile(join(path, fname))
for fname in ("library.properties", "keywords.txt")
):
return ["arduino"]
if isfile(join(path, "module.json")):
return ["mbed"]
include_re = re.compile(r'^#include\s+(<|")(Arduino|mbed)\.h(<|")',
flags=re.MULTILINE)
include_re = re.compile(
r'^#include\s+(<|")(Arduino|mbed)\.h(<|")', flags=re.MULTILINE
)
# check source files
for root, _, files in os.walk(path, followlinks=True):
@ -79,7 +89,8 @@ class LibBuilderFactory(object):
return ["mbed"]
for fname in files:
if not fs.path_endswith_ext(
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
):
continue
content = get_file_contents(join(root, fname))
if not content:
@ -124,7 +135,7 @@ class LibBuilderBase(object):
self._processed_files = list()
# reset source filter, could be overridden with extra script
self.env['SRC_FILTER'] = ""
self.env["SRC_FILTER"] = ""
# process extra options and append to build environment
self.process_extra_options()
@ -153,7 +164,8 @@ class LibBuilderBase(object):
@property
def dependencies(self):
return LibraryManager.normalize_dependencies(
self._manifest.get("dependencies", []))
self._manifest.get("dependencies", [])
)
@property
def src_filter(self):
@ -161,7 +173,7 @@ class LibBuilderBase(object):
"-<example%s>" % os.sep,
"-<examples%s>" % os.sep,
"-<test%s>" % os.sep,
"-<tests%s>" % os.sep
"-<tests%s>" % os.sep,
]
@property
@ -172,8 +184,7 @@ class LibBuilderBase(object):
@property
def src_dir(self):
return (join(self.path, "src")
if isdir(join(self.path, "src")) else self.path)
return join(self.path, "src") if isdir(join(self.path, "src")) else self.path
def get_include_dirs(self):
items = []
@ -234,8 +245,7 @@ class LibBuilderBase(object):
@property
def lib_compat_mode(self):
return self.env.GetProjectOption("lib_compat_mode",
self.COMPAT_MODE_DEFAULT)
return self.env.GetProjectOption("lib_compat_mode", self.COMPAT_MODE_DEFAULT)
@staticmethod
def validate_compat_mode(mode):
@ -263,11 +273,10 @@ class LibBuilderBase(object):
self.env.ProcessFlags(self.build_flags)
if self.extra_script:
self.env.SConscriptChdir(1)
self.env.SConscript(realpath(self.extra_script),
exports={
"env": self.env,
"pio_lib_builder": self
})
self.env.SConscript(
realpath(self.extra_script),
exports={"env": self.env, "pio_lib_builder": self},
)
self.env.ProcessUnFlags(self.build_unflags)
def process_dependencies(self):
@ -276,7 +285,7 @@ class LibBuilderBase(object):
for item in self.dependencies:
found = False
for lb in self.env.GetLibBuilders():
if item['name'] != lb.name:
if item["name"] != lb.name:
continue
found = True
if lb not in self.depbuilders:
@ -284,37 +293,43 @@ class LibBuilderBase(object):
break
if not found and self.verbose:
sys.stderr.write("Warning: Ignored `%s` dependency for `%s` "
"library\n" % (item['name'], self.name))
sys.stderr.write(
"Warning: Ignored `%s` dependency for `%s` "
"library\n" % (item["name"], self.name)
)
def get_search_files(self):
items = [
join(self.src_dir, item) for item in self.env.MatchSourceFiles(
self.src_dir, self.src_filter)
join(self.src_dir, item)
for item in self.env.MatchSourceFiles(self.src_dir, self.src_filter)
]
include_dir = self.include_dir
if include_dir:
items.extend([
join(include_dir, item)
for item in self.env.MatchSourceFiles(include_dir)
])
items.extend(
[
join(include_dir, item)
for item in self.env.MatchSourceFiles(include_dir)
]
)
return items
def _get_found_includes( # pylint: disable=too-many-branches
self, search_files=None):
self, search_files=None
):
# all include directories
if not LibBuilderBase._INCLUDE_DIRS_CACHE:
LibBuilderBase._INCLUDE_DIRS_CACHE = []
for lb in self.env.GetLibBuilders():
LibBuilderBase._INCLUDE_DIRS_CACHE.extend(
[self.env.Dir(d) for d in lb.get_include_dirs()])
[self.env.Dir(d) for d in lb.get_include_dirs()]
)
# append self include directories
include_dirs = [self.env.Dir(d) for d in self.get_include_dirs()]
include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE)
result = []
for path in (search_files or []):
for path in search_files or []:
if path in self._processed_files:
continue
self._processed_files.append(path)
@ -325,19 +340,25 @@ class LibBuilderBase(object):
self.env.File(path),
self.env,
tuple(include_dirs),
depth=self.CCONDITIONAL_SCANNER_DEPTH)
depth=self.CCONDITIONAL_SCANNER_DEPTH,
)
# mark candidates already processed via Conditional Scanner
self._processed_files.extend([
c.get_abspath() for c in candidates
if c.get_abspath() not in self._processed_files
])
self._processed_files.extend(
[
c.get_abspath()
for c in candidates
if c.get_abspath() not in self._processed_files
]
)
except Exception as e: # pylint: disable=broad-except
if self.verbose and "+" in self.lib_ldf_mode:
sys.stderr.write(
"Warning! Classic Pre Processor is used for `%s`, "
"advanced has failed with `%s`\n" % (path, e))
"advanced has failed with `%s`\n" % (path, e)
)
candidates = LibBuilderBase.CLASSIC_SCANNER(
self.env.File(path), self.env, tuple(include_dirs))
self.env.File(path), self.env, tuple(include_dirs)
)
# print(path, map(lambda n: n.get_abspath(), candidates))
for item in candidates:
@ -348,7 +369,7 @@ class LibBuilderBase(object):
_h_path = item.get_abspath()
if not fs.path_endswith_ext(_h_path, piotool.SRC_HEADER_EXT):
continue
_f_part = _h_path[:_h_path.rindex(".")]
_f_part = _h_path[: _h_path.rindex(".")]
for ext in piotool.SRC_C_EXT:
if not isfile("%s.%s" % (_f_part, ext)):
continue
@ -359,7 +380,6 @@ class LibBuilderBase(object):
return result
def depend_recursive(self, lb, search_files=None):
def _already_depends(_lb):
if self in _lb.depbuilders:
return True
@ -372,9 +392,10 @@ class LibBuilderBase(object):
if self != lb:
if _already_depends(lb):
if self.verbose:
sys.stderr.write("Warning! Circular dependencies detected "
"between `%s` and `%s`\n" %
(self.path, lb.path))
sys.stderr.write(
"Warning! Circular dependencies detected "
"between `%s` and `%s`\n" % (self.path, lb.path)
)
self._circular_deps.append(lb)
elif lb not in self._depbuilders:
self._depbuilders.append(lb)
@ -431,11 +452,10 @@ class LibBuilderBase(object):
if self.lib_archive:
libs.append(
self.env.BuildLibrary(self.build_dir, self.src_dir,
self.src_filter))
self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
)
else:
self.env.BuildSources(self.build_dir, self.src_dir,
self.src_filter)
self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)
return libs
@ -444,7 +464,6 @@ class UnknownLibBuilder(LibBuilderBase):
class ArduinoLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest = {}
if not isfile(join(self.path, "library.properties")):
@ -508,7 +527,7 @@ class ArduinoLibBuilder(LibBuilderBase):
"esp32": ["espressif32"],
"arc32": ["intel_arc32"],
"stm32": ["ststm32"],
"nrf5": ["nordicnrf51", "nordicnrf52"]
"nrf5": ["nordicnrf51", "nordicnrf52"],
}
items = []
for arch in self._manifest.get("architectures", "").split(","):
@ -524,7 +543,6 @@ class ArduinoLibBuilder(LibBuilderBase):
class MbedLibBuilder(LibBuilderBase):
def load_manifest(self):
if not isfile(join(self.path, "module.json")):
return {}
@ -611,14 +629,15 @@ class MbedLibBuilder(LibBuilderBase):
# default macros
for macro in manifest.get("macros", []):
macro = self._mbed_normalize_macro(macro)
macros[macro['name']] = macro
macros[macro["name"]] = macro
# configuration items
for key, options in manifest.get("config", {}).items():
if "value" not in options:
continue
macros[key] = dict(name=options.get("macro_name"),
value=options.get("value"))
macros[key] = dict(
name=options.get("macro_name"), value=options.get("value")
)
# overrode items per target
for target, options in manifest.get("target_overrides", {}).items():
@ -626,25 +645,23 @@ class MbedLibBuilder(LibBuilderBase):
continue
for macro in options.get("target.macros_add", []):
macro = self._mbed_normalize_macro(macro)
macros[macro['name']] = macro
macros[macro["name"]] = macro
for key, value in options.items():
if not key.startswith("target.") and key in macros:
macros[key]['value'] = value
macros[key]["value"] = value
# normalize macro names
for key, macro in macros.items():
if not macro['name']:
macro['name'] = key
if "." not in macro['name']:
macro['name'] = "%s.%s" % (manifest.get("name"),
macro['name'])
macro['name'] = re.sub(r"[^a-z\d]+",
"_",
macro['name'],
flags=re.I).upper()
macro['name'] = "MBED_CONF_" + macro['name']
if isinstance(macro['value'], bool):
macro['value'] = 1 if macro['value'] else 0
if not macro["name"]:
macro["name"] = key
if "." not in macro["name"]:
macro["name"] = "%s.%s" % (manifest.get("name"), macro["name"])
macro["name"] = re.sub(
r"[^a-z\d]+", "_", macro["name"], flags=re.I
).upper()
macro["name"] = "MBED_CONF_" + macro["name"]
if isinstance(macro["value"], bool):
macro["value"] = 1 if macro["value"] else 0
return {macro["name"]: macro["value"] for macro in macros.values()}
@ -654,13 +671,13 @@ class MbedLibBuilder(LibBuilderBase):
for line in fp.readlines():
line = line.strip()
if line == "#endif":
lines.append(
"// PlatformIO Library Dependency Finder (LDF)")
lines.extend([
"#define %s %s" %
(name, value if value is not None else "")
for name, value in macros.items()
])
lines.append("// PlatformIO Library Dependency Finder (LDF)")
lines.extend(
[
"#define %s %s" % (name, value if value is not None else "")
for name, value in macros.items()
]
)
lines.append("")
if not line.startswith("#define"):
lines.append(line)
@ -674,7 +691,6 @@ class MbedLibBuilder(LibBuilderBase):
class PlatformIOLibBuilder(LibBuilderBase):
def load_manifest(self):
assert isfile(join(self.path, "library.json"))
manifest = fs.load_json(join(self.path, "library.json"))
@ -682,9 +698,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
# replace "espressif" old name dev/platform with ESP8266
if "platforms" in manifest:
manifest['platforms'] = [
manifest["platforms"] = [
"espressif8266" if p == "espressif" else p
for p in util.items_to_list(manifest['platforms'])
for p in util.items_to_list(manifest["platforms"])
]
return manifest
@ -710,8 +726,8 @@ class PlatformIOLibBuilder(LibBuilderBase):
def src_filter(self):
if "srcFilter" in self._manifest.get("build", {}):
return self._manifest.get("build").get("srcFilter")
if self.env['SRC_FILTER']:
return self.env['SRC_FILTER']
if self.env["SRC_FILTER"]:
return self.env["SRC_FILTER"]
if self._is_arduino_manifest():
return ArduinoLibBuilder.src_filter.fget(self)
return LibBuilderBase.src_filter.fget(self)
@ -740,7 +756,8 @@ class PlatformIOLibBuilder(LibBuilderBase):
if global_value is not None:
return global_value
return self._manifest.get("build", {}).get(
"libArchive", LibBuilderBase.lib_archive.fget(self))
"libArchive", LibBuilderBase.lib_archive.fget(self)
)
@property
def lib_ldf_mode(self):
@ -748,7 +765,10 @@ class PlatformIOLibBuilder(LibBuilderBase):
self.env.GetProjectOption(
"lib_ldf_mode",
self._manifest.get("build", {}).get(
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self))))
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self)
),
)
)
@property
def lib_compat_mode(self):
@ -756,8 +776,10 @@ class PlatformIOLibBuilder(LibBuilderBase):
self.env.GetProjectOption(
"lib_compat_mode",
self._manifest.get("build", {}).get(
"libCompatMode",
LibBuilderBase.lib_compat_mode.fget(self))))
"libCompatMode", LibBuilderBase.lib_compat_mode.fget(self)
),
)
)
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms")
@ -775,9 +797,12 @@ class PlatformIOLibBuilder(LibBuilderBase):
include_dirs = LibBuilderBase.get_include_dirs(self)
# backwards compatibility with PlatformIO 2.0
if ("build" not in self._manifest and self._is_arduino_manifest()
and not isdir(join(self.path, "src"))
and isdir(join(self.path, "utility"))):
if (
"build" not in self._manifest
and self._is_arduino_manifest()
and not isdir(join(self.path, "src"))
and isdir(join(self.path, "utility"))
):
include_dirs.append(join(self.path, "utility"))
for path in self.env.get("CPPPATH", []):
@ -788,12 +813,11 @@ class PlatformIOLibBuilder(LibBuilderBase):
class ProjectAsLibBuilder(LibBuilderBase):
def __init__(self, env, *args, **kwargs):
# backup original value, will be reset in base.__init__
project_src_filter = env.get("SRC_FILTER")
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
self.env['SRC_FILTER'] = project_src_filter
self.env["SRC_FILTER"] = project_src_filter
@property
def include_dir(self):
@ -819,11 +843,14 @@ class ProjectAsLibBuilder(LibBuilderBase):
items = LibBuilderBase.get_search_files(self)
# test files
if "__test" in COMMAND_LINE_TARGETS:
items.extend([
join("$PROJECTTEST_DIR",
item) for item in self.env.MatchSourceFiles(
"$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER")
])
items.extend(
[
join("$PROJECTTEST_DIR", item)
for item in self.env.MatchSourceFiles(
"$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER"
)
]
)
return items
@property
@ -836,8 +863,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def src_filter(self):
return (self.env.get("SRC_FILTER")
or LibBuilderBase.src_filter.fget(self))
return self.env.get("SRC_FILTER") or LibBuilderBase.src_filter.fget(self)
@property
def dependencies(self):
@ -848,7 +874,6 @@ class ProjectAsLibBuilder(LibBuilderBase):
pass
def install_dependencies(self):
def _is_builtin(uri):
for lb in self.env.GetLibBuilders():
if lb.name == uri:
@ -871,8 +896,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
not_found_uri.append(uri)
did_install = False
lm = LibraryManager(
self.env.subst(join("$PROJECTLIBDEPS_DIR", "$PIOENV")))
lm = LibraryManager(self.env.subst(join("$PROJECTLIBDEPS_DIR", "$PIOENV")))
for uri in not_found_uri:
try:
lm.install(uri)
@ -923,28 +947,27 @@ class ProjectAsLibBuilder(LibBuilderBase):
def GetLibSourceDirs(env):
items = env.GetProjectOption("lib_extra_dirs", [])
items.extend(env['LIBSOURCE_DIRS'])
items.extend(env["LIBSOURCE_DIRS"])
return [
env.subst(expanduser(item) if item.startswith("~") else item)
for item in items
env.subst(expanduser(item) if item.startswith("~") else item) for item in items
]
def IsCompatibleLibBuilder(env,
lb,
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
def IsCompatibleLibBuilder(env, lb, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
compat_mode = lb.lib_compat_mode
if lb.name in env.GetProjectOption("lib_ignore", []):
if verbose:
sys.stderr.write("Ignored library %s\n" % lb.path)
return None
if compat_mode == "strict" and not lb.is_platforms_compatible(
env['PIOPLATFORM']):
if compat_mode == "strict" and not lb.is_platforms_compatible(env["PIOPLATFORM"]):
if verbose:
sys.stderr.write("Platform incompatible library %s\n" % lb.path)
return False
if (compat_mode in ("soft", "strict") and "PIOFRAMEWORK" in env
and not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", []))):
if (
compat_mode in ("soft", "strict")
and "PIOFRAMEWORK" in env
and not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", []))
):
if verbose:
sys.stderr.write("Framework incompatible library %s\n" % lb.path)
return False
@ -953,8 +976,10 @@ def IsCompatibleLibBuilder(env,
def GetLibBuilders(env): # pylint: disable=too-many-branches
if DefaultEnvironment().get("__PIO_LIB_BUILDERS", None) is not None:
return sorted(DefaultEnvironment()['__PIO_LIB_BUILDERS'],
key=lambda lb: 0 if lb.dependent else 1)
return sorted(
DefaultEnvironment()["__PIO_LIB_BUILDERS"],
key=lambda lb: 0 if lb.dependent else 1,
)
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=[])
@ -974,7 +999,8 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
except exception.InvalidJSONFile:
if verbose:
sys.stderr.write(
"Skip library with broken manifest: %s\n" % lib_dir)
"Skip library with broken manifest: %s\n" % lib_dir
)
continue
if env.IsCompatibleLibBuilder(lb):
DefaultEnvironment().Append(__PIO_LIB_BUILDERS=[lb])
@ -989,15 +1015,15 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
if verbose and found_incompat:
sys.stderr.write(
"More details about \"Library Compatibility Mode\": "
'More details about "Library Compatibility Mode": '
"https://docs.platformio.org/page/librarymanager/ldf.html#"
"ldf-compat-mode\n")
"ldf-compat-mode\n"
)
return DefaultEnvironment()['__PIO_LIB_BUILDERS']
return DefaultEnvironment()["__PIO_LIB_BUILDERS"]
def ConfigureProjectLibBuilder(env):
def _get_vcs_info(lb):
path = LibraryManager.get_src_manifest_path(lb.path)
return fs.load_json(path) if path else None
@ -1036,26 +1062,28 @@ def ConfigureProjectLibBuilder(env):
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
print("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
print("LDF Modes: Finder ~ %s, Compatibility ~ %s" %
(ldf_mode, project.lib_compat_mode))
print ("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
print (
"LDF Modes: Finder ~ %s, Compatibility ~ %s"
% (ldf_mode, project.lib_compat_mode)
)
project.install_dependencies()
lib_builders = env.GetLibBuilders()
print("Found %d compatible libraries" % len(lib_builders))
print ("Found %d compatible libraries" % len(lib_builders))
print("Scanning dependencies...")
print ("Scanning dependencies...")
project.search_deps_recursive()
if ldf_mode.startswith("chain") and project.depbuilders:
_correct_found_libs(lib_builders)
if project.depbuilders:
print("Dependency Graph")
print ("Dependency Graph")
_print_deps_tree(project)
else:
print("No dependencies")
print ("No dependencies")
return project

View File

@ -39,7 +39,9 @@ class InoToCPPConverter(object):
([a-z_\d]+\s*) # name of prototype
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
)\s*(\{|;) # must end with `{` or `;`
""", re.X | re.M | re.I)
""",
re.X | re.M | re.I,
)
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
@ -61,9 +63,7 @@ class InoToCPPConverter(object):
lines = []
for node in nodes:
contents = get_file_contents(node.get_path())
_lines = [
'# 1 "%s"' % node.get_path().replace("\\", "/"), contents
]
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
if self.is_main_node(contents):
lines = _lines + lines
self._main_ino = node.get_path()
@ -91,8 +91,11 @@ class InoToCPPConverter(object):
self.env.Execute(
self.env.VerboseAction(
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
out_file, tmp_path),
"Converting " + basename(out_file[:-4])))
out_file, tmp_path
),
"Converting " + basename(out_file[:-4]),
)
)
atexit.register(_delete_file, tmp_path)
return isfile(out_file)
@ -120,8 +123,9 @@ class InoToCPPConverter(object):
elif stropen and line.endswith(('",', '";')):
newlines[len(newlines) - 1] += line
stropen = False
newlines.append('#line %d "%s"' %
(linenum, self._main_ino.replace("\\", "/")))
newlines.append(
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
)
continue
newlines.append(line)
@ -141,8 +145,10 @@ class InoToCPPConverter(object):
prototypes = []
reserved_keywords = set(["if", "else", "while"])
for match in self.PROTOTYPE_RE.finditer(contents):
if (set([match.group(2).strip(),
match.group(3).strip()]) & reserved_keywords):
if (
set([match.group(2).strip(), match.group(3).strip()])
& reserved_keywords
):
continue
prototypes.append(match)
return prototypes
@ -162,11 +168,8 @@ class InoToCPPConverter(object):
prototypes = self._parse_prototypes(contents) or []
# skip already declared prototypes
declared = set(
m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [
m for m in prototypes if m.group(1).strip() not in declared
]
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
if not prototypes:
return contents
@ -175,23 +178,29 @@ class InoToCPPConverter(object):
split_pos = prototypes[0].start()
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
contents[:split_pos], re.M)
contents[:split_pos],
re.M,
)
if match_ptrs:
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
result = []
result.append(contents[:split_pos].strip())
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
result.append('#line %d "%s"' % (self._get_total_lines(
contents[:split_pos]), self._main_ino.replace("\\", "/")))
result.append(
'#line %d "%s"'
% (
self._get_total_lines(contents[:split_pos]),
self._main_ino.replace("\\", "/"),
)
)
result.append(contents[split_pos:].strip())
return "\n".join(result)
def ConvertInoToCpp(env):
src_dir = glob_escape(env.subst("$PROJECTSRC_DIR"))
ino_nodes = (env.Glob(join(src_dir, "*.ino")) +
env.Glob(join(src_dir, "*.pde")))
ino_nodes = env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde"))
if not ino_nodes:
return
c = InoToCPPConverter(env)
@ -214,13 +223,13 @@ def _get_compiler_type(env):
return "gcc"
try:
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
except OSError:
return None
if result['returncode'] != 0:
if result["returncode"] != 0:
return None
output = "".join([result['out'], result['err']]).lower()
output = "".join([result["out"], result["err"]]).lower()
if "clang" in output and "LLVM" in output:
return "clang"
if "gcc" in output:
@ -233,7 +242,6 @@ def GetCompilerType(env):
def GetActualLDScript(env):
def _lookup_in_ldpath(script):
for d in env.get("LIBPATH", []):
path = join(env.subst(d), script)
@ -264,12 +272,13 @@ def GetActualLDScript(env):
if script:
sys.stderr.write(
"Error: Could not find '%s' LD script in LDPATH '%s'\n" %
(script, env.subst("$LIBPATH")))
"Error: Could not find '%s' LD script in LDPATH '%s'\n"
% (script, env.subst("$LIBPATH"))
)
env.Exit(1)
if not script and "LDSCRIPT_PATH" in env:
path = _lookup_in_ldpath(env['LDSCRIPT_PATH'])
path = _lookup_in_ldpath(env["LDSCRIPT_PATH"])
if path:
return path
@ -285,16 +294,17 @@ def VerboseAction(_, act, actstr):
def PioClean(env, clean_dir):
if not isdir(clean_dir):
print("Build environment is clean")
print ("Build environment is clean")
env.Exit(0)
clean_rel_path = relpath(clean_dir)
for root, _, files in walk(clean_dir):
for f in files:
dst = join(root, f)
remove(dst)
print("Removed %s" %
(dst if clean_rel_path.startswith(".") else relpath(dst)))
print("Done cleaning")
print (
"Removed %s" % (dst if clean_rel_path.startswith(".") else relpath(dst))
)
print ("Done cleaning")
fs.rmtree(clean_dir)
env.Exit(0)
@ -302,8 +312,9 @@ def PioClean(env, clean_dir):
def ProcessDebug(env):
if not env.subst("$PIODEBUGFLAGS"):
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
env.Append(BUILD_FLAGS=list(env['PIODEBUGFLAGS']) +
["-D__PLATFORMIO_BUILD_DEBUG__"])
env.Append(
BUILD_FLAGS=list(env["PIODEBUGFLAGS"]) + ["-D__PLATFORMIO_BUILD_DEBUG__"]
)
unflags = ["-Os"]
for level in [0, 1, 2]:
for flag in ("O", "g", "ggdb"):
@ -312,15 +323,18 @@ def ProcessDebug(env):
def ProcessTest(env):
env.Append(CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
unitylib = env.BuildLibrary(join("$BUILD_DIR", "UnityTestLib"),
get_core_package_dir("tool-unity"))
env.Append(
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")],
)
unitylib = env.BuildLibrary(
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity")
)
env.Prepend(LIBS=[unitylib])
src_filter = ["+<*.cpp>", "+<*.c>"]
if "PIOTEST_RUNNING_NAME" in env:
src_filter.append("+<%s%s>" % (env['PIOTEST_RUNNING_NAME'], sep))
src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], sep))
env.Replace(PIOTEST_SRC_FILTER=src_filter)
@ -330,7 +344,7 @@ def GetExtraScripts(env, scope):
if scope == "post" and ":" not in item:
items.append(item)
elif item.startswith("%s:" % scope):
items.append(item[len(scope) + 1:])
items.append(item[len(scope) + 1 :])
if not items:
return items
with fs.cd(env.subst("$PROJECT_DIR")):

View File

@ -33,8 +33,8 @@ def PioPlatform(env):
variables = env.GetProjectOptions(as_dict=True)
if "framework" in variables:
# support PIO Core 3.0 dev/platforms
variables['pioframework'] = variables['framework']
p = PlatformFactory.newPlatform(env['PLATFORM_MANIFEST'])
variables["pioframework"] = variables["framework"]
p = PlatformFactory.newPlatform(env["PLATFORM_MANIFEST"])
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
return p
@ -54,7 +54,7 @@ def BoardConfig(env, board=None):
def GetFrameworkScript(env, framework):
p = env.PioPlatform()
assert p.frameworks and framework in p.frameworks
script_path = env.subst(p.frameworks[framework]['script'])
script_path = env.subst(p.frameworks[framework]["script"])
if not isfile(script_path):
script_path = join(p.get_dir(), script_path)
return script_path
@ -65,7 +65,7 @@ def LoadPioPlatform(env):
installed_packages = p.get_installed_packages()
# Ensure real platform name
env['PIOPLATFORM'] = p.name
env["PIOPLATFORM"] = p.name
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
systype = util.get_systype()
@ -75,14 +75,13 @@ def LoadPioPlatform(env):
continue
pkg_dir = p.get_package_dir(name)
env.PrependENVPath(
"PATH",
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
if (not WINDOWS and isdir(join(pkg_dir, "lib"))
and type_ != "toolchain"):
"PATH", join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir
)
if not WINDOWS and isdir(join(pkg_dir, "lib")) and type_ != "toolchain":
env.PrependENVPath(
"DYLD_LIBRARY_PATH"
if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"))
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"),
)
# Platform specific LD Scripts
if isdir(join(p.get_dir(), "ldscripts")):
@ -101,9 +100,11 @@ def LoadPioPlatform(env):
for option_meta in ProjectOptions.values():
if not option_meta.buildenvvar or option_meta.buildenvvar in env:
continue
data_path = (option_meta.name[6:]
if option_meta.name.startswith("board_") else
option_meta.name.replace("_", "."))
data_path = (
option_meta.name[6:]
if option_meta.name.startswith("board_")
else option_meta.name.replace("_", ".")
)
try:
env[option_meta.buildenvvar] = board_config.get(data_path)
except KeyError:
@ -118,22 +119,25 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
board_config = env.BoardConfig() if "BOARD" in env else None
def _get_configuration_data():
return None if not board_config else [
"CONFIGURATION:",
"https://docs.platformio.org/page/boards/%s/%s.html" %
(platform.name, board_config.id)
]
return (
None
if not board_config
else [
"CONFIGURATION:",
"https://docs.platformio.org/page/boards/%s/%s.html"
% (platform.name, board_config.id),
]
)
def _get_plaform_data():
data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
src_manifest_path = platform.pm.get_src_manifest_path(
platform.get_dir())
src_manifest_path = platform.pm.get_src_manifest_path(platform.get_dir())
if src_manifest_path:
src_manifest = fs.load_json(src_manifest_path)
if "version" in src_manifest:
data.append("#" + src_manifest['version'])
data.append("#" + src_manifest["version"])
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
data.append("(%s)" % src_manifest['url'])
data.append("(%s)" % src_manifest["url"])
if board_config:
data.extend([">", board_config.get("name")])
return data
@ -151,19 +155,22 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
return data
ram = board_config.get("upload", {}).get("maximum_ram_size")
flash = board_config.get("upload", {}).get("maximum_size")
data.append("%s RAM, %s Flash" %
(fs.format_filesize(ram), fs.format_filesize(flash)))
data.append(
"%s RAM, %s Flash" % (fs.format_filesize(ram), fs.format_filesize(flash))
)
return data
def _get_debug_data():
debug_tools = board_config.get(
"debug", {}).get("tools") if board_config else None
debug_tools = (
board_config.get("debug", {}).get("tools") if board_config else None
)
if not debug_tools:
return None
data = [
"DEBUG:", "Current",
"(%s)" % board_config.get_debug_tool_name(
env.GetProjectOption("debug_tool"))
"DEBUG:",
"Current",
"(%s)"
% board_config.get_debug_tool_name(env.GetProjectOption("debug_tool")),
]
onboard = []
external = []
@ -187,23 +194,27 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
if not pkg_dir:
continue
manifest = platform.pm.load_manifest(pkg_dir)
original_version = util.get_original_version(manifest['version'])
info = "%s %s" % (manifest['name'], manifest['version'])
original_version = util.get_original_version(manifest["version"])
info = "%s %s" % (manifest["name"], manifest["version"])
extra = []
if original_version:
extra.append(original_version)
if "__src_url" in manifest and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(manifest['__src_url'])
extra.append(manifest["__src_url"])
if extra:
info += " (%s)" % ", ".join(extra)
data.append(info)
return ["PACKAGES:", ", ".join(data)]
for data in (_get_configuration_data(), _get_plaform_data(),
_get_hardware_data(), _get_debug_data(),
_get_packages_data()):
for data in (
_get_configuration_data(),
_get_plaform_data(),
_get_hardware_data(),
_get_debug_data(),
_get_packages_data(),
):
if data and len(data) > 1:
print(" ".join(data))
print (" ".join(data))
def exists(_):

View File

@ -18,22 +18,25 @@ from platformio.project.config import ProjectConfig, ProjectOptions
def GetProjectConfig(env):
return ProjectConfig.get_instance(env['PROJECT_CONFIG'])
return ProjectConfig.get_instance(env["PROJECT_CONFIG"])
def GetProjectOptions(env, as_dict=False):
return env.GetProjectConfig().items(env=env['PIOENV'], as_dict=as_dict)
return env.GetProjectConfig().items(env=env["PIOENV"], as_dict=as_dict)
def GetProjectOption(env, option, default=None):
return env.GetProjectConfig().get("env:" + env['PIOENV'], option, default)
return env.GetProjectConfig().get("env:" + env["PIOENV"], option, default)
def LoadProjectOptions(env):
for option, value in env.GetProjectOptions():
option_meta = ProjectOptions.get("env." + option)
if (not option_meta or not option_meta.buildenvvar
or option_meta.buildenvvar in env):
if (
not option_meta
or not option_meta.buildenvvar
or option_meta.buildenvvar in env
):
continue
env[option_meta.buildenvvar] = value

View File

@ -45,7 +45,7 @@ def FlushSerialBuffer(env, port):
def TouchSerialPort(env, port, baudrate):
port = env.subst(port)
print("Forcing reset using %dbps open/close on port %s" % (baudrate, port))
print ("Forcing reset using %dbps open/close on port %s" % (baudrate, port))
try:
s = Serial(port=port, baudrate=baudrate)
s.setDTR(False)
@ -56,13 +56,13 @@ def TouchSerialPort(env, port, baudrate):
def WaitForNewSerialPort(env, before):
print("Waiting for the new upload port...")
print ("Waiting for the new upload port...")
prev_port = env.subst("$UPLOAD_PORT")
new_port = None
elapsed = 0
before = [p['port'] for p in before]
before = [p["port"] for p in before]
while elapsed < 5 and new_port is None:
now = [p['port'] for p in util.get_serial_ports()]
now = [p["port"] for p in util.get_serial_ports()]
for p in now:
if p not in before:
new_port = p
@ -84,10 +84,12 @@ def WaitForNewSerialPort(env, before):
sleep(1)
if not new_port:
sys.stderr.write("Error: Couldn't find a board on the selected port. "
"Check that you have the correct port selected. "
"If it is correct, try pressing the board's reset "
"button after initiating the upload.\n")
sys.stderr.write(
"Error: Couldn't find a board on the selected port. "
"Check that you have the correct port selected. "
"If it is correct, try pressing the board's reset "
"button after initiating the upload.\n"
)
env.Exit(1)
return new_port
@ -99,8 +101,8 @@ def AutodetectUploadPort(*args, **kwargs):
def _get_pattern():
if "UPLOAD_PORT" not in env:
return None
if set(["*", "?", "[", "]"]) & set(env['UPLOAD_PORT']):
return env['UPLOAD_PORT']
if set(["*", "?", "[", "]"]) & set(env["UPLOAD_PORT"]):
return env["UPLOAD_PORT"]
return None
def _is_match_pattern(port):
@ -112,17 +114,13 @@ def AutodetectUploadPort(*args, **kwargs):
def _look_for_mbed_disk():
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
for item in util.get_logical_devices():
if item['path'].startswith("/net") or not _is_match_pattern(
item['path']):
if item["path"].startswith("/net") or not _is_match_pattern(item["path"]):
continue
mbed_pages = [
join(item['path'], n) for n in ("mbed.htm", "mbed.html")
]
mbed_pages = [join(item["path"], n) for n in ("mbed.htm", "mbed.html")]
if any(isfile(p) for p in mbed_pages):
return item['path']
if item['name'] \
and any(l in item['name'].lower() for l in msdlabels):
return item['path']
return item["path"]
if item["name"] and any(l in item["name"].lower() for l in msdlabels):
return item["path"]
return None
def _look_for_serial_port():
@ -132,27 +130,27 @@ def AutodetectUploadPort(*args, **kwargs):
if "BOARD" in env and "build.hwids" in env.BoardConfig():
board_hwids = env.BoardConfig().get("build.hwids")
for item in util.get_serial_ports(filter_hwid=True):
if not _is_match_pattern(item['port']):
if not _is_match_pattern(item["port"]):
continue
port = item['port']
port = item["port"]
if upload_protocol.startswith("blackmagic"):
if WINDOWS and port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item['description']:
if "GDB" in item["description"]:
return port
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item['hwid']:
if hwid_str in item["hwid"]:
return port
return port
if "UPLOAD_PORT" in env and not _get_pattern():
print(env.subst("Use manually specified: $UPLOAD_PORT"))
print (env.subst("Use manually specified: $UPLOAD_PORT"))
return
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
or ("mbed" in env.subst("$PIOFRAMEWORK")
and not env.subst("$UPLOAD_PROTOCOL"))):
if env.subst("$UPLOAD_PROTOCOL") == "mbed" or (
"mbed" in env.subst("$PIOFRAMEWORK") and not env.subst("$UPLOAD_PROTOCOL")
):
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
else:
try:
@ -162,13 +160,14 @@ def AutodetectUploadPort(*args, **kwargs):
env.Replace(UPLOAD_PORT=_look_for_serial_port())
if env.subst("$UPLOAD_PORT"):
print(env.subst("Auto-detected: $UPLOAD_PORT"))
print (env.subst("Auto-detected: $UPLOAD_PORT"))
else:
sys.stderr.write(
"Error: Please specify `upload_port` for environment or use "
"global `--upload-port` option.\n"
"For some development platforms it can be a USB flash "
"drive (i.e. /media/<user>/<device name>)\n")
"drive (i.e. /media/<user>/<device name>)\n"
)
env.Exit(1)
@ -179,16 +178,17 @@ def UploadToDisk(_, target, source, env):
fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
if not isfile(fpath):
continue
copyfile(fpath,
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
print("Firmware has been successfully uploaded.\n"
"(Some boards may require manual hard reset)")
copyfile(fpath, join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
print (
"Firmware has been successfully uploaded.\n"
"(Some boards may require manual hard reset)"
)
def CheckUploadSize(_, target, source, env):
check_conditions = [
env.get("BOARD"),
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
env.get("SIZETOOL") or env.get("SIZECHECKCMD"),
]
if not all(check_conditions):
return
@ -198,9 +198,11 @@ def CheckUploadSize(_, target, source, env):
return
def _configure_defaults():
env.Replace(SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
env.Replace(
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+",
)
def _get_size_output():
cmd = env.get("SIZECHECKCMD")
@ -210,11 +212,11 @@ def CheckUploadSize(_, target, source, env):
cmd = cmd.split()
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command(env.subst(cmd), env=sysenv)
if result['returncode'] != 0:
if result["returncode"] != 0:
return None
return result['out'].strip()
return result["out"].strip()
def _calculate_size(output, pattern):
if not output or not pattern:
@ -238,7 +240,8 @@ def CheckUploadSize(_, target, source, env):
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
)
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
_configure_defaults()
@ -246,14 +249,13 @@ def CheckUploadSize(_, target, source, env):
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
print("Memory Usage -> http://bit.ly/pio-memory-usage")
print ("Memory Usage -> http://bit.ly/pio-memory-usage")
if data_max_size and data_size > -1:
print("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
print ("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
if program_size > -1:
print("PROGRAM: %s" %
_format_availale_bytes(program_size, program_max_size))
print ("PROGRAM: %s" % _format_availale_bytes(program_size, program_max_size))
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output)
print (output)
# raise error
# if data_max_size and data_size > data_max_size:
@ -262,9 +264,10 @@ def CheckUploadSize(_, target, source, env):
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
# env.Exit(1)
if program_size > program_max_size:
sys.stderr.write("Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" %
(program_size, program_max_size))
sys.stderr.write(
"Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" % (program_size, program_max_size)
)
env.Exit(1)
@ -272,12 +275,11 @@ def PrintUploadInfo(env):
configured = env.subst("$UPLOAD_PROTOCOL")
available = [configured] if configured else []
if "BOARD" in env:
available.extend(env.BoardConfig().get("upload",
{}).get("protocols", []))
available.extend(env.BoardConfig().get("upload", {}).get("protocols", []))
if available:
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
print ("AVAILABLE: %s" % ", ".join(sorted(set(available))))
if configured:
print("CURRENT: upload_protocol = %s" % configured)
print ("CURRENT: upload_protocol = %s" % configured)
def exists(_):

View File

@ -61,8 +61,9 @@ def _file_long_data(env, data):
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(build_dir,
"longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest())
tmp_file = join(
build_dir, "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest()
)
if isfile(tmp_file):
return tmp_file
with open(tmp_file, "w") as fp:
@ -83,10 +84,12 @@ def generate(env):
coms = {}
for key in ("ARCOM", "LINKCOM"):
coms[key] = env.get(key, "").replace(
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}")
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
)
for key in ("_CCCOMCOM", "ASPPCOM"):
coms[key] = env.get(key, "").replace(
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}")
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}"
)
env.Replace(**coms)
return env

View File

@ -54,7 +54,8 @@ def _build_project_deps(env):
key: project_lib_builder.env.get(key)
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
if project_lib_builder.env.get(key)
})
}
)
projenv = env.Clone()
@ -65,27 +66,32 @@ def _build_project_deps(env):
is_test = "__test" in COMMAND_LINE_TARGETS
if is_test:
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
"$PIOTEST_SRC_FILTER")
projenv.BuildSources(
"$BUILDTEST_DIR", "$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER"
)
if not is_test or env.GetProjectOption("test_build_project_src", False):
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
env.get("SRC_FILTER"))
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR", env.get("SRC_FILTER"))
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
sys.stderr.write(
"Error: Nothing to build. Please put your source code files "
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR")
)
env.Exit(1)
Export("projenv")
def BuildProgram(env):
def _append_pio_macros():
env.AppendUnique(CPPDEFINES=[(
"PLATFORMIO",
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())))])
env.AppendUnique(
CPPDEFINES=[
(
"PLATFORMIO",
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())),
)
]
)
_append_pio_macros()
@ -95,8 +101,7 @@ def BuildProgram(env):
if not Util.case_sensitive_suffixes(".s", ".S"):
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
if ("debug" in COMMAND_LINE_TARGETS
or env.GetProjectOption("build_type") == "debug"):
if "debug" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "debug":
env.ProcessDebug()
# process extra flags from board
@ -122,8 +127,7 @@ def BuildProgram(env):
_build_project_deps(env)
# append into the beginning a main LD script
if (env.get("LDSCRIPT_PATH")
and not any("-Wl,-T" in f for f in env['LINKFLAGS'])):
if env.get("LDSCRIPT_PATH") and not any("-Wl,-T" in f for f in env["LINKFLAGS"]):
env.Prepend(LINKFLAGS=["-T", "$LDSCRIPT_PATH"])
# enable "cyclic reference" for linker
@ -131,15 +135,18 @@ def BuildProgram(env):
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
program = env.Program(join("$BUILD_DIR", env.subst("$PROGNAME")),
env['PIOBUILDFILES'])
program = env.Program(
join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
)
env.Replace(PIOMAINPROG=program)
AlwaysBuild(
env.Alias(
"checkprogsize", program,
env.VerboseAction(env.CheckUploadSize,
"Checking size $PIOMAINPROG")))
"checkprogsize",
program,
env.VerboseAction(env.CheckUploadSize, "Checking size $PIOMAINPROG"),
)
)
return program
@ -155,19 +162,19 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
result[key].extend(value)
cppdefines = []
for item in result['CPPDEFINES']:
for item in result["CPPDEFINES"]:
if not Util.is_Sequence(item):
cppdefines.append(item)
continue
name, value = item[:2]
if '\"' in value:
value = value.replace('\"', '\\\"')
if '"' in value:
value = value.replace('"', '\\"')
elif value.isdigit():
value = int(value)
elif value.replace(".", "", 1).isdigit():
value = float(value)
cppdefines.append((name, value))
result['CPPDEFINES'] = cppdefines
result["CPPDEFINES"] = cppdefines
# fix relative CPPPATH & LIBPATH
for k in ("CPPPATH", "LIBPATH"):
@ -178,7 +185,7 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
# fix relative path for "-include"
for i, f in enumerate(result.get("CCFLAGS", [])):
if isinstance(f, tuple) and f[0] == "-include":
result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
result["CCFLAGS"][i] = (f[0], env.File(realpath(f[1].get_path())))
return result
@ -191,14 +198,15 @@ def ProcessFlags(env, flags): # pylint: disable=too-many-branches
# Cancel any previous definition of name, either built in or
# provided with a -U option // Issue #191
undefines = [
u for u in env.get("CCFLAGS", [])
u
for u in env.get("CCFLAGS", [])
if isinstance(u, string_types) and u.startswith("-U")
]
if undefines:
for undef in undefines:
env['CCFLAGS'].remove(undef)
if undef[2:] in env['CPPDEFINES']:
env['CPPDEFINES'].remove(undef[2:])
env["CCFLAGS"].remove(undef)
if undef[2:] in env["CPPDEFINES"]:
env["CPPDEFINES"].remove(undef[2:])
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
@ -221,8 +229,7 @@ def ProcessUnFlags(env, flags):
for current in env.get(key, []):
conditions = [
unflag == current,
isinstance(current, (tuple, list))
and unflag[0] == current[0]
isinstance(current, (tuple, list)) and unflag[0] == current[0],
]
if any(conditions):
env[key].remove(current)
@ -231,15 +238,12 @@ def ProcessUnFlags(env, flags):
def MatchSourceFiles(env, src_dir, src_filter=None):
src_filter = env.subst(src_filter) if src_filter else None
src_filter = src_filter or SRC_FILTER_DEFAULT
return fs.match_src_files(env.subst(src_dir), src_filter,
SRC_BUILD_EXT + SRC_HEADER_EXT)
return fs.match_src_files(
env.subst(src_dir), src_filter, SRC_BUILD_EXT + SRC_HEADER_EXT
)
def CollectBuildFiles(env,
variant_dir,
src_dir,
src_filter=None,
duplicate=False):
def CollectBuildFiles(env, variant_dir, src_dir, src_filter=None, duplicate=False):
sources = []
variants = []
@ -267,8 +271,10 @@ def BuildFrameworks(env, frameworks):
return
if "BOARD" not in env:
sys.stderr.write("Please specify `board` in `platformio.ini` to use "
"with '%s' framework\n" % ", ".join(frameworks))
sys.stderr.write(
"Please specify `board` in `platformio.ini` to use "
"with '%s' framework\n" % ", ".join(frameworks)
)
env.Exit(1)
board_frameworks = env.BoardConfig().get("frameworks", [])
@ -276,8 +282,7 @@ def BuildFrameworks(env, frameworks):
if board_frameworks:
frameworks.insert(0, board_frameworks[0])
else:
sys.stderr.write(
"Error: Please specify `board` in `platformio.ini`\n")
sys.stderr.write("Error: Please specify `board` in `platformio.ini`\n")
env.Exit(1)
for f in frameworks:
@ -290,22 +295,20 @@ def BuildFrameworks(env, frameworks):
if f in board_frameworks:
SConscript(env.GetFrameworkScript(f), exports="env")
else:
sys.stderr.write(
"Error: This board doesn't support %s framework!\n" % f)
sys.stderr.write("Error: This board doesn't support %s framework!\n" % f)
env.Exit(1)
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
return env.StaticLibrary(
env.subst(variant_dir),
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
)
def BuildSources(env, variant_dir, src_dir, src_filter=None):
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
DefaultEnvironment().Append(
PIOBUILDFILES=[env.Object(node) for node in nodes])
DefaultEnvironment().Append(PIOBUILDFILES=[env.Object(node) for node in nodes])
def exists(_):


@ -25,10 +25,14 @@ class PlatformioCLI(click.MultiCommand):
@staticmethod
def in_silence():
args = PlatformioCLI.leftover_args
return args and any([
args[0] == "debug" and "--interpreter" in " ".join(args),
args[0] == "upgrade", "--json-output" in args, "--version" in args
])
return args and any(
[
args[0] == "debug" and "--interpreter" in " ".join(args),
args[0] == "upgrade",
"--json-output" in args,
"--version" in args,
]
)
def invoke(self, ctx):
PlatformioCLI.leftover_args = ctx.args
@ -52,8 +56,7 @@ class PlatformioCLI(click.MultiCommand):
def get_command(self, ctx, cmd_name):
mod = None
try:
mod = __import__("platformio.commands." + cmd_name, None, None,
["cli"])
mod = __import__("platformio.commands." + cmd_name, None, None, ["cli"])
except ImportError:
try:
return self._handle_obsolate_command(cmd_name)
@ -65,8 +68,10 @@ class PlatformioCLI(click.MultiCommand):
def _handle_obsolate_command(name):
if name == "platforms":
from platformio.commands import platform
return platform.cli
if name == "serialports":
from platformio.commands import device
return device.cli
raise AttributeError()


@ -34,9 +34,9 @@ def cli(query, installed, json_output): # pylint: disable=R0912
for board in _get_boards(installed):
if query and query.lower() not in json.dumps(board).lower():
continue
if board['platform'] not in grpboards:
grpboards[board['platform']] = []
grpboards[board['platform']].append(board)
if board["platform"] not in grpboards:
grpboards[board["platform"]] = []
grpboards[board["platform"]].append(board)
terminal_width, _ = click.get_terminal_size()
for (platform, boards) in sorted(grpboards.items()):
@ -50,11 +50,21 @@ def cli(query, installed, json_output): # pylint: disable=R0912
def print_boards(boards):
click.echo(
tabulate([(click.style(b['id'], fg="cyan"), b['mcu'], "%dMHz" %
(b['fcpu'] / 1000000), fs.format_filesize(
b['rom']), fs.format_filesize(b['ram']), b['name'])
for b in boards],
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))
tabulate(
[
(
click.style(b["id"], fg="cyan"),
b["mcu"],
"%dMHz" % (b["fcpu"] / 1000000),
fs.format_filesize(b["rom"]),
fs.format_filesize(b["ram"]),
b["name"],
)
for b in boards
],
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"],
)
)
def _get_boards(installed=False):
@ -66,7 +76,7 @@ def _print_boards_json(query, installed=False):
result = []
for board in _get_boards(installed):
if query:
search_data = "%s %s" % (board['id'], json.dumps(board).lower())
search_data = "%s %s" % (board["id"], json.dumps(board).lower())
if query.lower() not in search_data.lower():
continue
result.append(board)


@ -28,39 +28,50 @@ from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools import CheckToolFactory
from platformio.compat import dump_json_to_unicode
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (find_project_dir_above,
get_project_dir,
get_project_include_dir,
get_project_src_dir)
from platformio.project.helpers import (
find_project_dir_above,
get_project_dir,
get_project_include_dir,
get_project_src_dir,
)
@click.command("check", short_help="Run a static analysis tool on code")
@click.option("-e", "--environment", multiple=True)
@click.option("-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True,
file_okay=True,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--filter", multiple=True, help="Pattern: +<include> -<exclude>")
@click.option("--flags", multiple=True)
@click.option("--severity",
multiple=True,
type=click.Choice(DefectItem.SEVERITY_LABELS.values()))
@click.option(
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--json-output", is_flag=True)
def cli(environment, project_dir, project_conf, filter, flags, severity,
silent, verbose, json_output):
def cli(
environment,
project_dir,
project_conf,
filter,
flags,
severity,
silent,
verbose,
json_output,
):
# find project directory on upper level
if isfile(project_dir):
project_dir = find_project_dir_above(project_dir)
@ -68,15 +79,18 @@ def cli(environment, project_dir, project_conf, filter, flags, severity,
results = []
with fs.cd(project_dir):
config = ProjectConfig.get_instance(
project_conf or join(project_dir, "platformio.ini"))
project_conf or join(project_dir, "platformio.ini")
)
config.validate(environment)
default_envs = config.default_envs()
for envname in config.envs():
skipenv = any([
environment and envname not in environment, not environment
and default_envs and envname not in default_envs
])
skipenv = any(
[
environment and envname not in environment,
not environment and default_envs and envname not in default_envs,
]
)
env_options = config.items(env=envname, as_dict=True)
env_dump = []
@ -84,7 +98,8 @@ def cli(environment, project_dir, project_conf, filter, flags, severity,
if k not in ("platform", "framework", "board"):
continue
env_dump.append(
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v))
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
)
default_filter = [
"+<%s/>" % basename(d)
@ -94,13 +109,12 @@ def cli(environment, project_dir, project_conf, filter, flags, severity,
tool_options = dict(
verbose=verbose,
silent=silent,
filter=filter
or env_options.get("check_filter", default_filter),
filter=filter or env_options.get("check_filter", default_filter),
flags=flags or env_options.get("check_flags"),
severity=[
DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]
] if silent else
(severity or env_options.get("check_severity")))
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else (severity or env_options.get("check_severity")),
)
for tool in env_options.get("check_tool", ["cppcheck"]):
if skipenv:
@ -109,26 +123,29 @@ def cli(environment, project_dir, project_conf, filter, flags, severity,
if not silent and not json_output:
print_processing_header(tool, envname, env_dump)
ct = CheckToolFactory.new(tool, project_dir, config, envname,
tool_options)
ct = CheckToolFactory.new(
tool, project_dir, config, envname, tool_options
)
result = {"env": envname, "tool": tool, "duration": time()}
rc = ct.check(on_defect_callback=None if (
json_output or verbose
) else lambda defect: click.echo(repr(defect)))
rc = ct.check(
on_defect_callback=None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
)
result['defects'] = ct.get_defects()
result['duration'] = time() - result['duration']
result['succeeded'] = (
rc == 0 and not any(d.severity == DefectItem.SEVERITY_HIGH
for d in result['defects']))
result["defects"] = ct.get_defects()
result["duration"] = time() - result["duration"]
result["succeeded"] = rc == 0 and not any(
d.severity == DefectItem.SEVERITY_HIGH for d in result["defects"]
)
results.append(result)
if verbose:
click.echo("\n".join(repr(d) for d in result['defects']))
click.echo("\n".join(repr(d) for d in result["defects"]))
if not json_output and not silent:
if not result['defects']:
if not result["defects"]:
click.echo("No defects found")
print_processing_footer(result)
@ -145,11 +162,13 @@ def cli(environment, project_dir, project_conf, filter, flags, severity,
def results_to_json(raw):
results = []
for item in raw:
item.update({
"ignored": item.get("succeeded") is None,
"succeeded": bool(item.get("succeeded")),
"defects": [d.to_json() for d in item.get("defects", [])]
})
item.update(
{
"ignored": item.get("succeeded") is None,
"succeeded": bool(item.get("succeeded")),
"defects": [d.to_json() for d in item.get("defects", [])],
}
)
results.append(item)
return results
@ -157,8 +176,9 @@ def results_to_json(raw):
def print_processing_header(tool, envname, envdump):
click.echo(
"Checking %s > %s (%s)" %
(click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump)))
"Checking %s > %s (%s)"
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
)
terminal_width, _ = click.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@ -166,10 +186,17 @@ def print_processing_header(tool, envname, envdump):
def print_processing_footer(result):
is_failed = not result.get("succeeded")
util.print_labeled_bar(
"[%s] Took %.2f seconds" %
((click.style("FAILED", fg="red", bold=True) if is_failed else
click.style("PASSED", fg="green", bold=True)), result['duration']),
is_error=is_failed)
"[%s] Took %.2f seconds"
% (
(
click.style("FAILED", fg="red", bold=True)
if is_failed
else click.style("PASSED", fg="green", bold=True)
),
result["duration"],
),
is_error=is_failed,
)
def print_defects_stats(results):
@ -178,8 +205,7 @@ def print_defects_stats(results):
def _append_defect(component, defect):
if not components.get(component):
components[component] = Counter()
components[component].update(
{DefectItem.SEVERITY_LABELS[defect.severity]: 1})
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
for result in results:
for defect in result.get("defects", []):
@ -235,20 +261,32 @@ def print_check_summary(results):
status_str = click.style("PASSED", fg="green")
tabular_data.append(
(click.style(result['env'], fg="cyan"), result['tool'], status_str,
util.humanize_duration_time(result.get("duration"))))
(
click.style(result["env"], fg="cyan"),
result["tool"],
status_str,
util.humanize_duration_time(result.get("duration")),
)
)
click.echo(tabulate(tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Environment", "Tool", "Status",
"Duration")
]),
err=failed_nums)
click.echo(
tabulate(
tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Environment", "Tool", "Status", "Duration")
],
),
err=failed_nums,
)
util.print_labeled_bar(
"%s%d succeeded in %s" %
("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
util.humanize_duration_time(duration)),
"%s%d succeeded in %s"
% (
"%d failed, " % failed_nums if failed_nums else "",
succeeded_nums,
util.humanize_duration_time(duration),
),
is_error=failed_nums,
fg="red" if failed_nums else "green")
fg="red" if failed_nums else "green",
)


@ -29,18 +29,19 @@ class DefectItem(object):
SEVERITY_LOW = 4
SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}
def __init__(self,
severity,
category,
message,
file="unknown",
line=0,
column=0,
id=None,
callstack=None,
cwe=None):
assert severity in (self.SEVERITY_HIGH, self.SEVERITY_MEDIUM,
self.SEVERITY_LOW)
def __init__(
self,
severity,
category,
message,
file="unknown",
line=0,
column=0,
id=None,
callstack=None,
cwe=None,
):
assert severity in (self.SEVERITY_HIGH, self.SEVERITY_MEDIUM, self.SEVERITY_LOW)
self.severity = severity
self.category = category
self.message = message
@ -61,14 +62,14 @@ class DefectItem(object):
defect_color = "yellow"
format_str = "{file}:{line}: [{severity}:{category}] {message} {id}"
return format_str.format(severity=click.style(
self.SEVERITY_LABELS[self.severity], fg=defect_color),
category=click.style(self.category.lower(),
fg=defect_color),
file=click.style(self.file, bold=True),
message=self.message,
line=self.line,
id="%s" % "[%s]" % self.id if self.id else "")
return format_str.format(
severity=click.style(self.SEVERITY_LABELS[self.severity], fg=defect_color),
category=click.style(self.category.lower(), fg=defect_color),
file=click.style(self.file, bold=True),
message=self.message,
line=self.line,
id="%s" % "[%s]" % self.id if self.id else "",
)
def __or__(self, defect):
return self.severity | defect.severity
@ -90,5 +91,5 @@ class DefectItem(object):
"column": self.column,
"callstack": self.callstack,
"id": self.id,
"cwe": self.cwe
"cwe": self.cwe,
}


@ -18,7 +18,6 @@ from platformio.commands.check.tools.cppcheck import CppcheckCheckTool
class CheckToolFactory(object):
@staticmethod
def new(tool, project_dir, config, envname, options):
cls = None
@ -27,6 +26,5 @@ class CheckToolFactory(object):
elif tool == "clangtidy":
cls = ClangtidyCheckTool
else:
raise exception.PlatformioException("Unknown check tool `%s`" %
tool)
raise exception.PlatformioException("Unknown check tool `%s`" % tool)
return cls(project_dir, config, envname, options)


@ -20,7 +20,6 @@ from platformio.project.helpers import get_project_dir, load_project_ide_data
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
def __init__(self, project_dir, config, envname, options):
self.config = config
self.envname = envname
@ -35,14 +34,15 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
# detect all defects by default
if not self.options.get("severity"):
self.options['severity'] = [
DefectItem.SEVERITY_LOW, DefectItem.SEVERITY_MEDIUM,
DefectItem.SEVERITY_HIGH
self.options["severity"] = [
DefectItem.SEVERITY_LOW,
DefectItem.SEVERITY_MEDIUM,
DefectItem.SEVERITY_HIGH,
]
# cast to severity by ids
self.options['severity'] = [
self.options["severity"] = [
s if isinstance(s, int) else DefectItem.severity_to_int(s)
for s in self.options['severity']
for s in self.options["severity"]
]
def _load_cpp_data(self, project_dir, envname):
@ -51,8 +51,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return
self.cpp_includes = data.get("includes", [])
self.cpp_defines = data.get("defines", [])
self.cpp_defines.extend(
self._get_toolchain_defines(data.get("cc_path")))
self.cpp_defines.extend(self._get_toolchain_defines(data.get("cc_path")))
def get_flags(self, tool):
result = []
@ -61,18 +60,16 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
if ":" not in flag:
result.extend([f for f in flag.split(" ") if f])
elif flag.startswith("%s:" % tool):
result.extend(
[f for f in flag.split(":", 1)[1].split(" ") if f])
result.extend([f for f in flag.split(":", 1)[1].split(" ") if f])
return result
@staticmethod
def _get_toolchain_defines(cc_path):
defines = []
result = proc.exec_command("echo | %s -dM -E -x c++ -" % cc_path,
shell=True)
result = proc.exec_command("echo | %s -dM -E -x c++ -" % cc_path, shell=True)
for line in result['out'].split("\n"):
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
@ -105,7 +102,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
click.echo(line)
return
if defect.severity not in self.options['severity']:
if defect.severity not in self.options["severity"]:
return
self._defects.append(defect)
@ -125,8 +122,9 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
def get_project_src_files(self):
file_extensions = ["h", "hpp", "c", "cc", "cpp", "ino"]
return fs.match_src_files(get_project_dir(),
self.options.get("filter"), file_extensions)
return fs.match_src_files(
get_project_dir(), self.options.get("filter"), file_extensions
)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
@ -137,7 +135,8 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output))
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
self.clean_up()


@ -21,10 +21,8 @@ from platformio.managers.core import get_core_package_dir
class ClangtidyCheckTool(CheckToolBase):
def tool_output_filter(self, line):
if not self.options.get(
"verbose") and "[clang-diagnostic-error]" in line:
if not self.options.get("verbose") and "[clang-diagnostic-error]" in line:
return ""
if "[CommonOptionsParser]" in line:
@ -37,8 +35,7 @@ class ClangtidyCheckTool(CheckToolBase):
return ""
def parse_defect(self, raw_line):
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$",
raw_line)
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line)
if not match:
return raw_line
@ -50,8 +47,7 @@ class ClangtidyCheckTool(CheckToolBase):
elif category == "warning":
severity = DefectItem.SEVERITY_MEDIUM
return DefectItem(severity, category, message, file_, line, column,
defect_id)
return DefectItem(severity, category, message, file_, line, column, defect_id)
def configure_command(self):
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")


@ -23,29 +23,42 @@ from platformio.project.helpers import get_project_core_dir
class CppcheckCheckTool(CheckToolBase):
def __init__(self, *args, **kwargs):
self._tmp_files = []
self.defect_fields = [
"severity", "message", "file", "line", "column", "callstack",
"cwe", "id"
"severity",
"message",
"file",
"line",
"column",
"callstack",
"cwe",
"id",
]
super(CppcheckCheckTool, self).__init__(*args, **kwargs)
def tool_output_filter(self, line):
if not self.options.get(
"verbose") and "--suppress=unmatchedSuppression:" in line:
if (
not self.options.get("verbose")
and "--suppress=unmatchedSuppression:" in line
):
return ""
if any(msg in line for msg in ("No C or C++ source files found",
"unrecognized command line option")):
if any(
msg in line
for msg in (
"No C or C++ source files found",
"unrecognized command line option",
)
):
self._bad_input = True
return line
def parse_defect(self, raw_line):
if "<&PIO&>" not in raw_line or any(f not in raw_line
for f in self.defect_fields):
if "<&PIO&>" not in raw_line or any(
f not in raw_line for f in self.defect_fields
):
return None
args = dict()
@ -54,13 +67,13 @@ class CppcheckCheckTool(CheckToolBase):
name, value = field.split("=", 1)
args[name] = value
args['category'] = args['severity']
if args['severity'] == "error":
args['severity'] = DefectItem.SEVERITY_HIGH
elif args['severity'] == "warning":
args['severity'] = DefectItem.SEVERITY_MEDIUM
args["category"] = args["severity"]
if args["severity"] == "error":
args["severity"] = DefectItem.SEVERITY_HIGH
elif args["severity"] == "warning":
args["severity"] = DefectItem.SEVERITY_MEDIUM
else:
args['severity'] = DefectItem.SEVERITY_LOW
args["severity"] = DefectItem.SEVERITY_LOW
return DefectItem(**args)
@ -68,20 +81,26 @@ class CppcheckCheckTool(CheckToolBase):
tool_path = join(get_core_package_dir("tool-cppcheck"), "cppcheck")
cmd = [
tool_path, "--error-exitcode=1",
"--verbose" if self.options.get("verbose") else "--quiet"
tool_path,
"--error-exitcode=1",
"--verbose" if self.options.get("verbose") else "--quiet",
]
cmd.append('--template="%s"' % "<&PIO&>".join(
["{0}={{{0}}}".format(f) for f in self.defect_fields]))
cmd.append(
'--template="%s"'
% "<&PIO&>".join(["{0}={{{0}}}".format(f) for f in self.defect_fields])
)
flags = self.get_flags("cppcheck")
if not self.is_flag_set("--platform", flags):
cmd.append("--platform=unspecified")
if not self.is_flag_set("--enable", flags):
enabled_checks = [
"warning", "style", "performance", "portability",
"unusedFunction"
"warning",
"style",
"performance",
"portability",
"unusedFunction",
]
cmd.append("--enable=%s" % ",".join(enabled_checks))


@ -48,37 +48,37 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
@click.command("ci", short_help="Continuous Integration")
@click.argument("src", nargs=-1, callback=validate_path)
@click.option("-l",
"--lib",
multiple=True,
callback=validate_path,
metavar="DIRECTORY")
@click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
@click.option("--exclude", multiple=True)
@click.option("-b",
"--board",
multiple=True,
metavar="ID",
callback=validate_boards)
@click.option("--build-dir",
default=mkdtemp,
type=click.Path(file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option(
"--build-dir",
default=mkdtemp,
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
@click.option("--keep-build-dir", is_flag=True)
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("-O", "--project-option", multiple=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_context
def cli( # pylint: disable=too-many-arguments, too-many-branches
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
project_option, verbose):
ctx,
src,
lib,
exclude,
board,
build_dir,
keep_build_dir,
project_conf,
project_option,
verbose,
):
if not src and getenv("PLATFORMIO_CI_SRC"):
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
@ -110,10 +110,9 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
_exclude_contents(build_dir, exclude)
# initialise project
ctx.invoke(cmd_init,
project_dir=build_dir,
board=board,
project_option=project_option)
ctx.invoke(
cmd_init, project_dir=build_dir, board=board, project_option=project_option
)
# process project
ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
@ -127,27 +126,27 @@ def _copy_contents(dst_dir, contents):
for path in contents:
if isdir(path):
items['dirs'].add(path)
items["dirs"].add(path)
elif isfile(path):
items['files'].add(path)
items["files"].add(path)
dst_dir_name = basename(dst_dir)
if dst_dir_name == "src" and len(items['dirs']) == 1:
copytree(list(items['dirs']).pop(), dst_dir, symlinks=True)
if dst_dir_name == "src" and len(items["dirs"]) == 1:
copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
else:
if not isdir(dst_dir):
makedirs(dst_dir)
for d in items['dirs']:
for d in items["dirs"]:
copytree(d, join(dst_dir, basename(d)), symlinks=True)
if not items['files']:
if not items["files"]:
return
if dst_dir_name == "lib":
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
for f in items['files']:
for f in items["files"]:
dst_file = join(dst_dir, basename(f))
if f == dst_file:
continue


@ -53,8 +53,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
if not isdir(get_project_cache_dir()):
os.makedirs(get_project_cache_dir())
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(),
prefix=".piodebug-")
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")
self._target_is_run = False
self._last_server_activity = 0
@ -70,25 +69,28 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
"PROG_PATH": prog_path,
"PROG_DIR": dirname(prog_path),
"PROG_NAME": basename(splitext(prog_path)[0]),
"DEBUG_PORT": self.debug_options['port'],
"UPLOAD_PROTOCOL": self.debug_options['upload_protocol'],
"INIT_BREAK": self.debug_options['init_break'] or "",
"LOAD_CMDS": "\n".join(self.debug_options['load_cmds'] or []),
"DEBUG_PORT": self.debug_options["port"],
"UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
"INIT_BREAK": self.debug_options["init_break"] or "",
"LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
}
self._debug_server.spawn(patterns)
if not patterns['DEBUG_PORT']:
patterns['DEBUG_PORT'] = self._debug_server.get_debug_port()
if not patterns["DEBUG_PORT"]:
patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()
self.generate_pioinit(self._gdbsrc_dir, patterns)
# start GDB client
args = [
"piogdb",
"-q",
"--directory", self._gdbsrc_dir,
"--directory", self.project_dir,
"-l", "10"
"--directory",
self._gdbsrc_dir,
"--directory",
self.project_dir,
"-l",
"10",
] # yapf: disable
args.extend(self.args)
if not gdb_path:
@ -96,13 +98,11 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
gdb_data_dir = self._get_data_dir(gdb_path)
if gdb_data_dir:
args.extend(["--data-directory", gdb_data_dir])
args.append(patterns['PROG_PATH'])
args.append(patterns["PROG_PATH"])
return reactor.spawnProcess(self,
gdb_path,
args,
path=self.project_dir,
env=os.environ)
return reactor.spawnProcess(
self, gdb_path, args, path=self.project_dir, env=os.environ
)
@staticmethod
def _get_data_dir(gdb_path):
@ -112,8 +112,9 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
return gdb_data_dir if isdir(gdb_data_dir) else None
def generate_pioinit(self, dst_dir, patterns):
server_exe = (self.debug_options.get("server")
or {}).get("executable", "").lower()
server_exe = (
(self.debug_options.get("server") or {}).get("executable", "").lower()
)
if "jlink" in server_exe:
cfg = initcfgs.GDB_JLINK_INIT_CONFIG
elif "st-util" in server_exe:
@ -122,43 +123,43 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
cfg = initcfgs.GDB_MSPDEBUG_INIT_CONFIG
elif "qemu" in server_exe:
cfg = initcfgs.GDB_QEMU_INIT_CONFIG
elif self.debug_options['require_debug_port']:
elif self.debug_options["require_debug_port"]:
cfg = initcfgs.GDB_BLACKMAGIC_INIT_CONFIG
else:
cfg = initcfgs.GDB_DEFAULT_INIT_CONFIG
commands = cfg.split("\n")
if self.debug_options['init_cmds']:
commands = self.debug_options['init_cmds']
commands.extend(self.debug_options['extra_cmds'])
if self.debug_options["init_cmds"]:
commands = self.debug_options["init_cmds"]
commands.extend(self.debug_options["extra_cmds"])
if not any("define pio_reset_target" in cmd for cmd in commands):
commands = [
"define pio_reset_target",
" echo Warning! Undefined pio_reset_target command\\n",
" mon reset",
"end"
"end",
] + commands # yapf: disable
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
commands = [
"define pio_reset_halt_target",
" echo Warning! Undefined pio_reset_halt_target command\\n",
" mon reset halt",
"end"
"end",
] + commands # yapf: disable
if not any("define pio_restart_target" in cmd for cmd in commands):
commands += [
"define pio_restart_target",
" pio_reset_halt_target",
" $INIT_BREAK",
" %s" % ("continue" if patterns['INIT_BREAK'] else "next"),
"end"
" %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
"end",
] # yapf: disable
banner = [
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options['tool'],
"echo PlatformIO: Initializing remote target...\\n"
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
"echo PlatformIO: Initializing remote target...\\n",
]
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
commands = banner + commands + footer
@ -214,8 +215,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
self._handle_error(data)
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
self._auto_continue_timer = task.LoopingCall(
self._auto_exec_continue)
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
self._auto_continue_timer.start(0.1)
def errReceived(self, data):
@ -236,29 +236,34 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
self._auto_continue_timer.stop()
self._auto_continue_timer = None
if not self.debug_options['init_break'] or self._target_is_run:
if not self.debug_options["init_break"] or self._target_is_run:
return
self.console_log(
"PlatformIO: Resume the execution to `debug_init_break = %s`" %
self.debug_options['init_break'])
self.console_log("PlatformIO: More configuration options -> "
"http://bit.ly/pio-debug")
self.transport.write(b"0-exec-continue\n" if helpers.
is_mi_mode(self.args) else b"continue\n")
"PlatformIO: Resume the execution to `debug_init_break = %s`"
% self.debug_options["init_break"]
)
self.console_log(
"PlatformIO: More configuration options -> " "http://bit.ly/pio-debug"
)
self.transport.write(
b"0-exec-continue\n" if helpers.is_mi_mode(self.args) else b"continue\n"
)
self._target_is_run = True
def _handle_error(self, data):
if (self.PIO_SRC_NAME.encode() not in data
or b"Error in sourced" not in data):
if self.PIO_SRC_NAME.encode() not in data or b"Error in sourced" not in data:
return
configuration = {"debug": self.debug_options, "env": self.env_options}
exd = re.sub(r'\\(?!")', "/", json.dumps(configuration))
exd = re.sub(r'"(?:[a-z]\:)?((/[^"/]+)+)"',
lambda m: '"%s"' % join(*m.group(1).split("/")[-2:]), exd,
re.I | re.M)
exd = re.sub(
r'"(?:[a-z]\:)?((/[^"/]+)+)"',
lambda m: '"%s"' % join(*m.group(1).split("/")[-2:]),
exd,
re.I | re.M,
)
mp = MeasurementProtocol()
mp['exd'] = "DebugGDBPioInitError: %s" % exd
mp['exf'] = 1
mp["exd"] = "DebugGDBPioInitError: %s" % exd
mp["exf"] = 1
mp.send("exception")
self.transport.loseConnection()


@ -25,35 +25,35 @@ from platformio import exception, fs, proc, util
from platformio.commands.debug import helpers
from platformio.managers.core import inject_contrib_pysite
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (is_platformio_project,
load_project_ide_data)
from platformio.project.helpers import is_platformio_project, load_project_ide_data
@click.command("debug",
context_settings=dict(ignore_unknown_options=True),
short_help="PIO Unified Debugger")
@click.option("-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.command(
"debug",
context_settings=dict(ignore_unknown_options=True),
short_help="PIO Unified Debugger",
)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--environment", "-e", metavar="<environment>")
@click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def cli(ctx, project_dir, project_conf, environment, verbose, interface,
__unprocessed):
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
# use env variables from Eclipse or CLion
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir):
@ -63,7 +63,8 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface,
with fs.cd(project_dir):
config = ProjectConfig.get_instance(
project_conf or join(project_dir, "platformio.ini"))
project_conf or join(project_dir, "platformio.ini")
)
config.validate(envs=[environment] if environment else None)
env_name = environment or helpers.get_default_debug_env(config)
@ -74,68 +75,64 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface,
assert debug_options
if not interface:
return helpers.predebug_project(ctx, project_dir, env_name, False,
verbose)
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
configuration = load_project_ide_data(project_dir, env_name)
if not configuration:
raise exception.DebugInvalidOptions(
"Could not load debug configuration")
raise exception.DebugInvalidOptions("Could not load debug configuration")
if "--version" in __unprocessed:
result = proc.exec_command([configuration['gdb_path'], "--version"])
if result['returncode'] == 0:
return click.echo(result['out'])
raise exception.PlatformioException("\n".join(
[result['out'], result['err']]))
result = proc.exec_command([configuration["gdb_path"], "--version"])
if result["returncode"] == 0:
return click.echo(result["out"])
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
try:
fs.ensure_udev_rules()
except exception.InvalidUdevRules as e:
for line in str(e).split("\n") + [""]:
click.echo(
('~"%s\\n"' if helpers.is_mi_mode(__unprocessed) else "%s") %
line)
('~"%s\\n"' if helpers.is_mi_mode(__unprocessed) else "%s") % line
)
debug_options['load_cmds'] = helpers.configure_esp32_load_cmds(
debug_options, configuration)
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
debug_options, configuration
)
rebuild_prog = False
preload = debug_options['load_cmds'] == ["preload"]
load_mode = debug_options['load_mode']
preload = debug_options["load_cmds"] == ["preload"]
load_mode = debug_options["load_mode"]
if load_mode == "always":
rebuild_prog = (
preload
or not helpers.has_debug_symbols(configuration['prog_path']))
rebuild_prog = preload or not helpers.has_debug_symbols(
configuration["prog_path"]
)
elif load_mode == "modified":
rebuild_prog = (
helpers.is_prog_obsolete(configuration['prog_path'])
or not helpers.has_debug_symbols(configuration['prog_path']))
rebuild_prog = helpers.is_prog_obsolete(
configuration["prog_path"]
) or not helpers.has_debug_symbols(configuration["prog_path"])
else:
rebuild_prog = not isfile(configuration['prog_path'])
rebuild_prog = not isfile(configuration["prog_path"])
if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
debug_options['load_cmds'] = []
debug_options["load_cmds"] = []
if rebuild_prog:
if helpers.is_mi_mode(__unprocessed):
click.echo('~"Preparing firmware for debugging...\\n"')
output = helpers.GDBBytesIO()
with util.capture_std_streams(output):
helpers.predebug_project(ctx, project_dir, env_name, preload,
verbose)
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
output.close()
else:
click.echo("Preparing firmware for debugging...")
helpers.predebug_project(ctx, project_dir, env_name, preload,
verbose)
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
# save SHA sum of newly created prog
if load_mode == "modified":
helpers.is_prog_obsolete(configuration['prog_path'])
helpers.is_prog_obsolete(configuration["prog_path"])
if not isfile(configuration['prog_path']):
if not isfile(configuration["prog_path"]):
raise exception.DebugInvalidOptions("Program/firmware is missed")
# run debugging client
@ -143,7 +140,7 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface,
from platformio.commands.debug.client import GDBClient, reactor
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
client.spawn(configuration['gdb_path'], configuration['prog_path'])
client.spawn(configuration["gdb_path"], configuration["prog_path"])
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
reactor.run()


@ -20,8 +20,7 @@ from io import BytesIO
from os.path import isfile
from platformio import exception, fs, util
from platformio.commands.platform import \
platform_install as cmd_platform_install
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run import cli as cmd_run
from platformio.managers.platform import PlatformFactory
from platformio.project.config import ProjectConfig
@ -57,41 +56,41 @@ def get_default_debug_env(config):
def predebug_project(ctx, project_dir, env_name, preload, verbose):
ctx.invoke(cmd_run,
project_dir=project_dir,
environment=[env_name],
target=["debug"] + (["upload"] if preload else []),
verbose=verbose)
ctx.invoke(
cmd_run,
project_dir=project_dir,
environment=[env_name],
target=["debug"] + (["upload"] if preload else []),
verbose=verbose,
)
if preload:
time.sleep(5)
def validate_debug_options(cmd_ctx, env_options):
def _cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return [
"$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items
]
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
try:
platform = PlatformFactory.newPlatform(env_options['platform'])
platform = PlatformFactory.newPlatform(env_options["platform"])
except exception.UnknownPlatform:
cmd_ctx.invoke(cmd_platform_install,
platforms=[env_options['platform']],
skip_default_package=True)
platform = PlatformFactory.newPlatform(env_options['platform'])
cmd_ctx.invoke(
cmd_platform_install,
platforms=[env_options["platform"]],
skip_default_package=True,
)
platform = PlatformFactory.newPlatform(env_options["platform"])
board_config = platform.board_config(env_options['board'])
board_config = platform.board_config(env_options["board"])
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
tool_settings = board_config.get("debug", {}).get("tools",
{}).get(tool_name, {})
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
server_options = None
# specific server per a system
if isinstance(tool_settings.get("server", {}), list):
for item in tool_settings['server'][:]:
tool_settings['server'] = item
for item in tool_settings["server"][:]:
tool_settings["server"] = item
if util.get_systype() in item.get("system", []):
break
@ -100,76 +99,87 @@ def validate_debug_options(cmd_ctx, env_options):
server_options = {
"cwd": None,
"executable": None,
"arguments": env_options.get("debug_server")
"arguments": env_options.get("debug_server"),
}
server_options['executable'] = server_options['arguments'][0]
server_options['arguments'] = server_options['arguments'][1:]
server_options["executable"] = server_options["arguments"][0]
server_options["arguments"] = server_options["arguments"][1:]
elif "server" in tool_settings:
server_package = tool_settings['server'].get("package")
server_package_dir = platform.get_package_dir(
server_package) if server_package else None
server_package = tool_settings["server"].get("package")
server_package_dir = (
platform.get_package_dir(server_package) if server_package else None
)
if server_package and not server_package_dir:
platform.install_packages(with_packages=[server_package],
skip_default_package=True,
silent=True)
platform.install_packages(
with_packages=[server_package], skip_default_package=True, silent=True
)
server_package_dir = platform.get_package_dir(server_package)
server_options = dict(
cwd=server_package_dir if server_package else None,
executable=tool_settings['server'].get("executable"),
executable=tool_settings["server"].get("executable"),
arguments=[
a.replace("$PACKAGE_DIR", server_package_dir)
if server_package_dir else a
for a in tool_settings['server'].get("arguments", [])
])
if server_package_dir
else a
for a in tool_settings["server"].get("arguments", [])
],
)
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
result = dict(
tool=tool_name,
upload_protocol=env_options.get(
"upload_protocol",
board_config.get("upload", {}).get("protocol")),
"upload_protocol", board_config.get("upload", {}).get("protocol")
),
load_cmds=_cleanup_cmds(
env_options.get(
"debug_load_cmds",
tool_settings.get("load_cmds",
tool_settings.get("load_cmd", "load")))),
load_mode=env_options.get("debug_load_mode",
tool_settings.get("load_mode", "always")),
tool_settings.get("load_cmds", tool_settings.get("load_cmd", "load")),
)
),
load_mode=env_options.get(
"debug_load_mode", tool_settings.get("load_mode", "always")
),
init_break=env_options.get(
"debug_init_break", tool_settings.get("init_break",
"tbreak main")),
"debug_init_break", tool_settings.get("init_break", "tbreak main")
),
init_cmds=_cleanup_cmds(
env_options.get("debug_init_cmds",
tool_settings.get("init_cmds"))),
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
),
extra_cmds=extra_cmds,
require_debug_port=tool_settings.get("require_debug_port", False),
port=reveal_debug_port(
env_options.get("debug_port", tool_settings.get("port")),
tool_name, tool_settings),
server=server_options)
tool_name,
tool_settings,
),
server=server_options,
)
return result
def configure_esp32_load_cmds(debug_options, configuration):
ignore_conds = [
debug_options['load_cmds'] != ["load"],
debug_options["load_cmds"] != ["load"],
"xtensa-esp32" not in configuration.get("cc_path", ""),
not configuration.get("flash_extra_images"), not all([
isfile(item['path'])
for item in configuration.get("flash_extra_images")
])
not configuration.get("flash_extra_images"),
not all(
[isfile(item["path"]) for item in configuration.get("flash_extra_images")]
),
]
if any(ignore_conds):
return debug_options['load_cmds']
return debug_options["load_cmds"]
mon_cmds = [
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
path=fs.to_unix_path(item['path']), offset=item['offset'])
path=fs.to_unix_path(item["path"]), offset=item["offset"]
)
for item in configuration.get("flash_extra_images")
]
mon_cmds.append('monitor program_esp32 "{%s.bin}" 0x10000 verify' %
fs.to_unix_path(configuration['prog_path'][:-4]))
mon_cmds.append(
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
% fs.to_unix_path(configuration["prog_path"][:-4])
)
return mon_cmds
@ -181,7 +191,7 @@ def has_debug_symbols(prog_path):
b".debug_abbrev": False,
b" -Og": False,
b" -g": False,
b"__PLATFORMIO_BUILD_DEBUG__": False
b"__PLATFORMIO_BUILD_DEBUG__": False,
}
with open(prog_path, "rb") as fp:
last_data = b""
@ -222,7 +232,6 @@ def is_prog_obsolete(prog_path):
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _get_pattern():
if not env_debug_port:
return None
@ -238,18 +247,21 @@ def reveal_debug_port(env_debug_port, tool_name, tool_settings):
def _look_for_serial_port(hwids):
for item in util.get_serialports(filter_hwid=True):
if not _is_match_pattern(item['port']):
if not _is_match_pattern(item["port"]):
continue
port = item['port']
port = item["port"]
if tool_name.startswith("blackmagic"):
if "windows" in util.get_systype() and \
port.startswith("COM") and len(port) > 4:
if (
"windows" in util.get_systype()
and port.startswith("COM")
and len(port) > 4
):
port = "\\\\.\\%s" % port
if "GDB" in item['description']:
if "GDB" in item["description"]:
return port
for hwid in hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item['hwid']:
if hwid_str in item["hwid"]:
return port
return None
@ -261,5 +273,6 @@ def reveal_debug_port(env_debug_port, tool_name, tool_settings):
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
if not debug_port:
raise exception.DebugInvalidOptions(
"Please specify `debug_port` for environment")
"Please specify `debug_port` for environment"
)
return debug_port

View File

@ -32,7 +32,7 @@ class BaseProcess(protocol.ProcessProtocol, object):
COMMON_PATTERNS = {
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
"PYTHONEXE": get_pythonexe_path()
"PYTHONEXE": get_pythonexe_path(),
}
def apply_patterns(self, source, patterns=None):
@ -52,8 +52,7 @@ class BaseProcess(protocol.ProcessProtocol, object):
if isinstance(source, string_types):
source = _replace(source)
elif isinstance(source, (list, dict)):
items = enumerate(source) if isinstance(source,
list) else source.items()
items = enumerate(source) if isinstance(source, list) else source.items()
for key, value in items:
if isinstance(value, string_types):
source[key] = _replace(value)
@ -67,9 +66,9 @@ class BaseProcess(protocol.ProcessProtocol, object):
with open(LOG_FILE, "ab") as fp:
fp.write(data)
while data:
chunk = data[:self.STDOUT_CHUNK_SIZE]
chunk = data[: self.STDOUT_CHUNK_SIZE]
click.echo(chunk, nl=False)
data = data[self.STDOUT_CHUNK_SIZE:]
data = data[self.STDOUT_CHUNK_SIZE :]
@staticmethod
def errReceived(data):


@ -24,7 +24,6 @@ from platformio.proc import where_is_program
class DebugServer(BaseProcess):
def __init__(self, debug_options, env_options):
self.debug_options = debug_options
self.env_options = env_options
@ -39,13 +38,16 @@ class DebugServer(BaseProcess):
if not server:
return None
server = self.apply_patterns(server, patterns)
server_executable = server['executable']
server_executable = server["executable"]
if not server_executable:
return None
if server['cwd']:
server_executable = join(server['cwd'], server_executable)
if ("windows" in systype and not server_executable.endswith(".exe")
and isfile(server_executable + ".exe")):
if server["cwd"]:
server_executable = join(server["cwd"], server_executable)
if (
"windows" in systype
and not server_executable.endswith(".exe")
and isfile(server_executable + ".exe")
):
server_executable = server_executable + ".exe"
if not isfile(server_executable):
@ -55,48 +57,56 @@ class DebugServer(BaseProcess):
"\nCould not launch Debug Server '%s'. Please check that it "
"is installed and is included in a system PATH\n\n"
"See documentation or contact contact@platformio.org:\n"
"http://docs.platformio.org/page/plus/debugging.html\n" %
server_executable)
"http://docs.platformio.org/page/plus/debugging.html\n"
% server_executable
)
self._debug_port = ":3333"
openocd_pipe_allowed = all([
not self.debug_options['port'],
"openocd" in server_executable
]) # yapf: disable
openocd_pipe_allowed = all(
[not self.debug_options["port"], "openocd" in server_executable]
) # yapf: disable
if openocd_pipe_allowed:
args = []
if server['cwd']:
args.extend(["-s", server['cwd']])
args.extend([
"-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"
])
args.extend(server['arguments'])
if server["cwd"]:
args.extend(["-s", server["cwd"]])
args.extend(
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
)
args.extend(server["arguments"])
str_args = " ".join(
[arg if arg.startswith("-") else '"%s"' % arg for arg in args])
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
)
self._debug_port = '| "%s" %s' % (server_executable, str_args)
self._debug_port = fs.to_unix_path(self._debug_port)
else:
env = os.environ.copy()
# prepend server "lib" folder to LD path
if ("windows" not in systype and server['cwd']
and isdir(join(server['cwd'], "lib"))):
ld_key = ("DYLD_LIBRARY_PATH"
if "darwin" in systype else "LD_LIBRARY_PATH")
env[ld_key] = join(server['cwd'], "lib")
if (
"windows" not in systype
and server["cwd"]
and isdir(join(server["cwd"], "lib"))
):
ld_key = (
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
)
env[ld_key] = join(server["cwd"], "lib")
if os.environ.get(ld_key):
env[ld_key] = "%s:%s" % (env[ld_key],
os.environ.get(ld_key))
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
# prepend BIN to PATH
if server['cwd'] and isdir(join(server['cwd'], "bin")):
env['PATH'] = "%s%s%s" % (
join(server['cwd'], "bin"), os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")))
if server["cwd"] and isdir(join(server["cwd"], "bin")):
env["PATH"] = "%s%s%s" % (
join(server["cwd"], "bin"),
os.pathsep,
os.environ.get("PATH", os.environ.get("Path", "")),
)
self._transport = reactor.spawnProcess(
self,
server_executable, [server_executable] + server['arguments'],
path=server['cwd'],
env=env)
server_executable,
[server_executable] + server["arguments"],
path=server["cwd"],
env=env,
)
if "mspdebug" in server_executable.lower():
self._debug_port = ":2000"
elif "jlink" in server_executable.lower():


@ -36,27 +36,29 @@ def cli():
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
@click.option("--json-output", is_flag=True)
def device_list( # pylint: disable=too-many-branches
serial, logical, mdns, json_output):
serial, logical, mdns, json_output
):
if not logical and not mdns:
serial = True
data = {}
if serial:
data['serial'] = util.get_serial_ports()
data["serial"] = util.get_serial_ports()
if logical:
data['logical'] = util.get_logical_devices()
data["logical"] = util.get_logical_devices()
if mdns:
data['mdns'] = util.get_mdns_services()
data["mdns"] = util.get_mdns_services()
single_key = list(data)[0] if len(list(data)) == 1 else None
if json_output:
return click.echo(
dump_json_to_unicode(data[single_key] if single_key else data))
dump_json_to_unicode(data[single_key] if single_key else data)
)
titles = {
"serial": "Serial Ports",
"logical": "Logical Devices",
"mdns": "Multicast DNS Services"
"mdns": "Multicast DNS Services",
}
for key, value in data.items():
@ -66,31 +68,38 @@ def device_list( # pylint: disable=too-many-branches
if key == "serial":
for item in value:
click.secho(item['port'], fg="cyan")
click.echo("-" * len(item['port']))
click.echo("Hardware ID: %s" % item['hwid'])
click.echo("Description: %s" % item['description'])
click.secho(item["port"], fg="cyan")
click.echo("-" * len(item["port"]))
click.echo("Hardware ID: %s" % item["hwid"])
click.echo("Description: %s" % item["description"])
click.echo("")
if key == "logical":
for item in value:
click.secho(item['path'], fg="cyan")
click.echo("-" * len(item['path']))
click.echo("Name: %s" % item['name'])
click.secho(item["path"], fg="cyan")
click.echo("-" * len(item["path"]))
click.echo("Name: %s" % item["name"])
click.echo("")
if key == "mdns":
for item in value:
click.secho(item['name'], fg="cyan")
click.echo("-" * len(item['name']))
click.echo("Type: %s" % item['type'])
click.echo("IP: %s" % item['ip'])
click.echo("Port: %s" % item['port'])
if item['properties']:
click.echo("Properties: %s" % ("; ".join([
"%s=%s" % (k, v)
for k, v in item['properties'].items()
])))
click.secho(item["name"], fg="cyan")
click.echo("-" * len(item["name"]))
click.echo("Type: %s" % item["type"])
click.echo("IP: %s" % item["ip"])
click.echo("Port: %s" % item["port"])
if item["properties"]:
click.echo(
"Properties: %s"
% (
"; ".join(
[
"%s=%s" % (k, v)
for k, v in item["properties"].items()
]
)
)
)
click.echo("")
if single_key:
@ -102,66 +111,71 @@ def device_list( # pylint: disable=too-many-branches
@cli.command("monitor", short_help="Monitor device (Serial)")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
@click.option("--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N")
@click.option("--rtscts",
is_flag=True,
help="Enable RTS/CTS flow control, default=Off")
@click.option("--xonxoff",
is_flag=True,
help="Enable software flow control, default=Off")
@click.option("--rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state")
@click.option("--dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state")
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option("--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option("--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF")
@click.option("--raw",
is_flag=True,
help="Do not apply any encodings/transformations")
@click.option("--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)")
@click.option("--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)")
@click.option("--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
resolve_path=True))
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option(
"-e",
"--environment",
help="Load configuration from `platformio.ini` and specified environment")
help="Load configuration from `platformio.ini` and specified environment",
)
def device_monitor(**kwargs): # pylint: disable=too-many-branches
env_options = {}
try:
env_options = get_project_options(kwargs['project_dir'],
kwargs['environment'])
env_options = get_project_options(kwargs["project_dir"], kwargs["environment"])
for k in ("port", "speed", "rts", "dtr"):
k2 = "monitor_%s" % k
if k == "speed":
@ -173,10 +187,10 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
except exception.NotPlatformIOProject:
pass
if not kwargs['port']:
if not kwargs["port"]:
ports = util.get_serial_ports(filter_hwid=True)
if len(ports) == 1:
kwargs['port'] = ports[0]['port']
kwargs["port"] = ports[0]["port"]
sys.argv = ["monitor"] + env_options.get("monitor_flags", [])
for k, v in kwargs.items():
@ -194,17 +208,19 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
else:
sys.argv.extend([k, str(v)])
if kwargs['port'] and (set(["*", "?", "[", "]"]) & set(kwargs['port'])):
if kwargs["port"] and (set(["*", "?", "[", "]"]) & set(kwargs["port"])):
for item in util.get_serial_ports():
if fnmatch(item['port'], kwargs['port']):
kwargs['port'] = item['port']
if fnmatch(item["port"], kwargs["port"]):
kwargs["port"] = item["port"]
break
try:
miniterm.main(default_port=kwargs['port'],
default_baudrate=kwargs['baud'] or 9600,
default_rts=kwargs['rts'],
default_dtr=kwargs['dtr'])
miniterm.main(
default_port=kwargs["port"],
default_baudrate=kwargs["baud"] or 9600,
default_rts=kwargs["rts"],
default_dtr=kwargs["dtr"],
)
except Exception as e:
raise exception.MinitermException(e)


@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-locals
import mimetypes
import socket
from os.path import isdir
@ -19,8 +21,7 @@ from os.path import isdir
import click
from platformio import exception
from platformio.managers.core import (get_core_package_dir,
inject_contrib_pysite)
from platformio.managers.core import get_core_package_dir, inject_contrib_pysite
@click.command("home", short_help="PIO Home")
@ -28,9 +29,12 @@ from platformio.managers.core import (get_core_package_dir,
@click.option(
"--host",
default="127.0.0.1",
help="HTTP host, default=127.0.0.1. "
"You can open PIO Home for inbound connections with --host=0.0.0.0")
@click.option("--no-open", is_flag=True) # pylint: disable=too-many-locals
help=(
"HTTP host, default=127.0.0.1. You can open PIO Home for inbound "
"connections with --host=0.0.0.0"
),
)
@click.option("--no-open", is_flag=True)
def cli(port, host, no_open):
# import contrib modules
inject_contrib_pysite()
@ -38,6 +42,7 @@ def cli(port, host, no_open):
from autobahn.twisted.resource import WebSocketResource
from twisted.internet import reactor
from twisted.web import server
# pylint: enable=import-error
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.ide import IDERPC
@ -89,14 +94,18 @@ def cli(port, host, no_open):
else:
reactor.callLater(1, lambda: click.launch(home_url))
click.echo("\n".join([
"",
" ___I_",
" /\\-_--\\ PlatformIO Home",
"/ \\_-__\\",
"|[]| [] | %s" % home_url,
"|__|____|______________%s" % ("_" * len(host)),
]))
click.echo(
"\n".join(
[
"",
" ___I_",
" /\\-_--\\ PlatformIO Home",
"/ \\_-__\\",
"|[]| [] | %s" % home_url,
"|__|____|______________%s" % ("_" * len(host)),
]
)
)
click.echo("")
click.echo("Open PIO Home in your browser by this URL => %s" % home_url)


@ -27,7 +27,6 @@ from platformio.proc import where_is_program
class AsyncSession(requests.Session):
def __init__(self, n=None, *args, **kwargs):
if n:
pool = reactor.getThreadPool()
@ -51,7 +50,8 @@ def requests_session():
@util.memoized(expire="60s")
def get_core_fullpath():
return where_is_program(
"platformio" + (".exe" if "windows" in util.get_systype() else ""))
"platformio" + (".exe" if "windows" in util.get_systype() else "")
)
@util.memoized(expire="10s")
@ -60,9 +60,7 @@ def is_twitter_blocked():
timeout = 2
try:
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
requests.get("http://%s" % ip,
allow_redirects=False,
timeout=timeout)
requests.get("http://%s" % ip, allow_redirects=False, timeout=timeout)
else:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, 80))
return False


@ -17,8 +17,7 @@ from __future__ import absolute_import
from os.path import expanduser, join
from platformio import __version__, app, util
from platformio.project.helpers import (get_project_core_dir,
is_platformio_project)
from platformio.project.helpers import get_project_core_dir, is_platformio_project
class AppRPC(object):
@ -26,8 +25,13 @@ class AppRPC(object):
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
IGNORE_STORAGE_KEYS = [
"cid", "coreVersion", "coreSystype", "coreCaller", "coreSettings",
"homeDir", "projectsDir"
"cid",
"coreVersion",
"coreSystype",
"coreCaller",
"coreSettings",
"homeDir",
"projectsDir",
]
@staticmethod
@ -37,31 +41,28 @@ class AppRPC(object):
# base data
caller_id = app.get_session_var("caller_id")
storage['cid'] = app.get_cid()
storage['coreVersion'] = __version__
storage['coreSystype'] = util.get_systype()
storage['coreCaller'] = (str(caller_id).lower()
if caller_id else None)
storage['coreSettings'] = {
storage["cid"] = app.get_cid()
storage["coreVersion"] = __version__
storage["coreSystype"] = util.get_systype()
storage["coreCaller"] = str(caller_id).lower() if caller_id else None
storage["coreSettings"] = {
name: {
"description": data['description'],
"default_value": data['value'],
"value": app.get_setting(name)
"description": data["description"],
"default_value": data["value"],
"value": app.get_setting(name),
}
for name, data in app.DEFAULT_SETTINGS.items()
}
storage['homeDir'] = expanduser("~")
storage['projectsDir'] = storage['coreSettings']['projects_dir'][
'value']
storage["homeDir"] = expanduser("~")
storage["projectsDir"] = storage["coreSettings"]["projects_dir"]["value"]
# skip non-existing recent projects
storage['recentProjects'] = [
p for p in storage.get("recentProjects", [])
if is_platformio_project(p)
storage["recentProjects"] = [
p for p in storage.get("recentProjects", []) if is_platformio_project(p)
]
state['storage'] = storage
state["storage"] = storage
state.modified = False # skip saving extra fields
return state.as_dict()


@ -19,20 +19,18 @@ from twisted.internet import defer # pylint: disable=import-error
class IDERPC(object):
def __init__(self):
self._queue = {}
def send_command(self, command, params, sid=0):
if not self._queue.get(sid):
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4005, message="PIO Home IDE agent is not started")
code=4005, message="PIO Home IDE agent is not started"
)
while self._queue[sid]:
self._queue[sid].pop().callback({
"id": time.time(),
"method": command,
"params": params
})
self._queue[sid].pop().callback(
{"id": time.time(), "method": command, "params": params}
)
def listen_commands(self, sid=0):
if sid not in self._queue:


@ -22,7 +22,6 @@ from platformio.commands.home.rpc.handlers.os import OSRPC
class MiscRPC(object):
def load_latest_tweets(self, username):
cache_key = "piohome_latest_tweets_" + str(username)
cache_valid = "7d"
@ -31,10 +30,11 @@ class MiscRPC(object):
if cache_data:
cache_data = json.loads(cache_data)
# automatically update cache in background every 12 hours
if cache_data['time'] < (time.time() - (3600 * 12)):
reactor.callLater(5, self._preload_latest_tweets, username,
cache_key, cache_valid)
return cache_data['result']
if cache_data["time"] < (time.time() - (3600 * 12)):
reactor.callLater(
5, self._preload_latest_tweets, username, cache_key, cache_valid
)
return cache_data["result"]
result = self._preload_latest_tweets(username, cache_key, cache_valid)
return result
@ -43,12 +43,13 @@ class MiscRPC(object):
@defer.inlineCallbacks
def _preload_latest_tweets(username, cache_key, cache_valid):
result = yield OSRPC.fetch_content(
"https://api.platformio.org/tweets/" + username)
"https://api.platformio.org/tweets/" + username
)
result = json.loads(result)
with app.ContentCache() as cc:
cc.set(cache_key,
json.dumps({
"time": int(time.time()),
"result": result
}), cache_valid)
cc.set(
cache_key,
json.dumps({"time": int(time.time()), "result": result}),
cache_valid,
)
defer.returnValue(result)


@ -30,19 +30,18 @@ from platformio.compat import PY2, get_filesystem_encoding
class OSRPC(object):
@staticmethod
@defer.inlineCallbacks
def fetch_content(uri, data=None, headers=None, cache_valid=None):
if not headers:
headers = {
"User-Agent":
("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
"AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 "
"Safari/603.3.8")
"User-Agent": (
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
"AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 "
"Safari/603.3.8"
)
}
cache_key = (app.ContentCache.key_from_args(uri, data)
if cache_valid else None)
cache_key = app.ContentCache.key_from_args(uri, data) if cache_valid else None
with app.ContentCache() as cc:
if cache_key:
result = cc.get(cache_key)
@ -66,7 +65,7 @@ class OSRPC(object):
defer.returnValue(result)
def request_content(self, uri, data=None, headers=None, cache_valid=None):
if uri.startswith('http'):
if uri.startswith("http"):
return self.fetch_content(uri, data, headers, cache_valid)
if not isfile(uri):
return None
@ -80,8 +79,8 @@ class OSRPC(object):
@staticmethod
def reveal_file(path):
return click.launch(
path.encode(get_filesystem_encoding()) if PY2 else path,
locate=True)
path.encode(get_filesystem_encoding()) if PY2 else path, locate=True
)
@staticmethod
def is_file(path):
@ -109,13 +108,11 @@ class OSRPC(object):
pathnames = [pathnames]
result = set()
for pathname in pathnames:
result |= set(
glob.glob(join(root, pathname) if root else pathname))
result |= set(glob.glob(join(root, pathname) if root else pathname))
return list(result)
@staticmethod
def list_dir(path):
def _cmp(x, y):
if x[1] and not y[1]:
return -1
@ -146,7 +143,7 @@ class OSRPC(object):
def get_logical_devices():
items = []
for item in util.get_logical_devices():
if item['name']:
item['name'] = item['name']
if item["name"]:
item["name"] = item["name"]
items.append(item)
return items


@ -27,8 +27,7 @@ from twisted.internet import utils # pylint: disable=import-error
from platformio import __main__, __version__, fs
from platformio.commands.home import helpers
from platformio.compat import (PY2, get_filesystem_encoding, is_bytes,
string_types)
from platformio.compat import PY2, get_filesystem_encoding, is_bytes, string_types
try:
from thread import get_ident as thread_get_ident
@ -37,7 +36,6 @@ except ImportError:
class MultiThreadingStdStream(object):
def __init__(self, parent_stream):
self._buffers = {thread_get_ident(): parent_stream}
@ -54,7 +52,8 @@ class MultiThreadingStdStream(object):
thread_id = thread_get_ident()
self._ensure_thread_buffer(thread_id)
return self._buffers[thread_id].write(
value.decode() if is_bytes(value) else value)
value.decode() if is_bytes(value) else value
)
def get_value_and_reset(self):
result = ""
@ -68,7 +67,6 @@ class MultiThreadingStdStream(object):
class PIOCoreRPC(object):
@staticmethod
def version():
return __version__
@ -104,16 +102,15 @@ class PIOCoreRPC(object):
else:
result = yield PIOCoreRPC._call_inline(args, options)
try:
defer.returnValue(
PIOCoreRPC._process_result(result, to_json))
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
except ValueError:
# fall-back to subprocess method
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(
PIOCoreRPC._process_result(result, to_json))
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4003, message="PIO Core Call Error", data=str(e))
code=4003, message="PIO Core Call Error", data=str(e)
)
@staticmethod
def _call_inline(args, options):
@ -123,8 +120,11 @@ class PIOCoreRPC(object):
def _thread_task():
with fs.cd(cwd):
exit_code = __main__.main(["-c"] + args)
return (PIOCoreRPC.thread_stdout.get_value_and_reset(),
PIOCoreRPC.thread_stderr.get_value_and_reset(), exit_code)
return (
PIOCoreRPC.thread_stdout.get_value_and_reset(),
PIOCoreRPC.thread_stderr.get_value_and_reset(),
exit_code,
)
return threads.deferToThread(_thread_task)
@ -135,8 +135,8 @@ class PIOCoreRPC(object):
helpers.get_core_fullpath(),
args,
path=cwd,
env={k: v
for k, v in os.environ.items() if "%" not in k})
env={k: v for k, v in os.environ.items() if "%" not in k},
)
@staticmethod
def _process_result(result, to_json=False):


@ -17,8 +17,7 @@ from __future__ import absolute_import
import os
import shutil
import time
from os.path import (basename, expanduser, getmtime, isdir, isfile, join,
realpath, sep)
from os.path import basename, expanduser, getmtime, isdir, isfile, join, realpath, sep
import jsonrpc # pylint: disable=import-error
@ -29,38 +28,37 @@ from platformio.compat import PY2, get_filesystem_encoding
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_libdeps_dir,
get_project_src_dir,
is_platformio_project)
from platformio.project.helpers import (
get_project_libdeps_dir,
get_project_src_dir,
is_platformio_project,
)
class ProjectRPC(object):
@staticmethod
def _get_projects(project_dirs=None):
def _get_project_data(project_dir):
data = {"boards": [], "envLibdepsDirs": [], "libExtraDirs": []}
config = ProjectConfig(join(project_dir, "platformio.ini"))
libdeps_dir = get_project_libdeps_dir()
data['libExtraDirs'].extend(
config.get("platformio", "lib_extra_dirs", []))
data["libExtraDirs"].extend(config.get("platformio", "lib_extra_dirs", []))
for section in config.sections():
if not section.startswith("env:"):
continue
data['envLibdepsDirs'].append(join(libdeps_dir, section[4:]))
data["envLibdepsDirs"].append(join(libdeps_dir, section[4:]))
if config.has_option(section, "board"):
data['boards'].append(config.get(section, "board"))
data['libExtraDirs'].extend(
config.get(section, "lib_extra_dirs", []))
data["boards"].append(config.get(section, "board"))
data["libExtraDirs"].extend(config.get(section, "lib_extra_dirs", []))
# skip non existing folders and resolve full path
for key in ("envLibdepsDirs", "libExtraDirs"):
data[key] = [
expanduser(d) if d.startswith("~") else realpath(d)
for d in data[key] if isdir(d)
for d in data[key]
if isdir(d)
]
return data
@ -69,7 +67,7 @@ class ProjectRPC(object):
return (sep).join(path.split(sep)[-2:])
if not project_dirs:
project_dirs = AppRPC.load_state()['storage']['recentProjects']
project_dirs = AppRPC.load_state()["storage"]["recentProjects"]
result = []
pm = PlatformManager()
@ -85,29 +83,27 @@ class ProjectRPC(object):
for board_id in data.get("boards", []):
name = board_id
try:
name = pm.board_config(board_id)['name']
name = pm.board_config(board_id)["name"]
except exception.PlatformioException:
pass
boards.append({"id": board_id, "name": name})
result.append({
"path":
project_dir,
"name":
_path_to_name(project_dir),
"modified":
int(getmtime(project_dir)),
"boards":
boards,
"envLibStorages": [{
"name": basename(d),
"path": d
} for d in data.get("envLibdepsDirs", [])],
"extraLibStorages": [{
"name": _path_to_name(d),
"path": d
} for d in data.get("libExtraDirs", [])]
})
result.append(
{
"path": project_dir,
"name": _path_to_name(project_dir),
"modified": int(getmtime(project_dir)),
"boards": boards,
"envLibStorages": [
{"name": basename(d), "path": d}
for d in data.get("envLibdepsDirs", [])
],
"extraLibStorages": [
{"name": _path_to_name(d), "path": d}
for d in data.get("libExtraDirs", [])
],
}
)
return result
def get_projects(self, project_dirs=None):
@ -117,7 +113,7 @@ class ProjectRPC(object):
def get_project_examples():
result = []
for manifest in PlatformManager().get_installed():
examples_dir = join(manifest['__pkg_dir'], "examples")
examples_dir = join(manifest["__pkg_dir"], "examples")
if not isdir(examples_dir):
continue
items = []
@ -126,28 +122,30 @@ class ProjectRPC(object):
try:
config = ProjectConfig(join(project_dir, "platformio.ini"))
config.validate(silent=True)
project_description = config.get("platformio",
"description")
project_description = config.get("platformio", "description")
except exception.PlatformIOProjectException:
continue
path_tokens = project_dir.split(sep)
items.append({
"name":
"/".join(path_tokens[path_tokens.index("examples") + 1:]),
"path":
project_dir,
"description":
project_description
})
result.append({
"platform": {
"title": manifest['title'],
"version": manifest['version']
},
"items": sorted(items, key=lambda item: item['name'])
})
return sorted(result, key=lambda data: data['platform']['title'])
items.append(
{
"name": "/".join(
path_tokens[path_tokens.index("examples") + 1 :]
),
"path": project_dir,
"description": project_description,
}
)
result.append(
{
"platform": {
"title": manifest["title"],
"version": manifest["version"],
},
"items": sorted(items, key=lambda item: item["name"]),
}
)
return sorted(result, key=lambda data: data["platform"]["title"])
def init(self, board, framework, project_dir):
assert project_dir
@ -157,9 +155,11 @@ class ProjectRPC(object):
args = ["init", "--board", board]
if framework:
args.extend(["--project-option", "framework = %s" % framework])
if (state['storage']['coreCaller'] and state['storage']['coreCaller']
in ProjectGenerator.get_supported_ides()):
args.extend(["--ide", state['storage']['coreCaller']])
if (
state["storage"]["coreCaller"]
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d.addCallback(self._generate_project_main, project_dir, framework)
return d
@ -168,32 +168,35 @@ class ProjectRPC(object):
def _generate_project_main(_, project_dir, framework):
main_content = None
if framework == "arduino":
main_content = "\n".join([
"#include <Arduino.h>",
"",
"void setup() {",
" // put your setup code here, to run once:",
"}",
"",
"void loop() {",
" // put your main code here, to run repeatedly:",
"}"
""
]) # yapf: disable
main_content = "\n".join(
[
"#include <Arduino.h>",
"",
"void setup() {",
" // put your setup code here, to run once:",
"}",
"",
"void loop() {",
" // put your main code here, to run repeatedly:",
"}" "",
]
) # yapf: disable
elif framework == "mbed":
main_content = "\n".join([
"#include <mbed.h>",
"",
"int main() {",
"",
" // put your setup code here, to run once:",
"",
" while(1) {",
" // put your main code here, to run repeatedly:",
" }",
"}",
""
]) # yapf: disable
main_content = "\n".join(
[
"#include <mbed.h>",
"",
"int main() {",
"",
" // put your setup code here, to run once:",
"",
" while(1) {",
" // put your main code here, to run repeatedly:",
" }",
"}",
"",
]
) # yapf: disable
if not main_content:
return project_dir
with fs.cd(project_dir):
@ -210,41 +213,46 @@ class ProjectRPC(object):
def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
board = str(board)
if arduino_project_dir and PY2:
arduino_project_dir = arduino_project_dir.encode(
get_filesystem_encoding())
arduino_project_dir = arduino_project_dir.encode(get_filesystem_encoding())
# don't import PIO Project
if is_platformio_project(arduino_project_dir):
return arduino_project_dir
is_arduino_project = any([
isfile(
join(arduino_project_dir,
"%s.%s" % (basename(arduino_project_dir), ext)))
for ext in ("ino", "pde")
])
is_arduino_project = any(
[
isfile(
join(
arduino_project_dir,
"%s.%s" % (basename(arduino_project_dir), ext),
)
)
for ext in ("ino", "pde")
]
)
if not is_arduino_project:
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4000,
message="Not an Arduino project: %s" % arduino_project_dir)
code=4000, message="Not an Arduino project: %s" % arduino_project_dir
)
state = AppRPC.load_state()
project_dir = join(state['storage']['projectsDir'],
time.strftime("%y%m%d-%H%M%S-") + board)
project_dir = join(
state["storage"]["projectsDir"], time.strftime("%y%m%d-%H%M%S-") + board
)
if not isdir(project_dir):
os.makedirs(project_dir)
args = ["init", "--board", board]
args.extend(["--project-option", "framework = arduino"])
if use_arduino_libs:
args.extend([
"--project-option",
"lib_extra_dirs = ~/Documents/Arduino/libraries"
])
if (state['storage']['coreCaller'] and state['storage']['coreCaller']
in ProjectGenerator.get_supported_ides()):
args.extend(["--ide", state['storage']['coreCaller']])
args.extend(
["--project-option", "lib_extra_dirs = ~/Documents/Arduino/libraries"]
)
if (
state["storage"]["coreCaller"]
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d.addCallback(self._finalize_arduino_import, project_dir,
arduino_project_dir)
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
return d
@staticmethod
@ -260,18 +268,21 @@ class ProjectRPC(object):
def import_pio(project_dir):
if not project_dir or not is_platformio_project(project_dir):
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4001,
message="Not an PlatformIO project: %s" % project_dir)
code=4001, message="Not an PlatformIO project: %s" % project_dir
)
new_project_dir = join(
AppRPC.load_state()['storage']['projectsDir'],
time.strftime("%y%m%d-%H%M%S-") + basename(project_dir))
AppRPC.load_state()["storage"]["projectsDir"],
time.strftime("%y%m%d-%H%M%S-") + basename(project_dir),
)
shutil.copytree(project_dir, new_project_dir)
state = AppRPC.load_state()
args = ["init"]
if (state['storage']['coreCaller'] and state['storage']['coreCaller']
in ProjectGenerator.get_supported_ides()):
args.extend(["--ide", state['storage']['coreCaller']])
if (
state["storage"]["coreCaller"]
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
d.addCallback(lambda _: new_project_dir)
return d


@ -16,8 +16,7 @@
import click
import jsonrpc
from autobahn.twisted.websocket import (WebSocketServerFactory,
WebSocketServerProtocol)
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from jsonrpc.exceptions import JSONRPCDispatchException
from twisted.internet import defer
@ -25,40 +24,39 @@ from platformio.compat import PY2, dump_json_to_unicode, is_bytes
class JSONRPCServerProtocol(WebSocketServerProtocol):
def onMessage(self, payload, isBinary): # pylint: disable=unused-argument
# click.echo("> %s" % payload)
response = jsonrpc.JSONRPCResponseManager.handle(
payload, self.factory.dispatcher).data
payload, self.factory.dispatcher
).data
# if error
if "result" not in response:
self.sendJSONResponse(response)
return None
d = defer.maybeDeferred(lambda: response['result'])
d = defer.maybeDeferred(lambda: response["result"])
d.addCallback(self._callback, response)
d.addErrback(self._errback, response)
return None
def _callback(self, result, response):
response['result'] = result
response["result"] = result
self.sendJSONResponse(response)
def _errback(self, failure, response):
if isinstance(failure.value, JSONRPCDispatchException):
e = failure.value
else:
e = JSONRPCDispatchException(code=4999,
message=failure.getErrorMessage())
e = JSONRPCDispatchException(code=4999, message=failure.getErrorMessage())
del response["result"]
response['error'] = e.error._data # pylint: disable=protected-access
response["error"] = e.error._data # pylint: disable=protected-access
self.sendJSONResponse(response)
def sendJSONResponse(self, response):
# click.echo("< %s" % response)
if "error" in response:
click.secho("Error: %s" % response['error'], fg="red", err=True)
click.secho("Error: %s" % response["error"], fg="red", err=True)
response = dump_json_to_unicode(response)
if not PY2 and not is_bytes(response):
response = response.encode("utf-8")


@ -17,14 +17,12 @@ from twisted.web import static # pylint: disable=import-error
class WebRoot(static.File):
def render_GET(self, request):
if request.args.get("__shutdown__", False):
reactor.stop()
return "Server has been stopped"
request.setHeader("cache-control",
"no-cache, no-store, must-revalidate")
request.setHeader("cache-control", "no-cache, no-store, must-revalidate")
request.setHeader("pragma", "no-cache")
request.setHeader("expires", "0")
return static.File.render_GET(self, request)


@ -20,16 +20,17 @@ from os.path import isdir, isfile, join
import click
from platformio import exception, fs
from platformio.commands.platform import \
platform_install as cli_platform_install
from platformio.commands.platform import platform_install as cli_platform_install
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_include_dir,
get_project_lib_dir,
get_project_src_dir,
get_project_test_dir,
is_platformio_project)
from platformio.project.helpers import (
get_project_include_dir,
get_project_lib_dir,
get_project_src_dir,
get_project_test_dir,
is_platformio_project,
)
def validate_boards(ctx, param, value): # pylint: disable=W0613
@ -40,66 +41,66 @@ def validate_boards(ctx, param, value): # pylint: disable=W0613
except exception.UnknownBoard:
raise click.BadParameter(
"`%s`. Please search for board ID using `platformio boards` "
"command" % id_)
"command" % id_
)
return value
@click.command("init",
short_help="Initialize PlatformIO project or update existing")
@click.option("--project-dir",
"-d",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-b",
"--board",
multiple=True,
metavar="ID",
callback=validate_boards)
@click.option("--ide",
type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.command("init", short_help="Initialize PlatformIO project or update existing")
@click.option(
"--project-dir",
"-d",
default=getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.option("-O", "--project-option", multiple=True)
@click.option("--env-prefix", default="")
@click.option("-s", "--silent", is_flag=True)
@click.pass_context
def cli(
ctx, # pylint: disable=R0913
project_dir,
board,
ide,
project_option,
env_prefix,
silent):
ctx, # pylint: disable=R0913
project_dir,
board,
ide,
project_option,
env_prefix,
silent,
):
if not silent:
if project_dir == getcwd():
click.secho("\nThe current working directory",
fg="yellow",
nl=False)
click.secho("\nThe current working directory", fg="yellow", nl=False)
click.secho(" %s " % project_dir, fg="cyan", nl=False)
click.secho("will be used for the project.", fg="yellow")
click.echo("")
click.echo("The next files/directories have been created in %s" %
click.style(project_dir, fg="cyan"))
click.echo("%s - Put project header files here" %
click.style("include", fg="cyan"))
click.echo("%s - Put here project specific (private) libraries" %
click.style("lib", fg="cyan"))
click.echo("%s - Put project source files here" %
click.style("src", fg="cyan"))
click.echo("%s - Project Configuration File" %
click.style("platformio.ini", fg="cyan"))
click.echo(
"The next files/directories have been created in %s"
% click.style(project_dir, fg="cyan")
)
click.echo(
"%s - Put project header files here" % click.style("include", fg="cyan")
)
click.echo(
"%s - Put here project specific (private) libraries"
% click.style("lib", fg="cyan")
)
click.echo("%s - Put project source files here" % click.style("src", fg="cyan"))
click.echo(
"%s - Project Configuration File" % click.style("platformio.ini", fg="cyan")
)
is_new_project = not is_platformio_project(project_dir)
if is_new_project:
init_base_project(project_dir)
if board:
fill_project_envs(ctx, project_dir, board, project_option, env_prefix,
ide is not None)
fill_project_envs(
ctx, project_dir, board, project_option, env_prefix, ide is not None
)
if ide:
pg = ProjectGenerator(project_dir, ide, board)
@ -115,9 +116,9 @@ def cli(
if ide:
click.secho(
"\nProject has been successfully %s including configuration files "
"for `%s` IDE." %
("initialized" if is_new_project else "updated", ide),
fg="green")
"for `%s` IDE." % ("initialized" if is_new_project else "updated", ide),
fg="green",
)
else:
click.secho(
"\nProject has been successfully %s! Useful commands:\n"
@ -125,9 +126,10 @@ def cli(
"`pio run --target upload` or `pio run -t upload` "
"- upload firmware to a target\n"
"`pio run --target clean` - clean project (remove compiled files)"
"\n`pio run --help` - additional information" %
("initialized" if is_new_project else "updated"),
fg="green")
"\n`pio run --help` - additional information"
% ("initialized" if is_new_project else "updated"),
fg="green",
)
def init_base_project(project_dir):
@ -149,7 +151,8 @@ def init_base_project(project_dir):
def init_include_readme(include_dir):
with open(join(include_dir, "README"), "w") as f:
f.write("""
f.write(
"""
This directory is intended for project header files.
A header file is a file containing C declarations and macro definitions
@ -188,12 +191,15 @@ Read more about using header files in official GCC documentation:
* Computed Includes
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
""")
"""
)
def init_lib_readme(lib_dir):
with open(join(lib_dir, "README"), "w") as f:
f.write("""
# pylint: disable=line-too-long
f.write(
"""
This directory is intended for project specific (private) libraries.
PlatformIO will compile them to static libraries and link into executable file.
@ -239,12 +245,14 @@ libraries scanning project source files.
More information about PlatformIO Library Dependency Finder
- https://docs.platformio.org/page/librarymanager/ldf.html
""")
"""
)
def init_test_readme(test_dir):
with open(join(test_dir, "README"), "w") as f:
f.write("""
f.write(
"""
This directory is intended for PIO Unit Testing and project tests.
Unit Testing is a software testing method by which individual units of
@ -255,7 +263,8 @@ in the development cycle.
More information about PIO Unit Testing:
- https://docs.platformio.org/page/plus/unit-testing.html
""")
"""
)
def init_ci_conf(project_dir):
@ -263,7 +272,8 @@ def init_ci_conf(project_dir):
if isfile(conf_path):
return
with open(conf_path, "w") as f:
f.write("""# Continuous Integration (CI) is the practice, in software
f.write(
"""# Continuous Integration (CI) is the practice, in software
# engineering, of merging all developer working copies with a shared mainline
# several times a day < https://docs.platformio.org/page/ci/index.html >
#
@ -330,7 +340,8 @@ def init_ci_conf(project_dir):
#
# script:
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
""")
"""
)
def init_cvs_ignore(project_dir):
@ -341,16 +352,13 @@ def init_cvs_ignore(project_dir):
fp.write(".pio\n")
def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
force_download):
config = ProjectConfig(join(project_dir, "platformio.ini"),
parse_extra=False)
def fill_project_envs(
ctx, project_dir, board_ids, project_option, env_prefix, force_download
):
config = ProjectConfig(join(project_dir, "platformio.ini"), parse_extra=False)
used_boards = []
for section in config.sections():
cond = [
section.startswith("env:"),
config.has_option(section, "board")
]
cond = [section.startswith("env:"), config.has_option(section, "board")]
if all(cond):
used_boards.append(config.get(section, "board"))
@ -359,17 +367,17 @@ def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
modified = False
for id_ in board_ids:
board_config = pm.board_config(id_)
used_platforms.append(board_config['platform'])
used_platforms.append(board_config["platform"])
if id_ in used_boards:
continue
used_boards.append(id_)
modified = True
envopts = {"platform": board_config['platform'], "board": id_}
envopts = {"platform": board_config["platform"], "board": id_}
# find default framework for board
frameworks = board_config.get("frameworks")
if frameworks:
envopts['framework'] = frameworks[0]
envopts["framework"] = frameworks[0]
for item in project_option:
if "=" not in item:
@ -391,10 +399,9 @@ def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
def _install_dependent_platforms(ctx, platforms):
installed_platforms = [
p['name'] for p in PlatformManager().get_installed()
]
installed_platforms = [p["name"] for p in PlatformManager().get_installed()]
if set(platforms) <= set(installed_platforms):
return
ctx.invoke(cli_platform_install,
platforms=list(set(platforms) - set(installed_platforms)))
ctx.invoke(
cli_platform_install, platforms=list(set(platforms) - set(installed_platforms))
)


@ -24,14 +24,15 @@ from tabulate import tabulate
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.compat import dump_json_to_unicode
from platformio.managers.lib import (LibraryManager, get_builtin_libs,
is_builtin_lib)
from platformio.managers.lib import LibraryManager, get_builtin_libs, is_builtin_lib
from platformio.proc import is_ci
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_dir,
get_project_global_lib_dir,
get_project_libdeps_dir,
is_platformio_project)
from platformio.project.helpers import (
get_project_dir,
get_project_global_lib_dir,
get_project_libdeps_dir,
is_platformio_project,
)
try:
from urllib.parse import quote
@ -45,35 +46,38 @@ CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
@click.group(short_help="Library Manager")
@click.option("-d",
"--storage-dir",
multiple=True,
default=None,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True),
help="Manage custom library storage")
@click.option("-g",
"--global",
is_flag=True,
help="Manage global PlatformIO library storage")
@click.option(
"-d",
"--storage-dir",
multiple=True,
default=None,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
help="Manage custom library storage",
)
@click.option(
"-g", "--global", is_flag=True, help="Manage global PlatformIO library storage"
)
@click.option(
"-e",
"--environment",
multiple=True,
help=("Manage libraries for the specific project build environments "
"declared in `platformio.ini`"))
help=(
"Manage libraries for the specific project build environments "
"declared in `platformio.ini`"
),
)
@click.pass_context
def cli(ctx, **options):
storage_cmds = ("install", "uninstall", "update", "list")
# skip commands that don't need storage folder
if ctx.invoked_subcommand not in storage_cmds or \
(len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")):
if ctx.invoked_subcommand not in storage_cmds or (
len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")
):
return
storage_dirs = list(options['storage_dir'])
if options['global']:
storage_dirs = list(options["storage_dir"])
if options["global"]:
storage_dirs.append(get_project_global_lib_dir())
if not storage_dirs:
if is_platformio_project():
@ -84,15 +88,16 @@ def cli(ctx, **options):
"Warning! Global library storage is used automatically. "
"Please use `platformio lib --global %s` command to remove "
"this warning." % ctx.invoked_subcommand,
fg="yellow")
fg="yellow",
)
if not storage_dirs:
raise exception.NotGlobalLibDir(get_project_dir(),
get_project_global_lib_dir(),
ctx.invoked_subcommand)
raise exception.NotGlobalLibDir(
get_project_dir(), get_project_global_lib_dir(), ctx.invoked_subcommand
)
in_silence = PlatformioCLI.in_silence()
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options['environment']
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options["environment"]
ctx.meta[CTX_META_INPUT_DIRS_KEY] = storage_dirs
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = []
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY] = {}
@ -102,16 +107,16 @@ def cli(ctx, **options):
continue
with fs.cd(storage_dir):
libdeps_dir = get_project_libdeps_dir()
config = ProjectConfig.get_instance(join(storage_dir,
"platformio.ini"))
config.validate(options['environment'], silent=in_silence)
config = ProjectConfig.get_instance(join(storage_dir, "platformio.ini"))
config.validate(options["environment"], silent=in_silence)
for env in config.envs():
if options['environment'] and env not in options['environment']:
if options["environment"] and env not in options["environment"]:
continue
storage_dir = join(libdeps_dir, env)
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
"env:" + env, "lib_deps", [])
"env:" + env, "lib_deps", []
)
@cli.command("install", short_help="Install library")
@ -119,21 +124,19 @@ def cli(ctx, **options):
@click.option(
"--save",
is_flag=True,
help="Save installed libraries into the `platformio.ini` dependency list")
@click.option("-s",
"--silent",
is_flag=True,
help="Suppress progress reporting")
@click.option("--interactive",
is_flag=True,
help="Allow to make a choice for all prompts")
@click.option("-f",
"--force",
is_flag=True,
help="Reinstall/redownload library if exists")
help="Save installed libraries into the `platformio.ini` dependency list",
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"--interactive", is_flag=True, help="Allow to make a choice for all prompts"
)
@click.option(
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
)
@click.pass_context
def lib_install( # pylint: disable=too-many-arguments
ctx, libraries, save, silent, interactive, force):
ctx, libraries, save, silent, interactive, force
):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
@ -144,25 +147,22 @@ def lib_install( # pylint: disable=too-many-arguments
lm = LibraryManager(storage_dir)
if libraries:
for library in libraries:
pkg_dir = lm.install(library,
silent=silent,
interactive=interactive,
force=force)
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
installed_manifests[library] = lm.load_manifest(pkg_dir)
elif storage_dir in storage_libdeps:
builtin_lib_storages = None
for library in storage_libdeps[storage_dir]:
try:
pkg_dir = lm.install(library,
silent=silent,
interactive=interactive,
force=force)
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
installed_manifests[library] = lm.load_manifest(pkg_dir)
except exception.LibNotFound as e:
if builtin_lib_storages is None:
builtin_lib_storages = get_builtin_libs()
if not silent or not is_builtin_lib(
builtin_lib_storages, library):
if not silent or not is_builtin_lib(builtin_lib_storages, library):
click.secho("Warning! %s" % e, fg="yellow")
if not save or not libraries:
@ -183,8 +183,8 @@ def lib_install( # pylint: disable=too-many-arguments
continue
manifest = installed_manifests[library]
try:
assert library.lower() == manifest['name'].lower()
assert semantic_version.Version(manifest['version'])
assert library.lower() == manifest["name"].lower()
assert semantic_version.Version(manifest["version"])
lib_deps.append("{name}@^{version}".format(**manifest))
except (AssertionError, ValueError):
lib_deps.append(library)
@ -206,13 +206,15 @@ def lib_uninstall(ctx, libraries):
@cli.command("update", short_help="Update installed libraries")
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("--json-output", is_flag=True)
@click.pass_context
def lib_update(ctx, libraries, only_check, dry_run, json_output):
@ -226,9 +228,7 @@ def lib_update(ctx, libraries, only_check, dry_run, json_output):
_libraries = libraries
if not _libraries:
_libraries = [
manifest['__pkg_dir'] for manifest in lm.get_installed()
]
_libraries = [manifest["__pkg_dir"] for manifest in lm.get_installed()]
if only_check and json_output:
result = []
@ -245,7 +245,7 @@ def lib_update(ctx, libraries, only_check, dry_run, json_output):
if not latest:
continue
manifest = lm.load_manifest(pkg_dir)
manifest['versionLatest'] = latest
manifest["versionLatest"] = latest
result.append(manifest)
json_result[storage_dir] = result
else:
@ -254,8 +254,10 @@ def lib_update(ctx, libraries, only_check, dry_run, json_output):
if json_output:
return click.echo(
dump_json_to_unicode(json_result[storage_dirs[0]]
if len(storage_dirs) == 1 else json_result))
dump_json_to_unicode(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
return True
@ -274,15 +276,17 @@ def lib_list(ctx, json_output):
if json_output:
json_result[storage_dir] = items
elif items:
for item in sorted(items, key=lambda i: i['name']):
for item in sorted(items, key=lambda i: i["name"]):
print_lib_item(item)
else:
click.echo("No items found")
if json_output:
return click.echo(
dump_json_to_unicode(json_result[storage_dirs[0]]
if len(storage_dirs) == 1 else json_result))
dump_json_to_unicode(
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
)
)
return True
@ -298,9 +302,11 @@ def lib_list(ctx, json_output):
@click.option("-f", "--framework", multiple=True)
@click.option("-p", "--platform", multiple=True)
@click.option("-i", "--header", multiple=True)
@click.option("--noninteractive",
is_flag=True,
help="Do not prompt, automatically paginate with delay")
@click.option(
"--noninteractive",
is_flag=True,
help="Do not prompt, automatically paginate with delay",
)
def lib_search(query, json_output, page, noninteractive, **filters):
if not query:
query = []
@ -311,55 +317,61 @@ def lib_search(query, json_output, page, noninteractive, **filters):
for value in values:
query.append('%s:"%s"' % (key, value))
result = util.get_api_result("/v2/lib/search",
dict(query=" ".join(query), page=page),
cache_valid="1d")
result = util.get_api_result(
"/v2/lib/search", dict(query=" ".join(query), page=page), cache_valid="1d"
)
if json_output:
click.echo(dump_json_to_unicode(result))
return
if result['total'] == 0:
if result["total"] == 0:
click.secho(
"Nothing has been found by your request\n"
"Try a less-specific search or use truncation (or wildcard) "
"operator",
fg="yellow",
nl=False)
nl=False,
)
click.secho(" *", fg="green")
click.secho("For example: DS*, PCA*, DHT* and etc.\n", fg="yellow")
click.echo("For more examples and advanced search syntax, "
"please use documentation:")
click.echo(
"For more examples and advanced search syntax, " "please use documentation:"
)
click.secho(
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
fg="cyan")
fg="cyan",
)
return
click.secho("Found %d libraries:\n" % result['total'],
fg="green" if result['total'] else "yellow")
click.secho(
"Found %d libraries:\n" % result["total"],
fg="green" if result["total"] else "yellow",
)
while True:
for item in result['items']:
for item in result["items"]:
print_lib_item(item)
if (int(result['page']) * int(result['perpage']) >= int(
result['total'])):
if int(result["page"]) * int(result["perpage"]) >= int(result["total"]):
break
if noninteractive:
click.echo()
click.secho("Loading next %d libraries... Press Ctrl+C to stop!" %
result['perpage'],
fg="yellow")
click.secho(
"Loading next %d libraries... Press Ctrl+C to stop!"
% result["perpage"],
fg="yellow",
)
click.echo()
time.sleep(5)
elif not click.confirm("Show next libraries?"):
break
result = util.get_api_result("/v2/lib/search", {
"query": " ".join(query),
"page": int(result['page']) + 1
},
cache_valid="1d")
result = util.get_api_result(
"/v2/lib/search",
{"query": " ".join(query), "page": int(result["page"]) + 1},
cache_valid="1d",
)
@cli.command("builtin", short_help="List built-in libraries")
@ -371,13 +383,13 @@ def lib_builtin(storage, json_output):
return click.echo(dump_json_to_unicode(items))
for storage_ in items:
if not storage_['items']:
if not storage_["items"]:
continue
click.secho(storage_['name'], fg="green")
click.echo("*" * len(storage_['name']))
click.secho(storage_["name"], fg="green")
click.echo("*" * len(storage_["name"]))
click.echo()
for item in sorted(storage_['items'], key=lambda i: i['name']):
for item in sorted(storage_["items"], key=lambda i: i["name"]):
print_lib_item(item)
return True
@ -389,27 +401,29 @@ def lib_builtin(storage, json_output):
def lib_show(library, json_output):
lm = LibraryManager()
name, requirements, _ = lm.parse_pkg_uri(library)
lib_id = lm.search_lib_id({
"name": name,
"requirements": requirements
},
silent=json_output,
interactive=not json_output)
lib_id = lm.search_lib_id(
{"name": name, "requirements": requirements},
silent=json_output,
interactive=not json_output,
)
lib = util.get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
if json_output:
return click.echo(dump_json_to_unicode(lib))
click.secho(lib['name'], fg="cyan")
click.echo("=" * len(lib['name']))
click.secho("#ID: %d" % lib['id'], bold=True)
click.echo(lib['description'])
click.secho(lib["name"], fg="cyan")
click.echo("=" * len(lib["name"]))
click.secho("#ID: %d" % lib["id"], bold=True)
click.echo(lib["description"])
click.echo()
click.echo(
"Version: %s, released %s" %
(lib['version']['name'],
time.strftime("%c", util.parse_date(lib['version']['released']))))
click.echo("Manifest: %s" % lib['confurl'])
"Version: %s, released %s"
% (
lib["version"]["name"],
time.strftime("%c", util.parse_date(lib["version"]["released"])),
)
)
click.echo("Manifest: %s" % lib["confurl"])
for key in ("homepage", "repository", "license"):
if key not in lib or not lib[key]:
continue
@ -436,23 +450,33 @@ def lib_show(library, json_output):
if _authors:
blocks.append(("Authors", _authors))
blocks.append(("Keywords", lib['keywords']))
blocks.append(("Keywords", lib["keywords"]))
for key in ("frameworks", "platforms"):
if key not in lib or not lib[key]:
continue
blocks.append(("Compatible %s" % key, [i['title'] for i in lib[key]]))
blocks.append(("Headers", lib['headers']))
blocks.append(("Examples", lib['examples']))
blocks.append(("Versions", [
"%s, released %s" %
(v['name'], time.strftime("%c", util.parse_date(v['released'])))
for v in lib['versions']
]))
blocks.append(("Unique Downloads", [
"Today: %s" % lib['dlstats']['day'],
"Week: %s" % lib['dlstats']['week'],
"Month: %s" % lib['dlstats']['month']
]))
blocks.append(("Compatible %s" % key, [i["title"] for i in lib[key]]))
blocks.append(("Headers", lib["headers"]))
blocks.append(("Examples", lib["examples"]))
blocks.append(
(
"Versions",
[
"%s, released %s"
% (v["name"], time.strftime("%c", util.parse_date(v["released"])))
for v in lib["versions"]
],
)
)
blocks.append(
(
"Unique Downloads",
[
"Today: %s" % lib["dlstats"]["day"],
"Week: %s" % lib["dlstats"]["week"],
"Month: %s" % lib["dlstats"]["month"],
],
)
)
for (title, rows) in blocks:
click.echo()
@ -467,16 +491,15 @@ def lib_show(library, json_output):
@cli.command("register", short_help="Register a new library")
@click.argument("config_url")
def lib_register(config_url):
if (not config_url.startswith("http://")
and not config_url.startswith("https://")):
if not config_url.startswith("http://") and not config_url.startswith("https://"):
raise exception.InvalidLibConfURL(config_url)
result = util.get_api_result("/lib/register",
data=dict(config_url=config_url))
if "message" in result and result['message']:
click.secho(result['message'],
fg="green" if "successed" in result and result['successed']
else "red")
result = util.get_api_result("/lib/register", data=dict(config_url=config_url))
if "message" in result and result["message"]:
click.secho(
result["message"],
fg="green" if "successed" in result and result["successed"] else "red",
)
@cli.command("stats", short_help="Library Registry Statistics")
@ -488,46 +511,56 @@ def lib_stats(json_output):
return click.echo(dump_json_to_unicode(result))
for key in ("updated", "added"):
tabular_data = [(click.style(item['name'], fg="cyan"),
time.strftime("%c", util.parse_date(item['date'])),
"https://platformio.org/lib/show/%s/%s" %
(item['id'], quote(item['name'])))
for item in result.get(key, [])]
table = tabulate(tabular_data,
headers=[
click.style("RECENTLY " + key.upper(), bold=True),
"Date", "URL"
])
tabular_data = [
(
click.style(item["name"], fg="cyan"),
time.strftime("%c", util.parse_date(item["date"])),
"https://platformio.org/lib/show/%s/%s"
% (item["id"], quote(item["name"])),
)
for item in result.get(key, [])
]
table = tabulate(
tabular_data,
headers=[click.style("RECENTLY " + key.upper(), bold=True), "Date", "URL"],
)
click.echo(table)
click.echo()
for key in ("lastkeywords", "topkeywords"):
tabular_data = [(click.style(name, fg="cyan"),
"https://platformio.org/lib/search?query=" +
quote("keyword:%s" % name))
for name in result.get(key, [])]
tabular_data = [
(
click.style(name, fg="cyan"),
"https://platformio.org/lib/search?query=" + quote("keyword:%s" % name),
)
for name in result.get(key, [])
]
table = tabulate(
tabular_data,
headers=[
click.style(
("RECENT" if key == "lastkeywords" else "POPULAR") +
" KEYWORDS",
bold=True), "URL"
])
("RECENT" if key == "lastkeywords" else "POPULAR") + " KEYWORDS",
bold=True,
),
"URL",
],
)
click.echo(table)
click.echo()
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
"Month")):
tabular_data = [(click.style(item['name'], fg="cyan"),
"https://platformio.org/lib/show/%s/%s" %
(item['id'], quote(item['name'])))
for item in result.get(key, [])]
table = tabulate(tabular_data,
headers=[
click.style("FEATURED: " + title.upper(),
bold=True), "URL"
])
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth", "Month")):
tabular_data = [
(
click.style(item["name"], fg="cyan"),
"https://platformio.org/lib/show/%s/%s"
% (item["id"], quote(item["name"])),
)
for item in result.get(key, [])
]
table = tabulate(
tabular_data,
headers=[click.style("FEATURED: " + title.upper(), bold=True), "URL"],
)
click.echo(table)
click.echo()
@ -538,15 +571,16 @@ def print_storage_header(storage_dirs, storage_dir):
if storage_dirs and storage_dirs[0] != storage_dir:
click.echo("")
click.echo(
click.style("Library Storage: ", bold=True) +
click.style(storage_dir, fg="blue"))
click.style("Library Storage: ", bold=True)
+ click.style(storage_dir, fg="blue")
)
def print_lib_item(item):
click.secho(item['name'], fg="cyan")
click.echo("=" * len(item['name']))
click.secho(item["name"], fg="cyan")
click.echo("=" * len(item["name"]))
if "id" in item:
click.secho("#ID: %d" % item['id'], bold=True)
click.secho("#ID: %d" % item["id"], bold=True)
if "description" in item or "url" in item:
click.echo(item.get("description", item.get("url", "")))
click.echo()
@ -562,14 +596,26 @@ def print_lib_item(item):
for key in ("frameworks", "platforms"):
if key not in item:
continue
click.echo("Compatible %s: %s" % (key, ", ".join(
[i['title'] if isinstance(i, dict) else i for i in item[key]])))
click.echo(
"Compatible %s: %s"
% (
key,
", ".join(
[i["title"] if isinstance(i, dict) else i for i in item[key]]
),
)
)
if "authors" in item or "authornames" in item:
click.echo("Authors: %s" % ", ".join(
item.get("authornames",
[a.get("name", "") for a in item.get("authors", [])])))
click.echo(
"Authors: %s"
% ", ".join(
item.get(
"authornames", [a.get("name", "") for a in item.get("authors", [])]
)
)
)
if "__src_url" in item:
click.secho("Source: %s" % item['__src_url'])
click.secho("Source: %s" % item["__src_url"])
click.echo()


@ -29,24 +29,27 @@ def cli():
def _print_platforms(platforms):
for platform in platforms:
click.echo("{name} ~ {title}".format(name=click.style(platform['name'],
fg="cyan"),
title=platform['title']))
click.echo("=" * (3 + len(platform['name'] + platform['title'])))
click.echo(platform['description'])
click.echo(
"{name} ~ {title}".format(
name=click.style(platform["name"], fg="cyan"), title=platform["title"]
)
)
click.echo("=" * (3 + len(platform["name"] + platform["title"])))
click.echo(platform["description"])
click.echo()
if "homepage" in platform:
click.echo("Home: %s" % platform['homepage'])
if "frameworks" in platform and platform['frameworks']:
click.echo("Frameworks: %s" % ", ".join(platform['frameworks']))
click.echo("Home: %s" % platform["homepage"])
if "frameworks" in platform and platform["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(platform["frameworks"]))
if "packages" in platform:
click.echo("Packages: %s" % ", ".join(platform['packages']))
click.echo("Packages: %s" % ", ".join(platform["packages"]))
if "version" in platform:
if "__src_url" in platform:
click.echo("Version: #%s (%s)" %
(platform['version'], platform['__src_url']))
click.echo(
"Version: #%s (%s)" % (platform["version"], platform["__src_url"])
)
else:
click.echo("Version: " + platform['version'])
click.echo("Version: " + platform["version"])
click.echo()
@ -54,7 +57,7 @@ def _get_registry_platforms():
platforms = util.get_api_result("/platforms", cache_valid="7d")
pm = PlatformManager()
for platform in platforms or []:
platform['versions'] = pm.get_all_repo_versions(platform['name'])
platform["versions"] = pm.get_all_repo_versions(platform["name"])
return platforms
@ -65,22 +68,22 @@ def _get_platform_data(*args, **kwargs):
return _get_registry_platform_data(*args, **kwargs)
def _get_installed_platform_data(platform,
with_boards=True,
expose_packages=True):
def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
p = PlatformFactory.newPlatform(platform)
data = dict(name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [])
data = dict(
name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [],
)
# if dump to API
# del data['version']
@ -94,18 +97,20 @@ def _get_installed_platform_data(platform,
data[key] = manifest[key]
if with_boards:
data['boards'] = [c.get_brief_data() for c in p.get_boards().values()]
data["boards"] = [c.get_brief_data() for c in p.get_boards().values()]
if not data['packages'] or not expose_packages:
if not data["packages"] or not expose_packages:
return data
data['packages'] = []
data["packages"] = []
installed_pkgs = p.get_installed_packages()
for name, opts in p.packages.items():
item = dict(name=name,
type=p.get_package_type(name),
requirements=opts.get("version"),
optional=opts.get("optional") is True)
item = dict(
name=name,
type=p.get_package_type(name),
requirements=opts.get("version"),
optional=opts.get("optional") is True,
)
if name in installed_pkgs:
for key, value in installed_pkgs[name].items():
if key not in ("url", "version", "description"):
@ -113,40 +118,42 @@ def _get_installed_platform_data(platform,
item[key] = value
if key == "version":
item["originalVersion"] = util.get_original_version(value)
data['packages'].append(item)
data["packages"].append(item)
return data
def _get_registry_platform_data( # pylint: disable=unused-argument
platform,
with_boards=True,
expose_packages=True):
platform, with_boards=True, expose_packages=True
):
_data = None
for p in _get_registry_platforms():
if p['name'] == platform:
if p["name"] == platform:
_data = p
break
if not _data:
return None
data = dict(name=_data['name'],
title=_data['title'],
description=_data['description'],
homepage=_data['homepage'],
repository=_data['repository'],
url=_data['url'],
license=_data['license'],
forDesktop=_data['forDesktop'],
frameworks=_data['frameworks'],
packages=_data['packages'],
versions=_data['versions'])
data = dict(
name=_data["name"],
title=_data["title"],
description=_data["description"],
homepage=_data["homepage"],
repository=_data["repository"],
url=_data["url"],
license=_data["license"],
forDesktop=_data["forDesktop"],
frameworks=_data["frameworks"],
packages=_data["packages"],
versions=_data["versions"],
)
if with_boards:
data['boards'] = [
board for board in PlatformManager().get_registered_boards()
if board['platform'] == _data['name']
data["boards"] = [
board
for board in PlatformManager().get_registered_boards()
if board["platform"] == _data["name"]
]
return data
@ -164,9 +171,10 @@ def platform_search(query, json_output):
if query and query.lower() not in search_data.lower():
continue
platforms.append(
_get_registry_platform_data(platform['name'],
with_boards=False,
expose_packages=False))
_get_registry_platform_data(
platform["name"], with_boards=False, expose_packages=False
)
)
if json_output:
click.echo(dump_json_to_unicode(platforms))
@ -185,15 +193,15 @@ def platform_frameworks(query, json_output):
search_data = dump_json_to_unicode(framework)
if query and query.lower() not in search_data.lower():
continue
framework['homepage'] = ("https://platformio.org/frameworks/" +
framework['name'])
framework['platforms'] = [
platform['name'] for platform in _get_registry_platforms()
if framework['name'] in platform['frameworks']
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
framework["platforms"] = [
platform["name"]
for platform in _get_registry_platforms()
if framework["name"] in platform["frameworks"]
]
frameworks.append(framework)
frameworks = sorted(frameworks, key=lambda manifest: manifest['name'])
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
if json_output:
click.echo(dump_json_to_unicode(frameworks))
else:
@ -207,11 +215,12 @@ def platform_list(json_output):
pm = PlatformManager()
for manifest in pm.get_installed():
platforms.append(
_get_installed_platform_data(manifest['__pkg_dir'],
with_boards=False,
expose_packages=False))
_get_installed_platform_data(
manifest["__pkg_dir"], with_boards=False, expose_packages=False
)
)
platforms = sorted(platforms, key=lambda manifest: manifest['name'])
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
if json_output:
click.echo(dump_json_to_unicode(platforms))
else:
@ -228,55 +237,58 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
if json_output:
return click.echo(dump_json_to_unicode(data))
click.echo("{name} ~ {title}".format(name=click.style(data['name'],
fg="cyan"),
title=data['title']))
click.echo("=" * (3 + len(data['name'] + data['title'])))
click.echo(data['description'])
click.echo(
"{name} ~ {title}".format(
name=click.style(data["name"], fg="cyan"), title=data["title"]
)
)
click.echo("=" * (3 + len(data["name"] + data["title"])))
click.echo(data["description"])
click.echo()
if "version" in data:
click.echo("Version: %s" % data['version'])
if data['homepage']:
click.echo("Home: %s" % data['homepage'])
if data['repository']:
click.echo("Repository: %s" % data['repository'])
if data['url']:
click.echo("Vendor: %s" % data['url'])
if data['license']:
click.echo("License: %s" % data['license'])
if data['frameworks']:
click.echo("Frameworks: %s" % ", ".join(data['frameworks']))
click.echo("Version: %s" % data["version"])
if data["homepage"]:
click.echo("Home: %s" % data["homepage"])
if data["repository"]:
click.echo("Repository: %s" % data["repository"])
if data["url"]:
click.echo("Vendor: %s" % data["url"])
if data["license"]:
click.echo("License: %s" % data["license"])
if data["frameworks"]:
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
if not data['packages']:
if not data["packages"]:
return None
if not isinstance(data['packages'][0], dict):
click.echo("Packages: %s" % ", ".join(data['packages']))
if not isinstance(data["packages"][0], dict):
click.echo("Packages: %s" % ", ".join(data["packages"]))
else:
click.echo()
click.secho("Packages", bold=True)
click.echo("--------")
for item in data['packages']:
for item in data["packages"]:
click.echo()
click.echo("Package %s" % click.style(item['name'], fg="yellow"))
click.echo("-" * (8 + len(item['name'])))
if item['type']:
click.echo("Type: %s" % item['type'])
click.echo("Requirements: %s" % item['requirements'])
click.echo("Installed: %s" %
("Yes" if item.get("version") else "No (optional)"))
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
click.echo("-" * (8 + len(item["name"])))
if item["type"]:
click.echo("Type: %s" % item["type"])
click.echo("Requirements: %s" % item["requirements"])
click.echo(
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
)
if "version" in item:
click.echo("Version: %s" % item['version'])
click.echo("Version: %s" % item["version"])
if "originalVersion" in item:
click.echo("Original version: %s" % item['originalVersion'])
click.echo("Original version: %s" % item["originalVersion"])
if "description" in item:
click.echo("Description: %s" % item['description'])
click.echo("Description: %s" % item["description"])
if data['boards']:
if data["boards"]:
click.echo()
click.secho("Boards", bold=True)
click.echo("------")
print_boards(data['boards'])
print_boards(data["boards"])
return True
@ -290,20 +302,26 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
"-f",
"--force",
is_flag=True,
help="Reinstall/redownload dev/platform and its packages if exist")
def platform_install(platforms, with_package, without_package,
skip_default_package, force):
help="Reinstall/redownload dev/platform and its packages if exist",
)
def platform_install(
platforms, with_package, without_package, skip_default_package, force
):
pm = PlatformManager()
for platform in platforms:
if pm.install(name=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
force=force):
click.secho("The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"depending on your build environment." % platform,
fg="green")
if pm.install(
name=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
force=force,
):
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"depending on your build environment." % platform,
fg="green",
)
@cli.command("uninstall", short_help="Uninstall development platform")
@ -312,35 +330,39 @@ def platform_uninstall(platforms):
pm = PlatformManager()
for platform in platforms:
if pm.uninstall(platform):
click.secho("The platform '%s' has been successfully "
"uninstalled!" % platform,
fg="green")
click.secho(
"The platform '%s' has been successfully " "uninstalled!" % platform,
fg="green",
)
@cli.command("update", short_help="Update installed development platforms")
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
@click.option("-p",
"--only-packages",
is_flag=True,
help="Update only the platform packages")
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
@click.option(
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
)
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("--json-output", is_flag=True)
def platform_update( # pylint: disable=too-many-locals
platforms, only_packages, only_check, dry_run, json_output):
platforms, only_packages, only_check, dry_run, json_output
):
pm = PlatformManager()
pkg_dir_to_name = {}
if not platforms:
platforms = []
for manifest in pm.get_installed():
platforms.append(manifest['__pkg_dir'])
pkg_dir_to_name[manifest['__pkg_dir']] = manifest.get(
"title", manifest['name'])
platforms.append(manifest["__pkg_dir"])
pkg_dir_to_name[manifest["__pkg_dir"]] = manifest.get(
"title", manifest["name"]
)
only_check = dry_run or only_check
@ -356,14 +378,16 @@ def platform_update( # pylint: disable=too-many-locals
if not pkg_dir:
continue
latest = pm.outdated(pkg_dir, requirements)
if (not latest and not PlatformFactory.newPlatform(
pkg_dir).are_outdated_packages()):
if (
not latest
and not PlatformFactory.newPlatform(pkg_dir).are_outdated_packages()
):
continue
data = _get_installed_platform_data(pkg_dir,
with_boards=False,
expose_packages=False)
data = _get_installed_platform_data(
pkg_dir, with_boards=False, expose_packages=False
)
if latest:
data['versionLatest'] = latest
data["versionLatest"] = latest
result.append(data)
return click.echo(dump_json_to_unicode(result))
@ -371,8 +395,9 @@ def platform_update( # pylint: disable=too-many-locals
app.clean_cache()
for platform in platforms:
click.echo(
"Platform %s" %
click.style(pkg_dir_to_name.get(platform, platform), fg="cyan"))
"Platform %s"
% click.style(pkg_dir_to_name.get(platform, platform), fg="cyan")
)
click.echo("--------")
pm.update(platform, only_packages=only_packages, only_check=only_check)
click.echo()

View File

@ -43,13 +43,12 @@ def remote_agent():
@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option("-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option(
"-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
def remote_agent_start(**kwargs):
pioplus_call(sys.argv[1:])
@ -64,15 +63,16 @@ def remote_agent_list():
pioplus_call(sys.argv[1:])
@cli.command("update",
short_help="Update installed Platforms, Packages and Libraries")
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
@cli.command("update", short_help="Update installed Platforms, Packages and Libraries")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
def remote_update(only_check, dry_run):
pioplus_call(sys.argv[1:])
@ -81,14 +81,14 @@ def remote_update(only_check, dry_run):
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=True,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@ -102,14 +102,14 @@ def remote_run(**kwargs):
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@ -131,58 +131,61 @@ def device_list(json_output):
@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option("--baud",
"-b",
type=int,
default=9600,
help="Set baud rate, default=9600")
@click.option("--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N")
@click.option("--rtscts",
is_flag=True,
help="Enable RTS/CTS flow control, default=Off")
@click.option("--xonxoff",
is_flag=True,
help="Enable software flow control, default=Off")
@click.option("--rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state")
@click.option("--dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state")
@click.option(
"--baud", "-b", type=int, default=9600, help="Set baud rate, default=9600"
)
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N",
)
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
)
@click.option(
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
)
@click.option(
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
)
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option("--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8",
)
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option("--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF")
@click.option("--raw",
is_flag=True,
help="Do not apply any encodings/transformations")
@click.option("--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)")
@click.option("--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)")
@click.option("--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF",
)
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)",
)
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)",
)
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off",
)
@click.pass_context
def device_monitor(ctx, **kwargs):
def _tx_target(sock_dir):
try:
pioplus_call(sys.argv[1:] + ["--sock", sock_dir])
@ -192,13 +195,13 @@ def device_monitor(ctx, **kwargs):
sock_dir = mkdtemp(suffix="pioplus")
sock_file = join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir, ))
t = threading.Thread(target=_tx_target, args=(sock_dir,))
t.start()
while t.is_alive() and not isfile(sock_file):
sleep(0.1)
if not t.is_alive():
return
kwargs['port'] = get_file_contents(sock_file)
kwargs["port"] = get_file_contents(sock_file)
ctx.invoke(cmd_device_monitor, **kwargs)
t.join(2)
finally:

View File

@ -22,13 +22,11 @@ from tabulate import tabulate
from platformio import exception, fs, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.commands.run.helpers import (clean_build_dir,
handle_legacy_libdeps)
from platformio.commands.run.helpers import clean_build_dir, handle_legacy_libdeps
from platformio.commands.run.processor import EnvironmentProcessor
from platformio.commands.test.processor import CTX_META_TEST_IS_RUNNING
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (find_project_dir_above,
get_project_build_dir)
from platformio.project.helpers import find_project_dir_above, get_project_build_dir
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
@ -42,34 +40,47 @@ except NotImplementedError:
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=True,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.option("-j",
"--jobs",
type=int,
default=DEFAULT_JOB_NUMS,
help=("Allow N jobs at once. "
"Default is a number of CPUs in a system (N=%d)" %
DEFAULT_JOB_NUMS))
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option(
"-j",
"--jobs",
type=int,
default=DEFAULT_JOB_NUMS,
help=(
"Allow N jobs at once. "
"Default is a number of CPUs in a system (N=%d)" % DEFAULT_JOB_NUMS
),
)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--disable-auto-clean", is_flag=True)
@click.pass_context
def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
silent, verbose, disable_auto_clean):
def cli(
ctx,
environment,
target,
upload_port,
project_dir,
project_conf,
jobs,
silent,
verbose,
disable_auto_clean,
):
# find project directory on upper level
if isfile(project_dir):
project_dir = find_project_dir_above(project_dir)
@ -78,7 +89,8 @@ def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
with fs.cd(project_dir):
config = ProjectConfig.get_instance(
project_conf or join(project_dir, "platformio.ini"))
project_conf or join(project_dir, "platformio.ini")
)
config.validate(environment)
# clean obsolete build dir
@ -88,36 +100,48 @@ def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
except: # pylint: disable=bare-except
click.secho(
"Can not remove temporary directory `%s`. Please remove "
"it manually to avoid build issues" %
get_project_build_dir(force=True),
fg="yellow")
"it manually to avoid build issues"
% get_project_build_dir(force=True),
fg="yellow",
)
handle_legacy_libdeps(project_dir, config)
default_envs = config.default_envs()
results = []
for env in config.envs():
skipenv = any([
environment and env not in environment, not environment
and default_envs and env not in default_envs
])
skipenv = any(
[
environment and env not in environment,
not environment and default_envs and env not in default_envs,
]
)
if skipenv:
results.append({"env": env})
continue
# print empty line between multi environment project
if not silent and any(
r.get("succeeded") is not None for r in results):
if not silent and any(r.get("succeeded") is not None for r in results):
click.echo()
results.append(
process_env(ctx, env, config, environment, target, upload_port,
silent, verbose, jobs, is_test_running))
process_env(
ctx,
env,
config,
environment,
target,
upload_port,
silent,
verbose,
jobs,
is_test_running,
)
)
command_failed = any(r.get("succeeded") is False for r in results)
if (not is_test_running and (command_failed or not silent)
and len(results) > 1):
if not is_test_running and (command_failed or not silent) and len(results) > 1:
print_processing_summary(results)
if command_failed:
@ -125,24 +149,39 @@ def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
return True
def process_env(ctx, name, config, environments, targets, upload_port, silent,
verbose, jobs, is_test_running):
def process_env(
ctx,
name,
config,
environments,
targets,
upload_port,
silent,
verbose,
jobs,
is_test_running,
):
if not is_test_running and not silent:
print_processing_header(name, config, verbose)
ep = EnvironmentProcessor(ctx, name, config, targets, upload_port, silent,
verbose, jobs)
ep = EnvironmentProcessor(
ctx, name, config, targets, upload_port, silent, verbose, jobs
)
result = {"env": name, "duration": time(), "succeeded": ep.process()}
result['duration'] = time() - result['duration']
result["duration"] = time() - result["duration"]
# print footer on error or when is not unit testing
if not is_test_running and (not silent or not result['succeeded']):
if not is_test_running and (not silent or not result["succeeded"]):
print_processing_footer(result)
if (result['succeeded'] and "monitor" in ep.get_build_targets()
and "nobuild" not in ep.get_build_targets()):
ctx.invoke(cmd_device_monitor,
environment=environments[0] if environments else None)
if (
result["succeeded"]
and "monitor" in ep.get_build_targets()
and "nobuild" not in ep.get_build_targets()
):
ctx.invoke(
cmd_device_monitor, environment=environments[0] if environments else None
)
return result
@ -151,10 +190,11 @@ def print_processing_header(env, config, verbose=False):
env_dump = []
for k, v in config.items(env=env):
if verbose or k in ("platform", "framework", "board"):
env_dump.append("%s: %s" %
(k, ", ".join(v) if isinstance(v, list) else v))
click.echo("Processing %s (%s)" %
(click.style(env, fg="cyan", bold=True), "; ".join(env_dump)))
env_dump.append("%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v))
click.echo(
"Processing %s (%s)"
% (click.style(env, fg="cyan", bold=True), "; ".join(env_dump))
)
terminal_width, _ = click.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@ -162,10 +202,17 @@ def print_processing_header(env, config, verbose=False):
def print_processing_footer(result):
is_failed = not result.get("succeeded")
util.print_labeled_bar(
"[%s] Took %.2f seconds" %
((click.style("FAILED", fg="red", bold=True) if is_failed else
click.style("SUCCESS", fg="green", bold=True)), result['duration']),
is_error=is_failed)
"[%s] Took %.2f seconds"
% (
(
click.style("FAILED", fg="red", bold=True)
if is_failed
else click.style("SUCCESS", fg="green", bold=True)
),
result["duration"],
),
is_error=is_failed,
)
def print_processing_summary(results):
@ -186,20 +233,31 @@ def print_processing_summary(results):
status_str = click.style("SUCCESS", fg="green")
tabular_data.append(
(click.style(result['env'], fg="cyan"), status_str,
util.humanize_duration_time(result.get("duration"))))
(
click.style(result["env"], fg="cyan"),
status_str,
util.humanize_duration_time(result.get("duration")),
)
)
click.echo()
click.echo(tabulate(tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Environment", "Status", "Duration")
]),
err=failed_nums)
click.echo(
tabulate(
tabular_data,
headers=[
click.style(s, bold=True) for s in ("Environment", "Status", "Duration")
],
),
err=failed_nums,
)
util.print_labeled_bar(
"%s%d succeeded in %s" %
("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
util.humanize_duration_time(duration)),
"%s%d succeeded in %s"
% (
"%d failed, " % failed_nums if failed_nums else "",
succeeded_nums,
util.humanize_duration_time(duration),
),
is_error=failed_nums,
fg="red" if failed_nums else "green")
fg="red" if failed_nums else "green",
)

View File

@ -18,15 +18,16 @@ from os.path import isdir, isfile, join
import click
from platformio import fs
from platformio.project.helpers import (compute_project_checksum,
get_project_dir,
get_project_libdeps_dir)
from platformio.project.helpers import (
compute_project_checksum,
get_project_dir,
get_project_libdeps_dir,
)
def handle_legacy_libdeps(project_dir, config):
legacy_libdeps_dir = join(project_dir, ".piolibdeps")
if (not isdir(legacy_libdeps_dir)
or legacy_libdeps_dir == get_project_libdeps_dir()):
if not isdir(legacy_libdeps_dir) or legacy_libdeps_dir == get_project_libdeps_dir():
return
if not config.has_section("env"):
config.add_section("env")
@ -39,7 +40,8 @@ def handle_legacy_libdeps(project_dir, config):
" file using `lib_deps` option and remove `{0}` folder."
"\nMore details -> http://docs.platformio.org/page/projectconf/"
"section_env_library.html#lib-deps".format(legacy_libdeps_dir),
fg="yellow")
fg="yellow",
)
def clean_build_dir(build_dir, config):

View File

@ -13,8 +13,7 @@
# limitations under the License.
from platformio import exception, telemetry
from platformio.commands.platform import \
platform_install as cmd_platform_install
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
from platformio.managers.platform import PlatformFactory
@ -22,10 +21,9 @@ from platformio.managers.platform import PlatformFactory
class EnvironmentProcessor(object):
def __init__( # pylint: disable=too-many-arguments
self, cmd_ctx, name, config, targets, upload_port, silent, verbose,
jobs):
self, cmd_ctx, name, config, targets, upload_port, silent, verbose, jobs
):
self.cmd_ctx = cmd_ctx
self.name = name
self.config = config
@ -40,12 +38,13 @@ class EnvironmentProcessor(object):
variables = {"pioenv": self.name, "project_config": self.config.path}
if CTX_META_TEST_RUNNING_NAME in self.cmd_ctx.meta:
variables['piotest_running_name'] = self.cmd_ctx.meta[
CTX_META_TEST_RUNNING_NAME]
variables["piotest_running_name"] = self.cmd_ctx.meta[
CTX_META_TEST_RUNNING_NAME
]
if self.upload_port:
# override upload port with a custom from CLI
variables['upload_port'] = self.upload_port
variables["upload_port"] = self.upload_port
return variables
def get_build_targets(self):
@ -67,13 +66,14 @@ class EnvironmentProcessor(object):
build_targets.remove("monitor")
try:
p = PlatformFactory.newPlatform(self.options['platform'])
p = PlatformFactory.newPlatform(self.options["platform"])
except exception.UnknownPlatform:
self.cmd_ctx.invoke(cmd_platform_install,
platforms=[self.options['platform']],
skip_default_package=True)
p = PlatformFactory.newPlatform(self.options['platform'])
self.cmd_ctx.invoke(
cmd_platform_install,
platforms=[self.options["platform"]],
skip_default_package=True,
)
p = PlatformFactory.newPlatform(self.options["platform"])
result = p.run(build_vars, build_targets, self.silent, self.verbose,
self.jobs)
return result['returncode'] == 0
result = p.run(build_vars, build_targets, self.silent, self.verbose, self.jobs)
return result["returncode"] == 0

View File

@ -42,20 +42,24 @@ def settings_get(name):
raw_value = app.get_setting(key)
formatted_value = format_value(raw_value)
if raw_value != options['value']:
default_formatted_value = format_value(options['value'])
if raw_value != options["value"]:
default_formatted_value = format_value(options["value"])
formatted_value += "%s" % (
"\n" if len(default_formatted_value) > 10 else " ")
formatted_value += "[%s]" % click.style(default_formatted_value,
fg="yellow")
"\n" if len(default_formatted_value) > 10 else " "
)
formatted_value += "[%s]" % click.style(
default_formatted_value, fg="yellow"
)
tabular_data.append(
(click.style(key,
fg="cyan"), formatted_value, options['description']))
(click.style(key, fg="cyan"), formatted_value, options["description"])
)
click.echo(
tabulate(tabular_data,
headers=["Name", "Current value [Default]", "Description"]))
tabulate(
tabular_data, headers=["Name", "Current value [Default]", "Description"]
)
)
@cli.command("set", short_help="Set new value for the setting")

View File

@ -31,51 +31,72 @@ from platformio.project.helpers import get_project_test_dir
@click.command("test", short_help="Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--filter",
"-f",
multiple=True,
metavar="<pattern>",
help="Filter tests by a pattern")
@click.option("--ignore",
"-i",
multiple=True,
metavar="<pattern>",
help="Ignore tests by a pattern")
@click.option(
"--filter",
"-f",
multiple=True,
metavar="<pattern>",
help="Filter tests by a pattern",
)
@click.option(
"--ignore",
"-i",
multiple=True,
metavar="<pattern>",
help="Ignore tests by a pattern",
)
@click.option("--upload-port")
@click.option("--test-port")
@click.option("-d",
"--project-dir",
default=getcwd,
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-c",
"--project-conf",
type=click.Path(exists=True,
file_okay=True,
dir_okay=False,
readable=True,
resolve_path=True))
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--without-testing", is_flag=True)
@click.option("--no-reset", is_flag=True)
@click.option("--monitor-rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state for Serial Monitor")
@click.option("--monitor-dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state for Serial Monitor")
@click.option(
"--monitor-rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state for Serial Monitor",
)
@click.option(
"--monitor-dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state for Serial Monitor",
)
@click.option("--verbose", "-v", is_flag=True)
@click.pass_context
def cli( # pylint: disable=redefined-builtin
ctx, environment, ignore, filter, upload_port, test_port, project_dir,
project_conf, without_building, without_uploading, without_testing,
no_reset, monitor_rts, monitor_dtr, verbose):
ctx,
environment,
ignore,
filter,
upload_port,
test_port,
project_dir,
project_conf,
without_building,
without_uploading,
without_testing,
no_reset,
monitor_rts,
monitor_dtr,
verbose,
):
with fs.cd(project_dir):
test_dir = get_project_test_dir()
if not isdir(test_dir):
@ -83,7 +104,8 @@ def cli( # pylint: disable=redefined-builtin
test_names = get_test_names(test_dir)
config = ProjectConfig.get_instance(
project_conf or join(project_dir, "platformio.ini"))
project_conf or join(project_dir, "platformio.ini")
)
config.validate(envs=environment)
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
@ -99,19 +121,16 @@ def cli( # pylint: disable=redefined-builtin
# filter and ignore patterns
patterns = dict(filter=list(filter), ignore=list(ignore))
for key in patterns:
patterns[key].extend(
config.get(section, "test_%s" % key, []))
patterns[key].extend(config.get(section, "test_%s" % key, []))
skip_conditions = [
environment and envname not in environment,
not environment and default_envs
and envname not in default_envs,
testname != "*" and patterns['filter'] and
not any([fnmatch(testname, p)
for p in patterns['filter']]),
not environment and default_envs and envname not in default_envs,
testname != "*"
and any([fnmatch(testname, p)
for p in patterns['ignore']]),
and patterns["filter"]
and not any([fnmatch(testname, p) for p in patterns["filter"]]),
testname != "*"
and any([fnmatch(testname, p) for p in patterns["ignore"]]),
]
if any(skip_conditions):
results.append({"env": envname, "test": testname})
@ -120,29 +139,36 @@ def cli( # pylint: disable=redefined-builtin
click.echo()
print_processing_header(testname, envname)
cls = (NativeTestProcessor
if config.get(section, "platform") == "native" else
EmbeddedTestProcessor)
cls = (
NativeTestProcessor
if config.get(section, "platform") == "native"
else EmbeddedTestProcessor
)
tp = cls(
ctx, testname, envname,
dict(project_config=config,
project_dir=project_dir,
upload_port=upload_port,
test_port=test_port,
without_building=without_building,
without_uploading=without_uploading,
without_testing=without_testing,
no_reset=no_reset,
monitor_rts=monitor_rts,
monitor_dtr=monitor_dtr,
verbose=verbose))
ctx,
testname,
envname,
dict(
project_config=config,
project_dir=project_dir,
upload_port=upload_port,
test_port=test_port,
without_building=without_building,
without_uploading=without_uploading,
without_testing=without_testing,
no_reset=no_reset,
monitor_rts=monitor_rts,
monitor_dtr=monitor_dtr,
verbose=verbose,
),
)
result = {
"env": envname,
"test": testname,
"duration": time(),
"succeeded": tp.process()
"succeeded": tp.process(),
}
result['duration'] = time() - result['duration']
result["duration"] = time() - result["duration"]
results.append(result)
print_processing_footer(result)
@ -168,8 +194,13 @@ def get_test_names(test_dir):
def print_processing_header(test, env):
click.echo("Processing %s in %s environment" % (click.style(
test, fg="yellow", bold=True), click.style(env, fg="cyan", bold=True)))
click.echo(
"Processing %s in %s environment"
% (
click.style(test, fg="yellow", bold=True),
click.style(env, fg="cyan", bold=True),
)
)
terminal_width, _ = click.get_terminal_size()
click.secho("-" * terminal_width, bold=True)
@ -177,10 +208,17 @@ def print_processing_header(test, env):
def print_processing_footer(result):
is_failed = not result.get("succeeded")
util.print_labeled_bar(
"[%s] Took %.2f seconds" %
((click.style("FAILED", fg="red", bold=True) if is_failed else
click.style("PASSED", fg="green", bold=True)), result['duration']),
is_error=is_failed)
"[%s] Took %.2f seconds"
% (
(
click.style("FAILED", fg="red", bold=True)
if is_failed
else click.style("PASSED", fg="green", bold=True)
),
result["duration"],
),
is_error=is_failed,
)
def print_testing_summary(results):
@ -203,20 +241,32 @@ def print_testing_summary(results):
status_str = click.style("PASSED", fg="green")
tabular_data.append(
(result['test'], click.style(result['env'], fg="cyan"), status_str,
util.humanize_duration_time(result.get("duration"))))
(
result["test"],
click.style(result["env"], fg="cyan"),
status_str,
util.humanize_duration_time(result.get("duration")),
)
)
click.echo(tabulate(tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Test", "Environment", "Status",
"Duration")
]),
err=failed_nums)
click.echo(
tabulate(
tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Test", "Environment", "Status", "Duration")
],
),
err=failed_nums,
)
util.print_labeled_bar(
"%s%d succeeded in %s" %
("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
util.humanize_duration_time(duration)),
"%s%d succeeded in %s"
% (
"%d failed, " % failed_nums if failed_nums else "",
succeeded_nums,
util.humanize_duration_time(duration),
),
is_error=failed_nums,
fg="red" if failed_nums else "green")
fg="red" if failed_nums else "green",
)

View File

@ -27,47 +27,50 @@ class EmbeddedTestProcessor(TestProcessorBase):
SERIAL_TIMEOUT = 600
def process(self):
if not self.options['without_building']:
if not self.options["without_building"]:
self.print_progress("Building...")
target = ["__test"]
if self.options['without_uploading']:
if self.options["without_uploading"]:
target.append("checkprogsize")
if not self.build_or_upload(target):
return False
if not self.options['without_uploading']:
if not self.options["without_uploading"]:
self.print_progress("Uploading...")
target = ["upload"]
if self.options['without_building']:
if self.options["without_building"]:
target.append("nobuild")
else:
target.append("__test")
if not self.build_or_upload(target):
return False
if self.options['without_testing']:
if self.options["without_testing"]:
return None
self.print_progress("Testing...")
return self.run()
def run(self):
click.echo("If you don't see any output for the first 10 secs, "
"please reset board (press reset button)")
click.echo(
"If you don't see any output for the first 10 secs, "
"please reset board (press reset button)"
)
click.echo()
try:
ser = serial.Serial(baudrate=self.get_baudrate(),
timeout=self.SERIAL_TIMEOUT)
ser = serial.Serial(
baudrate=self.get_baudrate(), timeout=self.SERIAL_TIMEOUT
)
ser.port = self.get_test_port()
ser.rts = self.options['monitor_rts']
ser.dtr = self.options['monitor_dtr']
ser.rts = self.options["monitor_rts"]
ser.dtr = self.options["monitor_dtr"]
ser.open()
except serial.SerialException as e:
click.secho(str(e), fg="red", err=True)
return False
if not self.options['no_reset']:
if not self.options["no_reset"]:
ser.flushInput()
ser.setDTR(False)
ser.setRTS(False)
@ -105,17 +108,16 @@ class EmbeddedTestProcessor(TestProcessorBase):
return self.env_options.get("test_port")
assert set(["platform", "board"]) & set(self.env_options.keys())
p = PlatformFactory.newPlatform(self.env_options['platform'])
board_hwids = p.board_config(self.env_options['board']).get(
"build.hwids", [])
p = PlatformFactory.newPlatform(self.env_options["platform"])
board_hwids = p.board_config(self.env_options["board"]).get("build.hwids", [])
port = None
elapsed = 0
while elapsed < 5 and not port:
for item in util.get_serialports():
port = item['port']
port = item["port"]
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item['hwid']:
if hwid_str in item["hwid"]:
return port
# check if port is already configured
@ -131,5 +133,6 @@ class EmbeddedTestProcessor(TestProcessorBase):
if not port:
raise exception.PlatformioException(
"Please specify `test_port` for environment or use "
"global `--test-port` option.")
"global `--test-port` option."
)
return port

View File

@ -21,23 +21,23 @@ from platformio.project.helpers import get_project_build_dir
class NativeTestProcessor(TestProcessorBase):
def process(self):
if not self.options['without_building']:
if not self.options["without_building"]:
self.print_progress("Building...")
if not self.build_or_upload(["__test"]):
return False
if self.options['without_testing']:
if self.options["without_testing"]:
return None
self.print_progress("Testing...")
return self.run()
def run(self):
with fs.cd(self.options['project_dir']):
with fs.cd(self.options["project_dir"]):
build_dir = get_project_build_dir()
result = proc.exec_command(
[join(build_dir, self.env_name, "program")],
stdout=LineBufferedAsyncPipe(self.on_run_out),
stderr=LineBufferedAsyncPipe(self.on_run_out))
stderr=LineBufferedAsyncPipe(self.on_run_out),
)
assert "returncode" in result
return result['returncode'] == 0 and not self._run_failed
return result["returncode"] == 0 and not self._run_failed

View File

@ -29,7 +29,7 @@ TRANSPORT_OPTIONS = {
"putchar": "Serial.write(c)",
"flush": "Serial.flush()",
"begin": "Serial.begin($baudrate)",
"end": "Serial.end()"
"end": "Serial.end()",
},
"mbed": {
"include": "#include <mbed.h>",
@ -37,7 +37,7 @@ TRANSPORT_OPTIONS = {
"putchar": "pc.putc(c)",
"flush": "",
"begin": "pc.baud($baudrate)",
"end": ""
"end": "",
},
"espidf": {
"include": "#include <stdio.h>",
@ -45,7 +45,7 @@ TRANSPORT_OPTIONS = {
"putchar": "putchar(c)",
"flush": "fflush(stdout)",
"begin": "",
"end": ""
"end": "",
},
"native": {
"include": "#include <stdio.h>",
@ -53,7 +53,7 @@ TRANSPORT_OPTIONS = {
"putchar": "putchar(c)",
"flush": "fflush(stdout)",
"begin": "",
"end": ""
"end": "",
},
"custom": {
"include": '#include "unittest_transport.h"',
@ -61,8 +61,8 @@ TRANSPORT_OPTIONS = {
"putchar": "unittest_uart_putchar(c)",
"flush": "unittest_uart_flush()",
"begin": "unittest_uart_begin()",
"end": "unittest_uart_end()"
}
"end": "unittest_uart_end()",
},
}
CTX_META_TEST_IS_RUNNING = __name__ + ".test_running"
@ -79,8 +79,7 @@ class TestProcessorBase(object):
self.test_name = testname
self.options = options
self.env_name = envname
self.env_options = options['project_config'].items(env=envname,
as_dict=True)
self.env_options = options["project_config"].items(env=envname, as_dict=True)
self._run_failed = False
self._outputcpp_generated = False
@ -90,10 +89,11 @@ class TestProcessorBase(object):
elif "framework" in self.env_options:
transport = self.env_options.get("framework")[0]
if "test_transport" in self.env_options:
transport = self.env_options['test_transport']
transport = self.env_options["test_transport"]
if transport not in TRANSPORT_OPTIONS:
raise exception.PlatformioException(
"Unknown Unit Test transport `%s`" % transport)
"Unknown Unit Test transport `%s`" % transport
)
return transport.lower()
def get_baudrate(self):
@ -112,13 +112,16 @@ class TestProcessorBase(object):
try:
from platformio.commands.run import cli as cmd_run
return self.cmd_ctx.invoke(cmd_run,
project_dir=self.options['project_dir'],
upload_port=self.options['upload_port'],
silent=not self.options['verbose'],
environment=[self.env_name],
disable_auto_clean="nobuild" in target,
target=target)
return self.cmd_ctx.invoke(
cmd_run,
project_dir=self.options["project_dir"],
upload_port=self.options["upload_port"],
silent=not self.options["verbose"],
environment=[self.env_name],
disable_auto_clean="nobuild" in target,
target=target,
)
except exception.ReturnErrorCode:
return False
@ -131,8 +134,7 @@ class TestProcessorBase(object):
def on_run_out(self, line):
line = line.strip()
if line.endswith(":PASS"):
click.echo("%s\t[%s]" %
(line[:-5], click.style("PASSED", fg="green")))
click.echo("%s\t[%s]" % (line[:-5], click.style("PASSED", fg="green")))
elif ":FAIL" in line:
self._run_failed = True
click.echo("%s\t[%s]" % (line, click.style("FAILED", fg="red")))
@ -142,36 +144,38 @@ class TestProcessorBase(object):
def generate_outputcpp(self, test_dir):
assert isdir(test_dir)
cpp_tpl = "\n".join([
"$include",
"#include <output_export.h>",
"",
"$object",
"",
"#ifdef __GNUC__",
"void output_start(unsigned int baudrate __attribute__((unused)))",
"#else",
"void output_start(unsigned int baudrate)",
"#endif",
"{",
" $begin;",
"}",
"",
"void output_char(int c)",
"{",
" $putchar;",
"}",
"",
"void output_flush(void)",
"{",
" $flush;",
"}",
"",
"void output_complete(void)",
"{",
" $end;",
"}"
]) # yapf: disable
cpp_tpl = "\n".join(
[
"$include",
"#include <output_export.h>",
"",
"$object",
"",
"#ifdef __GNUC__",
"void output_start(unsigned int baudrate __attribute__((unused)))",
"#else",
"void output_start(unsigned int baudrate)",
"#endif",
"{",
" $begin;",
"}",
"",
"void output_char(int c)",
"{",
" $putchar;",
"}",
"",
"void output_flush(void)",
"{",
" $flush;",
"}",
"",
"void output_complete(void)",
"{",
" $end;",
"}",
]
) # yapf: disable
def delete_tmptest_file(file_):
try:
@ -181,10 +185,10 @@ class TestProcessorBase(object):
click.secho(
"Warning: Could not remove temporary file '%s'. "
"Please remove it manually." % file_,
fg="yellow")
fg="yellow",
)
tpl = Template(cpp_tpl).substitute(
TRANSPORT_OPTIONS[self.get_transport()])
tpl = Template(cpp_tpl).substitute(TRANSPORT_OPTIONS[self.get_transport()])
data = Template(tpl).substitute(baudrate=self.get_baudrate())
tmp_file = join(test_dir, "output_export.cpp")

View File

@ -22,18 +22,19 @@ from platformio.managers.core import update_core_packages
from platformio.managers.lib import LibraryManager
@click.command("update",
short_help="Update installed platforms, packages and libraries")
@click.option("--core-packages",
is_flag=True,
help="Update only the core packages")
@click.option("-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run",
is_flag=True,
help="Do not update, only check for the new versions")
@click.command(
"update", short_help="Update installed platforms, packages and libraries"
)
@click.option("--core-packages", is_flag=True, help="Update only the core packages")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.pass_context
def cli(ctx, core_packages, only_check, dry_run):
# cleanup lib search results, cached board and platform lists

View File

@ -25,21 +25,23 @@ from platformio.proc import exec_command, get_pythonexe_path
from platformio.project.helpers import get_project_cache_dir
@click.command("upgrade",
short_help="Upgrade PlatformIO to the latest version")
@click.command("upgrade", short_help="Upgrade PlatformIO to the latest version")
@click.option("--dev", is_flag=True, help="Use development branch")
def cli(dev):
if not dev and __version__ == get_latest_version():
return click.secho(
"You're up-to-date!\nPlatformIO %s is currently the "
"newest version available." % __version__,
fg="green")
fg="green",
)
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
cmds = (["pip", "install", "--upgrade",
get_pip_package(to_develop)], ["platformio", "--version"])
cmds = (
["pip", "install", "--upgrade", get_pip_package(to_develop)],
["platformio", "--version"],
)
cmd = None
r = {}
@ -49,26 +51,26 @@ def cli(dev):
r = exec_command(cmd)
# try pip with disabled cache
if r['returncode'] != 0 and cmd[2] == "pip":
if r["returncode"] != 0 and cmd[2] == "pip":
cmd.insert(3, "--no-cache-dir")
r = exec_command(cmd)
assert r['returncode'] == 0
assert "version" in r['out']
actual_version = r['out'].strip().split("version", 1)[1].strip()
click.secho("PlatformIO has been successfully upgraded to %s" %
actual_version,
fg="green")
assert r["returncode"] == 0
assert "version" in r["out"]
actual_version = r["out"].strip().split("version", 1)[1].strip()
click.secho(
"PlatformIO has been successfully upgraded to %s" % actual_version,
fg="green",
)
click.echo("Release notes: ", nl=False)
click.secho("https://docs.platformio.org/en/latest/history.html",
fg="cyan")
click.secho("https://docs.platformio.org/en/latest/history.html", fg="cyan")
except Exception as e: # pylint: disable=broad-except
if not r:
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
permission_errors = ("permission denied", "not permitted")
if (any(m in r['err'].lower() for m in permission_errors)
and not WINDOWS):
click.secho("""
if any(m in r["err"].lower() for m in permission_errors) and not WINDOWS:
click.secho(
"""
-----------------
Permission denied
-----------------
@ -78,10 +80,11 @@ You need the `sudo` permission to install Python packages. Try
WARNING! Don't use `sudo` for the rest PlatformIO commands.
""",
fg="yellow",
err=True)
fg="yellow",
err=True,
)
raise exception.ReturnErrorCode(1)
raise exception.UpgradeError("\n".join([str(cmd), r['out'], r['err']]))
raise exception.UpgradeError("\n".join([str(cmd), r["out"], r["err"]]))
return True
@ -89,18 +92,17 @@ WARNING! Don't use `sudo` for the rest PlatformIO commands.
def get_pip_package(to_develop):
if not to_develop:
return "platformio"
dl_url = ("https://github.com/platformio/"
"platformio-core/archive/develop.zip")
dl_url = "https://github.com/platformio/" "platformio-core/archive/develop.zip"
cache_dir = get_project_cache_dir()
if not os.path.isdir(cache_dir):
os.makedirs(cache_dir)
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
try:
with open(pkg_name, "w") as fp:
r = exec_command(["curl", "-fsSL", dl_url],
stdout=fp,
universal_newlines=True)
assert r['returncode'] == 0
r = exec_command(
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
)
assert r["returncode"] == 0
# check ZIP structure
with ZipFile(pkg_name) as zp:
assert zp.testzip() is None
@ -127,7 +129,8 @@ def get_develop_latest_version():
r = requests.get(
"https://raw.githubusercontent.com/platformio/platformio"
"/develop/platformio/__init__.py",
headers=util.get_request_defheaders())
headers=util.get_request_defheaders(),
)
r.raise_for_status()
for line in r.text.split("\n"):
line = line.strip()
@ -145,7 +148,8 @@ def get_develop_latest_version():
def get_pypi_latest_version():
r = requests.get("https://pypi.org/pypi/platformio/json",
headers=util.get_request_defheaders())
r = requests.get(
"https://pypi.org/pypi/platformio/json", headers=util.get_request_defheaders()
)
r.raise_for_status()
return r.json()['info']['version']
return r.json()["info"]["version"]

View File

@ -20,8 +20,8 @@ import re
import sys
PY2 = sys.version_info[0] == 2
CYGWIN = sys.platform.startswith('cygwin')
WINDOWS = sys.platform.startswith('win')
CYGWIN = sys.platform.startswith("cygwin")
WINDOWS = sys.platform.startswith("win")
def get_filesystem_encoding():
@ -56,13 +56,12 @@ if PY2:
def dump_json_to_unicode(obj):
if isinstance(obj, unicode):
return obj
return json.dumps(obj,
encoding=get_filesystem_encoding(),
ensure_ascii=False,
sort_keys=True).encode("utf8")
return json.dumps(
obj, encoding=get_filesystem_encoding(), ensure_ascii=False, sort_keys=True
).encode("utf8")
_magic_check = re.compile('([*?[])')
_magic_check_bytes = re.compile(b'([*?[])')
_magic_check = re.compile("([*?[])")
_magic_check_bytes = re.compile(b"([*?[])")
def glob_escape(pathname):
"""Escape all special characters."""
@ -72,14 +71,16 @@ if PY2:
# escaped.
drive, pathname = os.path.splitdrive(pathname)
if isinstance(pathname, bytes):
pathname = _magic_check_bytes.sub(br'[\1]', pathname)
pathname = _magic_check_bytes.sub(br"[\1]", pathname)
else:
pathname = _magic_check.sub(r'[\1]', pathname)
pathname = _magic_check.sub(r"[\1]", pathname)
return drive + pathname
else:
from glob import escape as glob_escape # pylint: disable=no-name-in-module
string_types = (str, )
string_types = (str,)
def is_bytes(x):
return isinstance(x, (bytes, memoryview, bytearray))

View File

@ -22,8 +22,11 @@ import click
import requests
from platformio import util
from platformio.exception import (FDSHASumMismatch, FDSizeMismatch,
FDUnrecognizedStatusCode)
from platformio.exception import (
FDSHASumMismatch,
FDSizeMismatch,
FDUnrecognizedStatusCode,
)
from platformio.proc import exec_command
@ -34,17 +37,22 @@ class FileDownloader(object):
def __init__(self, url, dest_dir=None):
self._request = None
# make connection
self._request = requests.get(url,
stream=True,
headers=util.get_request_defheaders(),
verify=version_info >= (2, 7, 9))
self._request = requests.get(
url,
stream=True,
headers=util.get_request_defheaders(),
verify=version_info >= (2, 7, 9),
)
if self._request.status_code != 200:
raise FDUnrecognizedStatusCode(self._request.status_code, url)
disposition = self._request.headers.get("content-disposition")
if disposition and "filename=" in disposition:
self._fname = disposition[disposition.index("filename=") +
9:].replace('"', "").replace("'", "")
self._fname = (
disposition[disposition.index("filename=") + 9 :]
.replace('"', "")
.replace("'", "")
)
else:
self._fname = [p for p in url.split("/") if p][-1]
self._fname = str(self._fname)
@ -64,7 +72,7 @@ class FileDownloader(object):
def get_size(self):
if "content-length" not in self._request.headers:
return -1
return int(self._request.headers['content-length'])
return int(self._request.headers["content-length"])
def start(self, with_progress=True):
label = "Downloading"
@ -101,11 +109,11 @@ class FileDownloader(object):
dlsha1 = None
try:
result = exec_command(["sha1sum", self._destination])
dlsha1 = result['out']
dlsha1 = result["out"]
except (OSError, ValueError):
try:
result = exec_command(["shasum", "-a", "1", self._destination])
dlsha1 = result['out']
dlsha1 = result["out"]
except (OSError, ValueError):
pass
if not dlsha1:

View File

@ -64,8 +64,10 @@ class IncompatiblePlatform(PlatformioException):
class PlatformNotInstalledYet(PlatformioException):
MESSAGE = ("The platform '{0}' has not been installed yet. "
"Use `platformio platform install {0}` command")
MESSAGE = (
"The platform '{0}' has not been installed yet. "
"Use `platformio platform install {0}` command"
)
class UnknownBoard(PlatformioException):
@ -102,22 +104,27 @@ class MissingPackageManifest(PlatformIOPackageException):
class UndefinedPackageVersion(PlatformIOPackageException):
MESSAGE = ("Could not find a version that satisfies the requirement '{0}'"
" for your system '{1}'")
MESSAGE = (
"Could not find a version that satisfies the requirement '{0}'"
" for your system '{1}'"
)
class PackageInstallError(PlatformIOPackageException):
MESSAGE = ("Could not install '{0}' with version requirements '{1}' "
"for your system '{2}'.\n\n"
"Please try this solution -> http://bit.ly/faq-package-manager")
MESSAGE = (
"Could not install '{0}' with version requirements '{1}' "
"for your system '{2}'.\n\n"
"Please try this solution -> http://bit.ly/faq-package-manager"
)
class ExtractArchiveItemError(PlatformIOPackageException):
MESSAGE = (
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
"tool or check this solution -> http://bit.ly/faq-package-manager")
"tool or check this solution -> http://bit.ly/faq-package-manager"
)
class UnsupportedArchiveType(PlatformIOPackageException):
@ -132,14 +139,17 @@ class FDUnrecognizedStatusCode(PlatformIOPackageException):
class FDSizeMismatch(PlatformIOPackageException):
MESSAGE = ("The size ({0:d} bytes) of downloaded file '{1}' "
"is not equal to remote size ({2:d} bytes)")
MESSAGE = (
"The size ({0:d} bytes) of downloaded file '{1}' "
"is not equal to remote size ({2:d} bytes)"
)
class FDSHASumMismatch(PlatformIOPackageException):
MESSAGE = ("The 'sha1' sum '{0}' of downloaded file '{1}' "
"is not equal to remote '{2}'")
MESSAGE = (
"The 'sha1' sum '{0}' of downloaded file '{1}' " "is not equal to remote '{2}'"
)
#
@ -156,12 +166,13 @@ class NotPlatformIOProject(PlatformIOProjectException):
MESSAGE = (
"Not a PlatformIO project. `platformio.ini` file has not been "
"found in current working directory ({0}). To initialize new project "
"please use `platformio init` command")
"please use `platformio init` command"
)
class InvalidProjectConf(PlatformIOProjectException):
MESSAGE = ("Invalid '{0}' (project configuration file): '{1}'")
MESSAGE = "Invalid '{0}' (project configuration file): '{1}'"
class UndefinedEnvPlatform(PlatformIOProjectException):
@ -191,9 +202,11 @@ class ProjectOptionValueError(PlatformIOProjectException):
class LibNotFound(PlatformioException):
MESSAGE = ("Library `{0}` has not been found in PlatformIO Registry.\n"
"You can ignore this message, if `{0}` is a built-in library "
"(included in framework, SDK). E.g., SPI, Wire, etc.")
MESSAGE = (
"Library `{0}` has not been found in PlatformIO Registry.\n"
"You can ignore this message, if `{0}` is a built-in library "
"(included in framework, SDK). E.g., SPI, Wire, etc."
)
class NotGlobalLibDir(UserSideException):
@ -203,7 +216,8 @@ class NotGlobalLibDir(UserSideException):
"To manage libraries in global storage `{1}`,\n"
"please use `platformio lib --global {2}` or specify custom storage "
"`platformio lib --storage-dir /path/to/storage/ {2}`.\n"
"Check `platformio lib --help` for details.")
"Check `platformio lib --help` for details."
)
class InvalidLibConfURL(PlatformioException):
@ -224,7 +238,8 @@ class MissedUdevRules(InvalidUdevRules):
MESSAGE = (
"Warning! Please install `99-platformio-udev.rules`. \nMode details: "
"https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules")
"https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules"
)
class OutdatedUdevRules(InvalidUdevRules):
@ -232,7 +247,8 @@ class OutdatedUdevRules(InvalidUdevRules):
MESSAGE = (
"Warning! Your `{0}` are outdated. Please update or reinstall them."
"\n Mode details: https://docs.platformio.org"
"/en/latest/faq.html#platformio-udev-rules")
"/en/latest/faq.html#platformio-udev-rules"
)
#
@ -260,7 +276,8 @@ class InternetIsOffline(UserSideException):
MESSAGE = (
"You are not connected to the Internet.\n"
"If you build a project first time, we need Internet connection "
"to install all dependencies and toolchains.")
"to install all dependencies and toolchains."
)
class BuildScriptNotFound(PlatformioException):
@ -285,9 +302,11 @@ class InvalidJSONFile(PlatformioException):
class CIBuildEnvsEmpty(PlatformioException):
MESSAGE = ("Can't find PlatformIO build environments.\n"
"Please specify `--board` or path to `platformio.ini` with "
"predefined environments using `--project-conf` option")
MESSAGE = (
"Can't find PlatformIO build environments.\n"
"Please specify `--board` or path to `platformio.ini` with "
"predefined environments using `--project-conf` option"
)
class UpgradeError(PlatformioException):
@ -307,13 +326,16 @@ class HomeDirPermissionsError(PlatformioException):
"current user and PlatformIO can not store configuration data.\n"
"Please check the permissions and owner of that directory.\n"
"Otherwise, please remove manually `{0}` directory and PlatformIO "
"will create new from the current user.")
"will create new from the current user."
)
class CygwinEnvDetected(PlatformioException):
MESSAGE = ("PlatformIO does not work within Cygwin environment. "
"Use native Terminal instead.")
MESSAGE = (
"PlatformIO does not work within Cygwin environment. "
"Use native Terminal instead."
)
class DebugSupportError(PlatformioException):
@ -322,7 +344,8 @@ class DebugSupportError(PlatformioException):
"Currently, PlatformIO does not support debugging for `{0}`.\n"
"Please request support at https://github.com/platformio/"
"platformio-core/issues \nor visit -> https://docs.platformio.org"
"/page/plus/debugging.html")
"/page/plus/debugging.html"
)
class DebugInvalidOptions(PlatformioException):
@ -331,8 +354,10 @@ class DebugInvalidOptions(PlatformioException):
class TestDirNotExists(PlatformioException):
MESSAGE = "A test folder '{0}' does not exist.\nPlease create 'test' "\
"directory in project's root and put a test set.\n"\
"More details about Unit "\
"Testing: http://docs.platformio.org/page/plus/"\
"unit-testing.html"
MESSAGE = (
"A test folder '{0}' does not exist.\nPlease create 'test' "
"directory in project's root and put a test set.\n"
"More details about Unit "
"Testing: http://docs.platformio.org/page/plus/"
"unit-testing.html"
)
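The MESSAGE rewrites in this file are purely mechanical: Black groups the literals inside parentheses and relies on Python joining adjacent string literals at compile time, so the resulting messages are unchanged. A minimal sketch of that equivalence (the format arguments below are invented example values):

    old_style = ("Could not install '{0}' with version requirements '{1}' "
                 "for your system '{2}'.")
    new_style = (
        "Could not install '{0}' with version requirements '{1}' "
        "for your system '{2}'."
    )
    # Adjacent literals are concatenated by the compiler; only the layout differs.
    assert old_style == new_style
    print(new_style.format("tool-scons", "~2.20501.7", "linux_x86_64"))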

View File

@ -27,7 +27,6 @@ from platformio.compat import WINDOWS, get_file_contents, glob_escape
class cd(object):
def __init__(self, new_path):
self.new_path = new_path
self.prev_path = os.getcwd()
@ -65,10 +64,10 @@ def format_filesize(filesize):
if filesize < base:
return "%d%s" % (filesize, suffix)
for i, suffix in enumerate("KMGTPEZY"):
unit = base**(i + 2)
unit = base ** (i + 2)
if filesize >= unit:
continue
if filesize % (base**(i + 1)):
if filesize % (base ** (i + 1)):
return "%.2f%sB" % ((base * filesize / unit), suffix)
break
return "%d%sB" % ((base * filesize / unit), suffix)
@ -78,21 +77,24 @@ def ensure_udev_rules():
from platformio.util import get_systype
def _rules_to_set(rules_path):
return set(l.strip() for l in get_file_contents(rules_path).split("\n")
if l.strip() and not l.startswith("#"))
return set(
l.strip()
for l in get_file_contents(rules_path).split("\n")
if l.strip() and not l.startswith("#")
)
if "linux" not in get_systype():
return None
installed_rules = [
"/etc/udev/rules.d/99-platformio-udev.rules",
"/lib/udev/rules.d/99-platformio-udev.rules"
"/lib/udev/rules.d/99-platformio-udev.rules",
]
if not any(os.path.isfile(p) for p in installed_rules):
raise exception.MissedUdevRules
origin_path = os.path.abspath(
os.path.join(get_source_dir(), "..", "scripts",
"99-platformio-udev.rules"))
os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
)
if not os.path.isfile(origin_path):
return None
@ -117,7 +119,6 @@ def path_endswith_ext(path, extensions):
def match_src_files(src_dir, src_filter=None, src_exts=None):
def _append_build_item(items, item, src_dir):
if not src_exts or path_endswith_ext(item, src_exts):
items.add(item.replace(src_dir + os.sep, ""))
@ -135,8 +136,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None):
if os.path.isdir(item):
for root, _, files in os.walk(item, followlinks=True):
for f in files:
_append_build_item(items, os.path.join(root, f),
src_dir)
_append_build_item(items, os.path.join(root, f), src_dir)
else:
_append_build_item(items, item, src_dir)
if action == "+":
@ -153,7 +153,6 @@ def to_unix_path(path):
def rmtree(path):
def _onerror(func, path, __):
try:
st_mode = os.stat(path).st_mode
@ -161,9 +160,10 @@ def rmtree(path):
os.chmod(path, st_mode | stat.S_IWRITE)
func(path)
except Exception as e: # pylint: disable=broad-except
click.secho("%s \nPlease manually remove the file `%s`" %
(str(e), path),
fg="red",
err=True)
click.secho(
"%s \nPlease manually remove the file `%s`" % (str(e), path),
fg="red",
err=True,
)
return shutil.rmtree(path, onerror=_onerror)
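The _onerror hook matters mostly on Windows, where a read-only file makes the underlying unlink fail until the write bit is restored. A minimal usage sketch of the same pattern (temporary paths are created on the fly):

    import os
    import shutil
    import stat
    import tempfile

    def _onerror(func, path, _):
        # Restore the write bit, then retry the failing operation (unlink/rmdir).
        os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
        func(path)

    tmp = tempfile.mkdtemp()
    target = os.path.join(tmp, "readonly.txt")
    with open(target, "w") as fp:
        fp.write("data")
    os.chmod(target, stat.S_IREAD)
    shutil.rmtree(tmp, onerror=_onerror)
    assert not os.path.exists(tmp)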

View File

@ -23,17 +23,17 @@ from platformio import fs, util
from platformio.compat import get_file_contents
from platformio.proc import where_is_program
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_lib_dir,
get_project_libdeps_dir,
get_project_src_dir,
load_project_ide_data)
from platformio.project.helpers import (
get_project_lib_dir,
get_project_libdeps_dir,
get_project_src_dir,
load_project_ide_data,
)
class ProjectGenerator(object):
def __init__(self, project_dir, ide, boards):
self.config = ProjectConfig.get_instance(
join(project_dir, "platformio.ini"))
self.config = ProjectConfig.get_instance(join(project_dir, "platformio.ini"))
self.config.validate()
self.project_dir = project_dir
self.ide = str(ide)
@ -42,8 +42,7 @@ class ProjectGenerator(object):
@staticmethod
def get_supported_ides():
tpls_dir = join(fs.get_source_dir(), "ide", "tpls")
return sorted(
[d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])
return sorted([d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])
def get_best_envname(self, boards=None):
envname = None
@ -72,28 +71,29 @@ class ProjectGenerator(object):
"project_dir": self.project_dir,
"env_name": self.env_name,
"user_home_dir": abspath(expanduser("~")),
"platformio_path":
sys.argv[0] if isfile(sys.argv[0])
else where_is_program("platformio"),
"platformio_path": sys.argv[0]
if isfile(sys.argv[0])
else where_is_program("platformio"),
"env_path": os.getenv("PATH"),
"env_pathsep": os.pathsep
} # yapf: disable
"env_pathsep": os.pathsep,
} # yapf: disable
# default env configuration
tpl_vars.update(self.config.items(env=self.env_name, as_dict=True))
# build data
tpl_vars.update(
load_project_ide_data(self.project_dir, self.env_name) or {})
tpl_vars.update(load_project_ide_data(self.project_dir, self.env_name) or {})
with fs.cd(self.project_dir):
tpl_vars.update({
"src_files": self.get_src_files(),
"project_src_dir": get_project_src_dir(),
"project_lib_dir": get_project_lib_dir(),
"project_libdeps_dir": join(
get_project_libdeps_dir(), self.env_name)
}) # yapf: disable
tpl_vars.update(
{
"src_files": self.get_src_files(),
"project_src_dir": get_project_src_dir(),
"project_lib_dir": get_project_lib_dir(),
"project_libdeps_dir": join(
get_project_libdeps_dir(), self.env_name
),
}
) # yapf: disable
for key, value in tpl_vars.items():
if key.endswith(("_path", "_dir")):
@ -103,7 +103,7 @@ class ProjectGenerator(object):
continue
tpl_vars[key] = [fs.to_unix_path(inc) for inc in tpl_vars[key]]
tpl_vars['to_unix_path'] = fs.to_unix_path
tpl_vars["to_unix_path"] = fs.to_unix_path
return tpl_vars
def get_src_files(self):

View File

@ -26,10 +26,12 @@ LOCKFILE_INTERFACE_MSVCRT = 2
try:
import fcntl
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_FCNTL
except ImportError:
try:
import msvcrt
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_MSVCRT
except ImportError:
LOCKFILE_CURRENT_INTERFACE = None
@ -40,7 +42,6 @@ class LockFileExists(Exception):
class LockFile(object):
def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
self.timeout = timeout
self.delay = delay

View File

@ -49,12 +49,16 @@ def on_platformio_end(ctx, result): # pylint: disable=unused-argument
check_platformio_upgrade()
check_internal_updates(ctx, "platforms")
check_internal_updates(ctx, "libraries")
except (exception.InternetIsOffline, exception.GetLatestVersionError,
exception.APIRequestError):
except (
exception.InternetIsOffline,
exception.GetLatestVersionError,
exception.APIRequestError,
):
click.secho(
"Failed to check for PlatformIO upgrades. "
"Please check your Internet connection.",
fg="red")
fg="red",
)
def on_platformio_exception(e):
@ -78,15 +82,17 @@ def set_caller(caller=None):
class Upgrader(object):
def __init__(self, from_version, to_version):
self.from_version = semantic_version.Version.coerce(
util.pepver_to_semver(from_version))
util.pepver_to_semver(from_version)
)
self.to_version = semantic_version.Version.coerce(
util.pepver_to_semver(to_version))
util.pepver_to_semver(to_version)
)
self._upgraders = [(semantic_version.Version("3.5.0-a.2"),
self._update_dev_platforms)]
self._upgraders = [
(semantic_version.Version("3.5.0-a.2"), self._update_dev_platforms)
]
def run(self, ctx):
if self.from_version > self.to_version:
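The Upgrader compares PlatformIO's PEP 440 version strings by first converting them with util.pepver_to_semver (not shown in this hunk) and then coercing them through the semantic_version package. A small sketch of the comparisons this relies on, assuming semantic_version is installed (the version strings are examples only):

    import semantic_version

    # Ordering follows SemVer rules, so a pre-release sorts before its final release.
    assert semantic_version.Version.coerce("3.6.7") < semantic_version.Version.coerce("4.0.0")
    assert semantic_version.Version("3.5.0-a.2") < semantic_version.Version("3.5.0")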
@ -114,19 +120,21 @@ def after_upgrade(ctx):
if last_version == "0.0.0":
app.set_state_item("last_version", __version__)
elif semantic_version.Version.coerce(util.pepver_to_semver(
last_version)) > semantic_version.Version.coerce(
util.pepver_to_semver(__version__)):
elif semantic_version.Version.coerce(
util.pepver_to_semver(last_version)
) > semantic_version.Version.coerce(util.pepver_to_semver(__version__)):
click.secho("*" * terminal_width, fg="yellow")
click.secho("Obsolete PIO Core v%s is used (previous was %s)" %
(__version__, last_version),
fg="yellow")
click.secho("Please remove multiple PIO Cores from a system:",
fg="yellow")
click.secho(
"Obsolete PIO Core v%s is used (previous was %s)"
% (__version__, last_version),
fg="yellow",
)
click.secho("Please remove multiple PIO Cores from a system:", fg="yellow")
click.secho(
"https://docs.platformio.org/page/faq.html"
"#multiple-pio-cores-in-a-system",
fg="cyan")
fg="cyan",
)
click.secho("*" * terminal_width, fg="yellow")
return
else:
@ -139,37 +147,53 @@ def after_upgrade(ctx):
u = Upgrader(last_version, __version__)
if u.run(ctx):
app.set_state_item("last_version", __version__)
click.secho("PlatformIO has been successfully upgraded to %s!\n" %
__version__,
fg="green")
telemetry.on_event(category="Auto",
action="Upgrade",
label="%s > %s" % (last_version, __version__))
click.secho(
"PlatformIO has been successfully upgraded to %s!\n" % __version__,
fg="green",
)
telemetry.on_event(
category="Auto",
action="Upgrade",
label="%s > %s" % (last_version, __version__),
)
else:
raise exception.UpgradeError("Auto upgrading...")
click.echo("")
# PlatformIO banner
click.echo("*" * terminal_width)
click.echo("If you like %s, please:" %
(click.style("PlatformIO", fg="cyan")))
click.echo("- %s us on Twitter to stay up-to-date "
"on the latest project news > %s" %
(click.style("follow", fg="cyan"),
click.style("https://twitter.com/PlatformIO_Org", fg="cyan")))
click.echo("If you like %s, please:" % (click.style("PlatformIO", fg="cyan")))
click.echo(
"- %s it on GitHub > %s" %
(click.style("star", fg="cyan"),
click.style("https://github.com/platformio/platformio", fg="cyan")))
"- %s us on Twitter to stay up-to-date "
"on the latest project news > %s"
% (
click.style("follow", fg="cyan"),
click.style("https://twitter.com/PlatformIO_Org", fg="cyan"),
)
)
click.echo(
"- %s it on GitHub > %s"
% (
click.style("star", fg="cyan"),
click.style("https://github.com/platformio/platformio", fg="cyan"),
)
)
if not getenv("PLATFORMIO_IDE"):
click.echo(
"- %s PlatformIO IDE for IoT development > %s" %
(click.style("try", fg="cyan"),
click.style("https://platformio.org/platformio-ide", fg="cyan")))
"- %s PlatformIO IDE for IoT development > %s"
% (
click.style("try", fg="cyan"),
click.style("https://platformio.org/platformio-ide", fg="cyan"),
)
)
if not is_ci():
click.echo("- %s us with PlatformIO Plus > %s" %
(click.style("support", fg="cyan"),
click.style("https://pioplus.com", fg="cyan")))
click.echo(
"- %s us with PlatformIO Plus > %s"
% (
click.style("support", fg="cyan"),
click.style("https://pioplus.com", fg="cyan"),
)
)
click.echo("*" * terminal_width)
click.echo("")
@ -181,7 +205,7 @@ def check_platformio_upgrade():
if (time() - interval) < last_check.get("platformio_upgrade", 0):
return
last_check['platformio_upgrade'] = int(time())
last_check["platformio_upgrade"] = int(time())
app.set_state_item("last_check", last_check)
util.internet_on(raise_exception=True)
@ -190,23 +214,23 @@ def check_platformio_upgrade():
update_core_packages(silent=True)
latest_version = get_latest_version()
if semantic_version.Version.coerce(util.pepver_to_semver(
latest_version)) <= semantic_version.Version.coerce(
util.pepver_to_semver(__version__)):
if semantic_version.Version.coerce(
util.pepver_to_semver(latest_version)
) <= semantic_version.Version.coerce(util.pepver_to_semver(__version__)):
return
terminal_width, _ = click.get_terminal_size()
click.echo("")
click.echo("*" * terminal_width)
click.secho("There is a new version %s of PlatformIO available.\n"
"Please upgrade it via `" % latest_version,
fg="yellow",
nl=False)
click.secho(
"There is a new version %s of PlatformIO available.\n"
"Please upgrade it via `" % latest_version,
fg="yellow",
nl=False,
)
if getenv("PLATFORMIO_IDE"):
click.secho("PlatformIO IDE Menu: Upgrade PlatformIO",
fg="cyan",
nl=False)
click.secho("PlatformIO IDE Menu: Upgrade PlatformIO", fg="cyan", nl=False)
click.secho("`.", fg="yellow")
elif join("Cellar", "platformio") in fs.get_source_dir():
click.secho("brew update && brew upgrade", fg="cyan", nl=False)
@ -217,8 +241,7 @@ def check_platformio_upgrade():
click.secho("pip install -U platformio", fg="cyan", nl=False)
click.secho("` command.", fg="yellow")
click.secho("Changes: ", fg="yellow", nl=False)
click.secho("https://docs.platformio.org/en/latest/history.html",
fg="cyan")
click.secho("https://docs.platformio.org/en/latest/history.html", fg="cyan")
click.echo("*" * terminal_width)
click.echo("")
@ -229,7 +252,7 @@ def check_internal_updates(ctx, what):
if (time() - interval) < last_check.get(what + "_update", 0):
return
last_check[what + '_update'] = int(time())
last_check[what + "_update"] = int(time())
app.set_state_item("last_check", last_check)
util.internet_on(raise_exception=True)
@ -237,15 +260,17 @@ def check_internal_updates(ctx, what):
pm = PlatformManager() if what == "platforms" else LibraryManager()
outdated_items = []
for manifest in pm.get_installed():
if manifest['name'] in outdated_items:
if manifest["name"] in outdated_items:
continue
conds = [
pm.outdated(manifest['__pkg_dir']), what == "platforms"
pm.outdated(manifest["__pkg_dir"]),
what == "platforms"
and PlatformFactory.newPlatform(
manifest['__pkg_dir']).are_outdated_packages()
manifest["__pkg_dir"]
).are_outdated_packages(),
]
if any(conds):
outdated_items.append(manifest['name'])
outdated_items.append(manifest["name"])
if not outdated_items:
return
@ -254,26 +279,32 @@ def check_internal_updates(ctx, what):
click.echo("")
click.echo("*" * terminal_width)
click.secho("There are the new updates for %s (%s)" %
(what, ", ".join(outdated_items)),
fg="yellow")
click.secho(
"There are the new updates for %s (%s)" % (what, ", ".join(outdated_items)),
fg="yellow",
)
if not app.get_setting("auto_update_" + what):
click.secho("Please update them via ", fg="yellow", nl=False)
click.secho("`platformio %s update`" %
("lib --global" if what == "libraries" else "platform"),
fg="cyan",
nl=False)
click.secho(
"`platformio %s update`"
% ("lib --global" if what == "libraries" else "platform"),
fg="cyan",
nl=False,
)
click.secho(" command.\n", fg="yellow")
click.secho(
"If you want to manually check for the new versions "
"without updating, please use ",
fg="yellow",
nl=False)
click.secho("`platformio %s update --dry-run`" %
("lib --global" if what == "libraries" else "platform"),
fg="cyan",
nl=False)
nl=False,
)
click.secho(
"`platformio %s update --dry-run`"
% ("lib --global" if what == "libraries" else "platform"),
fg="cyan",
nl=False,
)
click.secho(" command.", fg="yellow")
else:
click.secho("Please wait while updating %s ..." % what, fg="yellow")
@ -284,9 +315,7 @@ def check_internal_updates(ctx, what):
ctx.invoke(cmd_lib_update, libraries=outdated_items)
click.echo()
telemetry.on_event(category="Auto",
action="Update",
label=what.title())
telemetry.on_event(category="Auto", action="Update", label=what.title())
click.echo("*" * terminal_width)
click.echo("")

View File

@ -25,13 +25,12 @@ from platformio.project.helpers import get_project_packages_dir
CORE_PACKAGES = {
"contrib-piohome": "^2.3.2",
"contrib-pysite":
"~2.%d%d.190418" % (sys.version_info[0], sys.version_info[1]),
"contrib-pysite": "~2.%d%d.190418" % (sys.version_info[0], sys.version_info[1]),
"tool-pioplus": "^2.5.2",
"tool-unity": "~1.20403.0",
"tool-scons": "~2.20501.7" if PY2 else "~3.30101.0",
"tool-cppcheck": "~1.189.0",
"tool-clangtidy": "^1.80000.0"
"tool-clangtidy": "^1.80000.0",
}
PIOPLUS_AUTO_UPDATES_MAX = 100
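The contrib-pysite entry interpolates the running interpreter version into its requirement string; the expression reduces to plain string formatting:

    import sys

    # On Python 2.7 this yields "~2.27.190418"; on Python 3.7, "~2.37.190418".
    print("~2.%d%d.190418" % (sys.version_info[0], sys.version_info[1]))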
@ -40,20 +39,19 @@ PIOPLUS_AUTO_UPDATES_MAX = 100
class CorePackageManager(PackageManager):
def __init__(self):
super(CorePackageManager, self).__init__(get_project_packages_dir(), [
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
"http%s://dl.platformio.org/packages/manifest.json" %
("" if sys.version_info < (2, 7, 9) else "s")
])
super(CorePackageManager, self).__init__(
get_project_packages_dir(),
[
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
"http%s://dl.platformio.org/packages/manifest.json"
% ("" if sys.version_info < (2, 7, 9) else "s"),
],
)
def install( # pylint: disable=keyword-arg-before-vararg
self,
name,
requirements=None,
*args,
**kwargs):
self, name, requirements=None, *args, **kwargs
):
PackageManager.install(self, name, requirements, *args, **kwargs)
self.cleanup_packages()
return self.get_package_dir(name, requirements)
@ -70,12 +68,12 @@ class CorePackageManager(PackageManager):
pkg_dir = self.get_package_dir(name, requirements)
if not pkg_dir:
continue
best_pkg_versions[name] = self.load_manifest(pkg_dir)['version']
best_pkg_versions[name] = self.load_manifest(pkg_dir)["version"]
for manifest in self.get_installed():
if manifest['name'] not in best_pkg_versions:
if manifest["name"] not in best_pkg_versions:
continue
if manifest['version'] != best_pkg_versions[manifest['name']]:
self.uninstall(manifest['__pkg_dir'], after_update=True)
if manifest["version"] != best_pkg_versions[manifest["name"]]:
self.uninstall(manifest["__pkg_dir"], after_update=True)
self.cache_reset()
return True
@ -104,6 +102,7 @@ def update_core_packages(only_check=False, silent=False):
def inject_contrib_pysite():
from site import addsitedir
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
if contrib_pysite_dir in sys.path:
return
@ -116,16 +115,18 @@ def pioplus_call(args, **kwargs):
raise exception.PlatformioException(
"PlatformIO Core Plus v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7.6, please upgrade Python.\n"
"Python 3 is not yet supported.\n" % (__version__, sys.version))
"Python 3 is not yet supported.\n" % (__version__, sys.version)
)
pioplus_path = join(get_core_package_dir("tool-pioplus"), "pioplus")
pythonexe_path = get_pythonexe_path()
os.environ['PYTHONEXEPATH'] = pythonexe_path
os.environ['PYTHONPYSITEDIR'] = get_core_package_dir("contrib-pysite")
os.environ['PIOCOREPYSITEDIR'] = dirname(fs.get_source_dir() or "")
if dirname(pythonexe_path) not in os.environ['PATH'].split(os.pathsep):
os.environ['PATH'] = (os.pathsep).join(
[dirname(pythonexe_path), os.environ['PATH']])
os.environ["PYTHONEXEPATH"] = pythonexe_path
os.environ["PYTHONPYSITEDIR"] = get_core_package_dir("contrib-pysite")
os.environ["PIOCOREPYSITEDIR"] = dirname(fs.get_source_dir() or "")
if dirname(pythonexe_path) not in os.environ["PATH"].split(os.pathsep):
os.environ["PATH"] = (os.pathsep).join(
[dirname(pythonexe_path), os.environ["PATH"]]
)
copy_pythonpath_to_osenv()
code = subprocess.call([pioplus_path] + args, **kwargs)

View File

@ -41,10 +41,7 @@ class LibraryManager(BasePkgManager):
@property
def manifest_names(self):
return [
".library.json", "library.json", "library.properties",
"module.json"
]
return [".library.json", "library.json", "library.properties", "module.json"]
def get_manifest_path(self, pkg_dir):
path = BasePkgManager.get_manifest_path(self, pkg_dir)
@ -71,36 +68,37 @@ class LibraryManager(BasePkgManager):
# if Arduino library.properties
if "sentence" in manifest:
manifest['frameworks'] = ["arduino"]
manifest['description'] = manifest['sentence']
del manifest['sentence']
manifest["frameworks"] = ["arduino"]
manifest["description"] = manifest["sentence"]
del manifest["sentence"]
if "author" in manifest:
if isinstance(manifest['author'], dict):
manifest['authors'] = [manifest['author']]
if isinstance(manifest["author"], dict):
manifest["authors"] = [manifest["author"]]
else:
manifest['authors'] = [{"name": manifest['author']}]
del manifest['author']
manifest["authors"] = [{"name": manifest["author"]}]
del manifest["author"]
if "authors" in manifest and not isinstance(manifest['authors'], list):
manifest['authors'] = [manifest['authors']]
if "authors" in manifest and not isinstance(manifest["authors"], list):
manifest["authors"] = [manifest["authors"]]
if "keywords" not in manifest:
keywords = []
for keyword in re.split(r"[\s/]+",
manifest.get("category", "Uncategorized")):
for keyword in re.split(
r"[\s/]+", manifest.get("category", "Uncategorized")
):
keyword = keyword.strip()
if not keyword:
continue
keywords.append(keyword.lower())
manifest['keywords'] = keywords
manifest["keywords"] = keywords
if "category" in manifest:
del manifest['category']
del manifest["category"]
# don't replace VCS URL
if "url" in manifest and "description" in manifest:
manifest['homepage'] = manifest['url']
del manifest['url']
manifest["homepage"] = manifest["url"]
del manifest["url"]
if "architectures" in manifest:
platforms = []
@ -110,26 +108,23 @@ class LibraryManager(BasePkgManager):
"samd": "atmelsam",
"esp8266": "espressif8266",
"esp32": "espressif32",
"arc32": "intel_arc32"
"arc32": "intel_arc32",
}
for arch in manifest['architectures'].split(","):
for arch in manifest["architectures"].split(","):
arch = arch.strip()
if arch == "*":
platforms = "*"
break
if arch in platforms_map:
platforms.append(platforms_map[arch])
manifest['platforms'] = platforms
del manifest['architectures']
manifest["platforms"] = platforms
del manifest["architectures"]
# convert listed items via comma to array
for key in ("keywords", "frameworks", "platforms"):
if key not in manifest or \
not isinstance(manifest[key], string_types):
if key not in manifest or not isinstance(manifest[key], string_types):
continue
manifest[key] = [
i.strip() for i in manifest[key].split(",") if i.strip()
]
manifest[key] = [i.strip() for i in manifest[key].split(",") if i.strip()]
return manifest
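The block above adapts Arduino library.properties fields to PlatformIO's manifest schema. A hypothetical input and the fields this hunk derives from it (all names and values below are invented for illustration):

    arduino_manifest = {
        "sentence": "Reads DHT sensors",
        "author": "Jane Doe",
        "category": "Sensors",
        "architectures": "avr, esp8266",
    }
    # After the normalization shown above, the relevant fields become:
    # {
    #     "frameworks": ["arduino"],
    #     "description": "Reads DHT sensors",
    #     "authors": [{"name": "Jane Doe"}],
    #     "keywords": ["sensors"],
    #     "platforms": ["atmelavr", "espressif8266"],
    # }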
@ -153,13 +148,10 @@ class LibraryManager(BasePkgManager):
if item[k] == "*":
del item[k]
elif isinstance(item[k], string_types):
item[k] = [
i.strip() for i in item[k].split(",") if i.strip()
]
item[k] = [i.strip() for i in item[k].split(",") if i.strip()]
return items
def max_satisfying_repo_version(self, versions, requirements=None):
def _cmp_dates(datestr1, datestr2):
date1 = util.parse_date(datestr1)
date2 = util.parse_date(datestr2)
@ -169,61 +161,66 @@ class LibraryManager(BasePkgManager):
semver_spec = None
try:
semver_spec = semantic_version.SimpleSpec(
requirements) if requirements else None
semver_spec = (
semantic_version.SimpleSpec(requirements) if requirements else None
)
except ValueError:
pass
item = {}
for v in versions:
semver_new = self.parse_semver_version(v['name'])
semver_new = self.parse_semver_version(v["name"])
if semver_spec:
if not semver_new or semver_new not in semver_spec:
continue
if not item or self.parse_semver_version(
item['name']) < semver_new:
if not item or self.parse_semver_version(item["name"]) < semver_new:
item = v
elif requirements:
if requirements == v['name']:
if requirements == v["name"]:
return v
else:
if not item or _cmp_dates(item['released'],
v['released']) == -1:
if not item or _cmp_dates(item["released"], v["released"]) == -1:
item = v
return item
def get_latest_repo_version(self, name, requirements, silent=False):
item = self.max_satisfying_repo_version(
util.get_api_result("/lib/info/%d" % self.search_lib_id(
{
"name": name,
"requirements": requirements
}, silent=silent),
cache_valid="1h")['versions'], requirements)
return item['name'] if item else None
util.get_api_result(
"/lib/info/%d"
% self.search_lib_id(
{"name": name, "requirements": requirements}, silent=silent
),
cache_valid="1h",
)["versions"],
requirements,
)
return item["name"] if item else None
def _install_from_piorepo(self, name, requirements):
assert name.startswith("id="), name
version = self.get_latest_repo_version(name, requirements)
if not version:
raise exception.UndefinedPackageVersion(requirements or "latest",
util.get_systype())
dl_data = util.get_api_result("/lib/download/" + str(name[3:]),
dict(version=version),
cache_valid="30d")
raise exception.UndefinedPackageVersion(
requirements or "latest", util.get_systype()
)
dl_data = util.get_api_result(
"/lib/download/" + str(name[3:]), dict(version=version), cache_valid="30d"
)
assert dl_data
return self._install_from_url(
name, dl_data['url'].replace("http://", "https://")
if app.get_setting("strict_ssl") else dl_data['url'], requirements)
name,
dl_data["url"].replace("http://", "https://")
if app.get_setting("strict_ssl")
else dl_data["url"],
requirements,
)
def search_lib_id( # pylint: disable=too-many-branches
self,
filters,
silent=False,
interactive=False):
self, filters, silent=False, interactive=False
):
assert isinstance(filters, dict)
assert "name" in filters
@ -234,8 +231,10 @@ class LibraryManager(BasePkgManager):
# looking in PIO Library Registry
if not silent:
click.echo("Looking for %s library in registry" %
click.style(filters['name'], fg="cyan"))
click.echo(
"Looking for %s library in registry"
% click.style(filters["name"], fg="cyan")
)
query = []
for key in filters:
if key not in ("name", "authors", "frameworks", "platforms"):
@ -244,25 +243,29 @@ class LibraryManager(BasePkgManager):
if not isinstance(values, list):
values = [v.strip() for v in values.split(",") if v]
for value in values:
query.append('%s:"%s"' %
(key[:-1] if key.endswith("s") else key, value))
query.append(
'%s:"%s"' % (key[:-1] if key.endswith("s") else key, value)
)
lib_info = None
result = util.get_api_result("/v2/lib/search",
dict(query=" ".join(query)),
cache_valid="1h")
if result['total'] == 1:
lib_info = result['items'][0]
elif result['total'] > 1:
result = util.get_api_result(
"/v2/lib/search", dict(query=" ".join(query)), cache_valid="1h"
)
if result["total"] == 1:
lib_info = result["items"][0]
elif result["total"] > 1:
if silent and not interactive:
lib_info = result['items'][0]
lib_info = result["items"][0]
else:
click.secho("Conflict: More than one library has been found "
"by request %s:" % json.dumps(filters),
fg="yellow",
err=True)
click.secho(
"Conflict: More than one library has been found "
"by request %s:" % json.dumps(filters),
fg="yellow",
err=True,
)
from platformio.commands.lib import print_lib_item
for item in result['items']:
for item in result["items"]:
print_lib_item(item)
if not interactive:
@ -270,36 +273,39 @@ class LibraryManager(BasePkgManager):
"Automatically chose the first available library "
"(use `--interactive` option to make a choice)",
fg="yellow",
err=True)
lib_info = result['items'][0]
err=True,
)
lib_info = result["items"][0]
else:
deplib_id = click.prompt("Please choose library ID",
type=click.Choice([
str(i['id'])
for i in result['items']
]))
for item in result['items']:
if item['id'] == int(deplib_id):
deplib_id = click.prompt(
"Please choose library ID",
type=click.Choice([str(i["id"]) for i in result["items"]]),
)
for item in result["items"]:
if item["id"] == int(deplib_id):
lib_info = item
break
if not lib_info:
if list(filters) == ["name"]:
raise exception.LibNotFound(filters['name'])
raise exception.LibNotFound(filters["name"])
raise exception.LibNotFound(str(filters))
if not silent:
click.echo("Found: %s" % click.style(
"https://platformio.org/lib/show/{id}/{name}".format(
**lib_info),
fg="blue"))
return int(lib_info['id'])
click.echo(
"Found: %s"
% click.style(
"https://platformio.org/lib/show/{id}/{name}".format(**lib_info),
fg="blue",
)
)
return int(lib_info["id"])
def _get_lib_id_from_installed(self, filters):
if filters['name'].startswith("id="):
return int(filters['name'][3:])
if filters["name"].startswith("id="):
return int(filters["name"][3:])
package_dir = self.get_package_dir(
filters['name'], filters.get("requirements",
filters.get("version")))
filters["name"], filters.get("requirements", filters.get("version"))
)
if not package_dir:
return None
manifest = self.load_manifest(package_dir)
@ -311,52 +317,55 @@ class LibraryManager(BasePkgManager):
continue
if key not in manifest:
return None
if not util.items_in_list(util.items_to_list(filters[key]),
util.items_to_list(manifest[key])):
if not util.items_in_list(
util.items_to_list(filters[key]), util.items_to_list(manifest[key])
):
return None
if "authors" in filters:
if "authors" not in manifest:
return None
manifest_authors = manifest['authors']
manifest_authors = manifest["authors"]
if not isinstance(manifest_authors, list):
manifest_authors = [manifest_authors]
manifest_authors = [
a['name'] for a in manifest_authors
a["name"]
for a in manifest_authors
if isinstance(a, dict) and "name" in a
]
filter_authors = filters['authors']
filter_authors = filters["authors"]
if not isinstance(filter_authors, list):
filter_authors = [filter_authors]
if not set(filter_authors) <= set(manifest_authors):
return None
return int(manifest['id'])
return int(manifest["id"])
def install( # pylint: disable=arguments-differ
self,
name,
requirements=None,
silent=False,
after_update=False,
interactive=False,
force=False):
self,
name,
requirements=None,
silent=False,
after_update=False,
interactive=False,
force=False,
):
_name, _requirements, _url = self.parse_pkg_uri(name, requirements)
if not _url:
name = "id=%d" % self.search_lib_id(
{
"name": _name,
"requirements": _requirements
},
{"name": _name, "requirements": _requirements},
silent=silent,
interactive=interactive)
interactive=interactive,
)
requirements = _requirements
pkg_dir = BasePkgManager.install(self,
name,
requirements,
silent=silent,
after_update=after_update,
force=force)
pkg_dir = BasePkgManager.install(
self,
name,
requirements,
silent=silent,
after_update=after_update,
force=force,
)
if not pkg_dir:
return None
@ -369,7 +378,7 @@ class LibraryManager(BasePkgManager):
click.secho("Installing dependencies", fg="yellow")
builtin_lib_storages = None
for filters in self.normalize_dependencies(manifest['dependencies']):
for filters in self.normalize_dependencies(manifest["dependencies"]):
assert "name" in filters
# avoid circle dependencies
@ -381,35 +390,42 @@ class LibraryManager(BasePkgManager):
self.INSTALL_HISTORY.append(history_key)
if any(s in filters.get("version", "") for s in ("\\", "/")):
self.install("{name}={version}".format(**filters),
silent=silent,
after_update=after_update,
interactive=interactive,
force=force)
self.install(
"{name}={version}".format(**filters),
silent=silent,
after_update=after_update,
interactive=interactive,
force=force,
)
else:
try:
lib_id = self.search_lib_id(filters, silent, interactive)
except exception.LibNotFound as e:
if builtin_lib_storages is None:
builtin_lib_storages = get_builtin_libs()
if not silent or is_builtin_lib(builtin_lib_storages,
filters['name']):
if not silent or is_builtin_lib(
builtin_lib_storages, filters["name"]
):
click.secho("Warning! %s" % e, fg="yellow")
continue
if filters.get("version"):
self.install(lib_id,
filters.get("version"),
silent=silent,
after_update=after_update,
interactive=interactive,
force=force)
self.install(
lib_id,
filters.get("version"),
silent=silent,
after_update=after_update,
interactive=interactive,
force=force,
)
else:
self.install(lib_id,
silent=silent,
after_update=after_update,
interactive=interactive,
force=force)
self.install(
lib_id,
silent=silent,
after_update=after_update,
interactive=interactive,
force=force,
)
return pkg_dir
@ -418,21 +434,23 @@ def get_builtin_libs(storage_names=None):
storage_names = storage_names or []
pm = PlatformManager()
for manifest in pm.get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
for storage in p.get_lib_storages():
if storage_names and storage['name'] not in storage_names:
if storage_names and storage["name"] not in storage_names:
continue
lm = LibraryManager(storage['path'])
items.append({
"name": storage['name'],
"path": storage['path'],
"items": lm.get_installed()
})
lm = LibraryManager(storage["path"])
items.append(
{
"name": storage["name"],
"path": storage["path"],
"items": lm.get_installed(),
}
)
return items
def is_builtin_lib(storages, name):
for storage in storages or []:
if any(l.get("name") == name for l in storage['items']):
if any(l.get("name") == name for l in storage["items"]):
return True
return False

View File

@ -36,7 +36,6 @@ from platformio.vcsclient import VCSClientFactory
class PackageRepoIterator(object):
def __init__(self, package, repositories):
assert isinstance(repositories, list)
self.package = package
@ -87,8 +86,9 @@ class PkgRepoMixin(object):
item = None
reqspec = None
try:
reqspec = semantic_version.SimpleSpec(
requirements) if requirements else None
reqspec = (
semantic_version.SimpleSpec(requirements) if requirements else None
)
except ValueError:
pass
@ -99,33 +99,32 @@ class PkgRepoMixin(object):
# if PkgRepoMixin.PIO_VERSION not in requirements.SimpleSpec(
# v['engines']['platformio']):
# continue
specver = semantic_version.Version(v['version'])
specver = semantic_version.Version(v["version"])
if reqspec and specver not in reqspec:
continue
if not item or semantic_version.Version(item['version']) < specver:
if not item or semantic_version.Version(item["version"]) < specver:
item = v
return item
def get_latest_repo_version( # pylint: disable=unused-argument
self,
name,
requirements,
silent=False):
self, name, requirements, silent=False
):
version = None
for versions in PackageRepoIterator(name, self.repositories):
pkgdata = self.max_satisfying_repo_version(versions, requirements)
if not pkgdata:
continue
if not version or semantic_version.compare(pkgdata['version'],
version) == 1:
version = pkgdata['version']
if (
not version
or semantic_version.compare(pkgdata["version"], version) == 1
):
version = pkgdata["version"]
return version
def get_all_repo_versions(self, name):
result = []
for versions in PackageRepoIterator(name, self.repositories):
result.extend(
[semantic_version.Version(v['version']) for v in versions])
result.extend([semantic_version.Version(v["version"]) for v in versions])
return [str(v) for v in sorted(set(result))]
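max_satisfying_repo_version filters candidate versions through semantic_version.SimpleSpec and keeps the highest match. The core of that selection, with invented version numbers and an explicit range spec:

    import semantic_version

    versions = ["1.8.0", "2.0.0", "2.1.3", "3.1.0"]
    spec = semantic_version.SimpleSpec(">=2.0.0,<3.0.0")
    matching = [semantic_version.Version(v) for v in versions
                if semantic_version.Version(v) in spec]
    print(max(matching))  # 2.1.3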
@ -154,7 +153,8 @@ class PkgInstallerMixin(object):
if result:
return result
result = [
join(src_dir, name) for name in sorted(os.listdir(src_dir))
join(src_dir, name)
for name in sorted(os.listdir(src_dir))
if isdir(join(src_dir, name))
]
self.cache_set(cache_key, result)
@ -189,14 +189,17 @@ class PkgInstallerMixin(object):
click.secho(
"Error: Please read http://bit.ly/package-manager-ioerror",
fg="red",
err=True)
err=True,
)
raise e
if sha1:
fd.verify(sha1)
dst_path = fd.get_filepath()
if not self.FILE_CACHE_VALID or getsize(
dst_path) > PkgInstallerMixin.FILE_CACHE_MAX_SIZE:
if (
not self.FILE_CACHE_VALID
or getsize(dst_path) > PkgInstallerMixin.FILE_CACHE_MAX_SIZE
):
return dst_path
with app.ContentCache() as cc:
@ -232,15 +235,15 @@ class PkgInstallerMixin(object):
return None
@staticmethod
def parse_pkg_uri( # pylint: disable=too-many-branches
text, requirements=None):
def parse_pkg_uri(text, requirements=None): # pylint: disable=too-many-branches
text = str(text)
name, url = None, None
# Parse requirements
req_conditions = [
"@" in text, not requirements, ":" not in text
or text.rfind("/") < text.rfind("@")
"@" in text,
not requirements,
":" not in text or text.rfind("/") < text.rfind("@"),
]
if all(req_conditions):
text, requirements = text.rsplit("@", 1)
@ -259,17 +262,16 @@ class PkgInstallerMixin(object):
elif "/" in text or "\\" in text:
git_conditions = [
# Handle GitHub URL (https://github.com/user/package)
text.startswith("https://github.com/") and not text.endswith(
(".zip", ".tar.gz")),
(text.split("#", 1)[0]
if "#" in text else text).endswith(".git")
text.startswith("https://github.com/")
and not text.endswith((".zip", ".tar.gz")),
(text.split("#", 1)[0] if "#" in text else text).endswith(".git"),
]
hg_conditions = [
# Handle Developer Mbed URL
# (https://developer.mbed.org/users/user/code/package/)
# (https://os.mbed.com/users/user/code/package/)
text.startswith("https://developer.mbed.org"),
text.startswith("https://os.mbed.com")
text.startswith("https://os.mbed.com"),
]
if any(git_conditions):
url = "git+" + text
@ -296,9 +298,9 @@ class PkgInstallerMixin(object):
@staticmethod
def get_install_dirname(manifest):
name = re.sub(r"[^\da-z\_\-\. ]", "_", manifest['name'], flags=re.I)
name = re.sub(r"[^\da-z\_\-\. ]", "_", manifest["name"], flags=re.I)
if "id" in manifest:
name += "_ID%d" % manifest['id']
name += "_ID%d" % manifest["id"]
return str(name)
@classmethod
@ -322,8 +324,7 @@ class PkgInstallerMixin(object):
return None
def manifest_exists(self, pkg_dir):
return self.get_manifest_path(pkg_dir) or \
self.get_src_manifest_path(pkg_dir)
return self.get_manifest_path(pkg_dir) or self.get_src_manifest_path(pkg_dir)
def load_manifest(self, pkg_dir):
cache_key = "load_manifest-%s" % pkg_dir
@ -353,19 +354,19 @@ class PkgInstallerMixin(object):
if src_manifest:
if "version" in src_manifest:
manifest['version'] = src_manifest['version']
manifest['__src_url'] = src_manifest['url']
manifest["version"] = src_manifest["version"]
manifest["__src_url"] = src_manifest["url"]
# handle a custom package name
autogen_name = self.parse_pkg_uri(manifest['__src_url'])[0]
if "name" not in manifest or autogen_name != src_manifest['name']:
manifest['name'] = src_manifest['name']
autogen_name = self.parse_pkg_uri(manifest["__src_url"])[0]
if "name" not in manifest or autogen_name != src_manifest["name"]:
manifest["name"] = src_manifest["name"]
if "name" not in manifest:
manifest['name'] = basename(pkg_dir)
manifest["name"] = basename(pkg_dir)
if "version" not in manifest:
manifest['version'] = "0.0.0"
manifest["version"] = "0.0.0"
manifest['__pkg_dir'] = pkg_dir
manifest["__pkg_dir"] = pkg_dir
self.cache_set(cache_key, manifest)
return manifest
@ -390,25 +391,24 @@ class PkgInstallerMixin(object):
continue
elif pkg_id and manifest.get("id") != pkg_id:
continue
elif not pkg_id and manifest['name'] != name:
elif not pkg_id and manifest["name"] != name:
continue
elif not PkgRepoMixin.is_system_compatible(manifest.get("system")):
continue
# strict version or VCS HASH
if requirements and requirements == manifest['version']:
if requirements and requirements == manifest["version"]:
return manifest
try:
if requirements and not semantic_version.SimpleSpec(
requirements).match(
self.parse_semver_version(manifest['version'],
raise_exception=True)):
if requirements and not semantic_version.SimpleSpec(requirements).match(
self.parse_semver_version(manifest["version"], raise_exception=True)
):
continue
elif not best or (self.parse_semver_version(
manifest['version'], raise_exception=True) >
self.parse_semver_version(
best['version'], raise_exception=True)):
elif not best or (
self.parse_semver_version(manifest["version"], raise_exception=True)
> self.parse_semver_version(best["version"], raise_exception=True)
):
best = manifest
except ValueError:
pass
@ -417,12 +417,15 @@ class PkgInstallerMixin(object):
def get_package_dir(self, name, requirements=None, url=None):
manifest = self.get_package(name, requirements, url)
return manifest.get("__pkg_dir") if manifest and isdir(
manifest.get("__pkg_dir")) else None
return (
manifest.get("__pkg_dir")
if manifest and isdir(manifest.get("__pkg_dir"))
else None
)
def get_package_by_dir(self, pkg_dir):
for manifest in self.get_installed():
if manifest['__pkg_dir'] == abspath(pkg_dir):
if manifest["__pkg_dir"] == abspath(pkg_dir):
return manifest
return None
@ -443,9 +446,9 @@ class PkgInstallerMixin(object):
if not pkgdata:
continue
try:
pkg_dir = self._install_from_url(name, pkgdata['url'],
requirements,
pkgdata.get("sha1"))
pkg_dir = self._install_from_url(
name, pkgdata["url"], requirements, pkgdata.get("sha1")
)
break
except Exception as e: # pylint: disable=broad-except
click.secho("Warning! Package Mirror: %s" % e, fg="yellow")
@ -455,16 +458,12 @@ class PkgInstallerMixin(object):
util.internet_on(raise_exception=True)
raise exception.UnknownPackage(name)
if not pkgdata:
raise exception.UndefinedPackageVersion(requirements or "latest",
util.get_systype())
raise exception.UndefinedPackageVersion(
requirements or "latest", util.get_systype()
)
return pkg_dir
def _install_from_url(self,
name,
url,
requirements=None,
sha1=None,
track=False):
def _install_from_url(self, name, url, requirements=None, sha1=None, track=False):
tmp_dir = mkdtemp("-package", self.TMP_FOLDER_PREFIX, self.package_dir)
src_manifest_dir = None
src_manifest = {"name": name, "url": url, "requirements": requirements}
@ -486,7 +485,7 @@ class PkgInstallerMixin(object):
vcs = VCSClientFactory.newClient(tmp_dir, url)
assert vcs.export()
src_manifest_dir = vcs.storage_dir
src_manifest['version'] = vcs.get_current_revision()
src_manifest["version"] = vcs.get_current_revision()
_tmp_dir = tmp_dir
if not src_manifest_dir:
@ -515,7 +514,8 @@ class PkgInstallerMixin(object):
json.dump(_data, fp)
def _install_from_tmp_dir( # pylint: disable=too-many-branches
self, tmp_dir, requirements=None):
self, tmp_dir, requirements=None
):
tmp_manifest = self.load_manifest(tmp_dir)
assert set(["name", "version"]) <= set(tmp_manifest)
@ -523,28 +523,30 @@ class PkgInstallerMixin(object):
pkg_dir = join(self.package_dir, pkg_dirname)
cur_manifest = self.load_manifest(pkg_dir)
tmp_semver = self.parse_semver_version(tmp_manifest['version'])
tmp_semver = self.parse_semver_version(tmp_manifest["version"])
cur_semver = None
if cur_manifest:
cur_semver = self.parse_semver_version(cur_manifest['version'])
cur_semver = self.parse_semver_version(cur_manifest["version"])
# package should satisfy requirements
if requirements:
mismatch_error = (
"Package version %s doesn't satisfy requirements %s" %
(tmp_manifest['version'], requirements))
mismatch_error = "Package version %s doesn't satisfy requirements %s" % (
tmp_manifest["version"],
requirements,
)
try:
assert tmp_semver and tmp_semver in semantic_version.SimpleSpec(
requirements), mismatch_error
requirements
), mismatch_error
except (AssertionError, ValueError):
assert tmp_manifest['version'] == requirements, mismatch_error
assert tmp_manifest["version"] == requirements, mismatch_error
# check if package already exists
if cur_manifest:
# 0-overwrite, 1-rename, 2-fix to a version
action = 0
if "__src_url" in cur_manifest:
if cur_manifest['__src_url'] != tmp_manifest.get("__src_url"):
if cur_manifest["__src_url"] != tmp_manifest.get("__src_url"):
action = 1
elif "__src_url" in tmp_manifest:
action = 2
@ -556,25 +558,25 @@ class PkgInstallerMixin(object):
# rename
if action == 1:
target_dirname = "%s@%s" % (pkg_dirname,
cur_manifest['version'])
target_dirname = "%s@%s" % (pkg_dirname, cur_manifest["version"])
if "__src_url" in cur_manifest:
target_dirname = "%s@src-%s" % (
pkg_dirname,
hashlib.md5(
hashlib_encode_data(
cur_manifest['__src_url'])).hexdigest())
hashlib_encode_data(cur_manifest["__src_url"])
).hexdigest(),
)
shutil.move(pkg_dir, join(self.package_dir, target_dirname))
# fix to a version
elif action == 2:
target_dirname = "%s@%s" % (pkg_dirname,
tmp_manifest['version'])
target_dirname = "%s@%s" % (pkg_dirname, tmp_manifest["version"])
if "__src_url" in tmp_manifest:
target_dirname = "%s@src-%s" % (
pkg_dirname,
hashlib.md5(
hashlib_encode_data(
tmp_manifest['__src_url'])).hexdigest())
hashlib_encode_data(tmp_manifest["__src_url"])
).hexdigest(),
)
pkg_dir = join(self.package_dir, target_dirname)
# remove previous/not-satisfied package
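When the already-installed copy of a package comes from a different source than the incoming one, its directory is renamed with a suffix derived from the source URL. The suffix boils down to an MD5 of the URL bytes (the URL and package dirname below are invented; hashlib_encode_data is assumed here to be roughly a UTF-8 encode):

    import hashlib

    src_url = "git+https://github.com/user/pkg.git"
    suffix = hashlib.md5(src_url.encode("utf-8")).hexdigest()
    print("SomeLib_ID123@src-%s" % suffix)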
@ -622,9 +624,9 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
if "__src_url" in manifest:
try:
vcs = VCSClientFactory.newClient(pkg_dir,
manifest['__src_url'],
silent=True)
vcs = VCSClientFactory.newClient(
pkg_dir, manifest["__src_url"], silent=True
)
except (AttributeError, exception.PlatformioException):
return None
if not vcs.can_be_updated:
@ -633,10 +635,10 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
else:
try:
latest = self.get_latest_repo_version(
"id=%d" %
manifest['id'] if "id" in manifest else manifest['name'],
"id=%d" % manifest["id"] if "id" in manifest else manifest["name"],
requirements,
silent=True)
silent=True,
)
except (exception.PlatformioException, ValueError):
return None
@ -646,21 +648,17 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
up_to_date = False
try:
assert "__src_url" not in manifest
up_to_date = (self.parse_semver_version(manifest['version'],
raise_exception=True) >=
self.parse_semver_version(latest,
raise_exception=True))
up_to_date = self.parse_semver_version(
manifest["version"], raise_exception=True
) >= self.parse_semver_version(latest, raise_exception=True)
except (AssertionError, ValueError):
up_to_date = latest == manifest['version']
up_to_date = latest == manifest["version"]
return False if up_to_date else latest
def install(self,
name,
requirements=None,
silent=False,
after_update=False,
force=False):
def install(
self, name, requirements=None, silent=False, after_update=False, force=False
):
pkg_dir = None
# interprocess lock
with LockFile(self.package_dir):
@ -690,34 +688,38 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
if not silent:
click.secho(
"{name} @ {version} is already installed".format(
**self.load_manifest(package_dir)),
fg="yellow")
**self.load_manifest(package_dir)
),
fg="yellow",
)
return package_dir
if url:
pkg_dir = self._install_from_url(name,
url,
requirements,
track=True)
pkg_dir = self._install_from_url(name, url, requirements, track=True)
else:
pkg_dir = self._install_from_piorepo(name, requirements)
if not pkg_dir or not self.manifest_exists(pkg_dir):
raise exception.PackageInstallError(name, requirements or "*",
util.get_systype())
raise exception.PackageInstallError(
name, requirements or "*", util.get_systype()
)
manifest = self.load_manifest(pkg_dir)
assert manifest
if not after_update:
telemetry.on_event(category=self.__class__.__name__,
action="Install",
label=manifest['name'])
telemetry.on_event(
category=self.__class__.__name__,
action="Install",
label=manifest["name"],
)
click.secho(
"{name} @ {version} has been successfully installed!".format(
**manifest),
fg="green")
**manifest
),
fg="green",
)
return pkg_dir
@ -729,18 +731,20 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
if isdir(package) and self.get_package_by_dir(package):
pkg_dir = package
else:
name, requirements, url = self.parse_pkg_uri(
package, requirements)
name, requirements, url = self.parse_pkg_uri(package, requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
if not pkg_dir:
raise exception.UnknownPackage("%s @ %s" %
(package, requirements or "*"))
raise exception.UnknownPackage(
"%s @ %s" % (package, requirements or "*")
)
manifest = self.load_manifest(pkg_dir)
click.echo("Uninstalling %s @ %s: \t" % (click.style(
manifest['name'], fg="cyan"), manifest['version']),
nl=False)
click.echo(
"Uninstalling %s @ %s: \t"
% (click.style(manifest["name"], fg="cyan"), manifest["version"]),
nl=False,
)
if islink(pkg_dir):
os.unlink(pkg_dir)
@ -749,19 +753,21 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
self.cache_reset()
# unfix package with the same name
pkg_dir = self.get_package_dir(manifest['name'])
pkg_dir = self.get_package_dir(manifest["name"])
if pkg_dir and "@" in pkg_dir:
shutil.move(
pkg_dir,
join(self.package_dir, self.get_install_dirname(manifest)))
pkg_dir, join(self.package_dir, self.get_install_dirname(manifest))
)
self.cache_reset()
click.echo("[%s]" % click.style("OK", fg="green"))
if not after_update:
telemetry.on_event(category=self.__class__.__name__,
action="Uninstall",
label=manifest['name'])
telemetry.on_event(
category=self.__class__.__name__,
action="Uninstall",
label=manifest["name"],
)
return True
@ -773,16 +779,19 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
pkg_dir = self.get_package_dir(*self.parse_pkg_uri(package))
if not pkg_dir:
raise exception.UnknownPackage("%s @ %s" %
(package, requirements or "*"))
raise exception.UnknownPackage("%s @ %s" % (package, requirements or "*"))
manifest = self.load_manifest(pkg_dir)
name = manifest['name']
name = manifest["name"]
click.echo("{} {:<40} @ {:<15}".format(
"Checking" if only_check else "Updating",
click.style(manifest['name'], fg="cyan"), manifest['version']),
nl=False)
click.echo(
"{} {:<40} @ {:<15}".format(
"Checking" if only_check else "Updating",
click.style(manifest["name"], fg="cyan"),
manifest["version"],
),
nl=False,
)
if not util.internet_on():
click.echo("[%s]" % (click.style("Off-line", fg="yellow")))
return None
@ -799,22 +808,22 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
return True
if "__src_url" in manifest:
vcs = VCSClientFactory.newClient(pkg_dir, manifest['__src_url'])
vcs = VCSClientFactory.newClient(pkg_dir, manifest["__src_url"])
assert vcs.update()
self._update_src_manifest(dict(version=vcs.get_current_revision()),
vcs.storage_dir)
self._update_src_manifest(
dict(version=vcs.get_current_revision()), vcs.storage_dir
)
else:
self.uninstall(pkg_dir, after_update=True)
self.install(name, latest, after_update=True)
telemetry.on_event(category=self.__class__.__name__,
action="Update",
label=manifest['name'])
telemetry.on_event(
category=self.__class__.__name__, action="Update", label=manifest["name"]
)
return True
class PackageManager(BasePkgManager):
@property
def manifest_names(self):
return ["package.json"]

View File

@ -26,13 +26,19 @@ from platformio import __version__, app, exception, fs, util
from platformio.compat import PY2, hashlib_encode_data, is_bytes
from platformio.managers.core import get_core_package_dir
from platformio.managers.package import BasePkgManager, PackageManager
from platformio.proc import (BuildAsyncPipe, copy_pythonpath_to_osenv,
exec_command, get_pythonexe_path)
from platformio.proc import (
BuildAsyncPipe,
copy_pythonpath_to_osenv,
exec_command,
get_pythonexe_path,
)
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_boards_dir,
get_project_core_dir,
get_project_packages_dir,
get_project_platforms_dir)
from platformio.project.helpers import (
get_project_boards_dir,
get_project_core_dir,
get_project_packages_dir,
get_project_platforms_dir,
)
try:
from urllib.parse import quote
@ -41,16 +47,17 @@ except ImportError:
class PlatformManager(BasePkgManager):
def __init__(self, package_dir=None, repositories=None):
if not repositories:
repositories = [
"https://dl.bintray.com/platformio/dl-platforms/manifest.json",
"{0}://dl.platformio.org/platforms/manifest.json".format(
"https" if app.get_setting("strict_ssl") else "http")
"https" if app.get_setting("strict_ssl") else "http"
),
]
BasePkgManager.__init__(self, package_dir
or get_project_platforms_dir(), repositories)
BasePkgManager.__init__(
self, package_dir or get_project_platforms_dir(), repositories
)
@property
def manifest_names(self):
@ -65,21 +72,21 @@ class PlatformManager(BasePkgManager):
return manifest_path
return None
def install(self,
name,
requirements=None,
with_packages=None,
without_packages=None,
skip_default_package=False,
after_update=False,
silent=False,
force=False,
**_): # pylint: disable=too-many-arguments, arguments-differ
platform_dir = BasePkgManager.install(self,
name,
requirements,
silent=silent,
force=force)
def install(
self,
name,
requirements=None,
with_packages=None,
without_packages=None,
skip_default_package=False,
after_update=False,
silent=False,
force=False,
**_
): # pylint: disable=too-many-arguments, arguments-differ
platform_dir = BasePkgManager.install(
self, name, requirements, silent=silent, force=force
)
p = PlatformFactory.newPlatform(platform_dir)
# don't cleanup packages or install them after update
@ -87,11 +94,13 @@ class PlatformManager(BasePkgManager):
if after_update:
return True
p.install_packages(with_packages,
without_packages,
skip_default_package,
silent=silent,
force=force)
p.install_packages(
with_packages,
without_packages,
skip_default_package,
silent=silent,
force=force,
)
return self.cleanup_packages(list(p.packages))
def uninstall(self, package, requirements=None, after_update=False):
@ -115,11 +124,8 @@ class PlatformManager(BasePkgManager):
return self.cleanup_packages(list(p.packages))
def update( # pylint: disable=arguments-differ
self,
package,
requirements=None,
only_check=False,
only_packages=False):
self, package, requirements=None, only_check=False, only_packages=False
):
if isdir(package):
pkg_dir = package
else:
@ -143,8 +149,9 @@ class PlatformManager(BasePkgManager):
self.cleanup_packages(list(p.packages))
if missed_pkgs:
p.install_packages(with_packages=list(missed_pkgs),
skip_default_package=True)
p.install_packages(
with_packages=list(missed_pkgs), skip_default_package=True
)
return True
@ -152,20 +159,22 @@ class PlatformManager(BasePkgManager):
self.cache_reset()
deppkgs = {}
for manifest in PlatformManager().get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
for pkgname, pkgmanifest in p.get_installed_packages().items():
if pkgname not in deppkgs:
deppkgs[pkgname] = set()
deppkgs[pkgname].add(pkgmanifest['version'])
deppkgs[pkgname].add(pkgmanifest["version"])
pm = PackageManager(get_project_packages_dir())
for manifest in pm.get_installed():
if manifest['name'] not in names:
if manifest["name"] not in names:
continue
if (manifest['name'] not in deppkgs
or manifest['version'] not in deppkgs[manifest['name']]):
if (
manifest["name"] not in deppkgs
or manifest["version"] not in deppkgs[manifest["name"]]
):
try:
pm.uninstall(manifest['__pkg_dir'], after_update=True)
pm.uninstall(manifest["__pkg_dir"], after_update=True)
except exception.UnknownPackage:
pass
@ -176,7 +185,7 @@ class PlatformManager(BasePkgManager):
def get_installed_boards(self):
boards = []
for manifest in self.get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
for config in p.get_boards().values():
board = config.get_brief_data()
if board not in boards:
@ -189,30 +198,31 @@ class PlatformManager(BasePkgManager):
def get_all_boards(self):
boards = self.get_installed_boards()
know_boards = ["%s:%s" % (b['platform'], b['id']) for b in boards]
know_boards = ["%s:%s" % (b["platform"], b["id"]) for b in boards]
try:
for board in self.get_registered_boards():
key = "%s:%s" % (board['platform'], board['id'])
key = "%s:%s" % (board["platform"], board["id"])
if key not in know_boards:
boards.append(board)
except (exception.APIRequestError, exception.InternetIsOffline):
pass
return sorted(boards, key=lambda b: b['name'])
return sorted(boards, key=lambda b: b["name"])
def board_config(self, id_, platform=None):
for manifest in self.get_installed_boards():
if manifest['id'] == id_ and (not platform
or manifest['platform'] == platform):
if manifest["id"] == id_ and (
not platform or manifest["platform"] == platform
):
return manifest
for manifest in self.get_registered_boards():
if manifest['id'] == id_ and (not platform
or manifest['platform'] == platform):
if manifest["id"] == id_ and (
not platform or manifest["platform"] == platform
):
return manifest
raise exception.UnknownBoard(id_)
class PlatformFactory(object):
@staticmethod
def get_clsname(name):
name = re.sub(r"[^\da-z\_]+", "", name, flags=re.I)
@ -222,8 +232,7 @@ class PlatformFactory(object):
def load_module(name, path):
module = None
try:
module = load_source("platformio.managers.platform.%s" % name,
path)
module = load_source("platformio.managers.platform.%s" % name, path)
except ImportError:
raise exception.UnknownPlatform(name)
return module
@ -234,28 +243,29 @@ class PlatformFactory(object):
platform_dir = None
if isdir(name):
platform_dir = name
name = pm.load_manifest(platform_dir)['name']
name = pm.load_manifest(platform_dir)["name"]
elif name.endswith("platform.json") and isfile(name):
platform_dir = dirname(name)
name = fs.load_json(name)['name']
name = fs.load_json(name)["name"]
else:
name, requirements, url = pm.parse_pkg_uri(name, requirements)
platform_dir = pm.get_package_dir(name, requirements, url)
if platform_dir:
name = pm.load_manifest(platform_dir)['name']
name = pm.load_manifest(platform_dir)["name"]
if not platform_dir:
raise exception.UnknownPlatform(
name if not requirements else "%s@%s" % (name, requirements))
name if not requirements else "%s@%s" % (name, requirements)
)
platform_cls = None
if isfile(join(platform_dir, "platform.py")):
platform_cls = getattr(
cls.load_module(name, join(platform_dir, "platform.py")),
cls.get_clsname(name))
cls.get_clsname(name),
)
else:
platform_cls = type(str(cls.get_clsname(name)), (PlatformBase, ),
{})
platform_cls = type(str(cls.get_clsname(name)), (PlatformBase,), {})
_instance = platform_cls(join(platform_dir, "platform.json"))
assert isinstance(_instance, PlatformBase)
@ -263,14 +273,14 @@ class PlatformFactory(object):
class PlatformPackagesMixin(object):
def install_packages( # pylint: disable=too-many-arguments
self,
with_packages=None,
without_packages=None,
skip_default_package=False,
silent=False,
force=False):
self,
with_packages=None,
without_packages=None,
skip_default_package=False,
silent=False,
force=False,
):
with_packages = set(self.find_pkg_names(with_packages or []))
without_packages = set(self.find_pkg_names(without_packages or []))
@ -283,12 +293,13 @@ class PlatformPackagesMixin(object):
version = opts.get("version", "")
if name in without_packages:
continue
elif (name in with_packages or
not (skip_default_package or opts.get("optional", False))):
elif name in with_packages or not (
skip_default_package or opts.get("optional", False)
):
if ":" in version:
self.pm.install("%s=%s" % (name, version),
silent=silent,
force=force)
self.pm.install(
"%s=%s" % (name, version), silent=silent, force=force
)
else:
self.pm.install(name, version, silent=silent, force=force)
@ -305,9 +316,12 @@ class PlatformPackagesMixin(object):
result.append(_name)
found = True
if (self.frameworks and candidate.startswith("framework-")
and candidate[10:] in self.frameworks):
result.append(self.frameworks[candidate[10:]]['package'])
if (
self.frameworks
and candidate.startswith("framework-")
and candidate[10:] in self.frameworks
):
result.append(self.frameworks[candidate[10:]]["package"])
found = True
if not found:
@ -320,7 +334,7 @@ class PlatformPackagesMixin(object):
requirements = self.packages[name].get("version", "")
if ":" in requirements:
_, requirements, __ = self.pm.parse_pkg_uri(requirements)
self.pm.update(manifest['__pkg_dir'], requirements, only_check)
self.pm.update(manifest["__pkg_dir"], requirements, only_check)
def get_installed_packages(self):
items = {}
@ -335,7 +349,7 @@ class PlatformPackagesMixin(object):
requirements = self.packages[name].get("version", "")
if ":" in requirements:
_, requirements, __ = self.pm.parse_pkg_uri(requirements)
if self.pm.outdated(manifest['__pkg_dir'], requirements):
if self.pm.outdated(manifest["__pkg_dir"], requirements):
return True
return False
@ -343,7 +357,8 @@ class PlatformPackagesMixin(object):
version = self.packages[name].get("version", "")
if ":" in version:
return self.pm.get_package_dir(
*self.pm.parse_pkg_uri("%s=%s" % (name, version)))
*self.pm.parse_pkg_uri("%s=%s" % (name, version))
)
return self.pm.get_package_dir(name, version)
def get_package_version(self, name):
@ -368,15 +383,16 @@ class PlatformRunMixin(object):
return value.decode() if is_bytes(value) else value
def run( # pylint: disable=too-many-arguments
self, variables, targets, silent, verbose, jobs):
self, variables, targets, silent, verbose, jobs
):
assert isinstance(variables, dict)
assert isinstance(targets, list)
config = ProjectConfig.get_instance(variables['project_config'])
options = config.items(env=variables['pioenv'], as_dict=True)
config = ProjectConfig.get_instance(variables["project_config"])
options = config.items(env=variables["pioenv"], as_dict=True)
if "framework" in options:
# support PIO Core 3.0 dev/platforms
options['pioframework'] = options['framework']
options["pioframework"] = options["framework"]
self.configure_default_packages(options, targets)
self.install_packages(silent=True)
@ -386,12 +402,12 @@ class PlatformRunMixin(object):
if "clean" in targets:
targets = ["-c", "."]
variables['platform_manifest'] = self.manifest_path
variables["platform_manifest"] = self.manifest_path
if "build_script" not in variables:
variables['build_script'] = self.get_build_script()
if not isfile(variables['build_script']):
raise exception.BuildScriptNotFound(variables['build_script'])
variables["build_script"] = self.get_build_script()
if not isfile(variables["build_script"]):
raise exception.BuildScriptNotFound(variables["build_script"])
result = self._run_scons(variables, targets, jobs)
assert "returncode" in result
@ -402,14 +418,16 @@ class PlatformRunMixin(object):
args = [
get_pythonexe_path(),
join(get_core_package_dir("tool-scons"), "script", "scons"),
"-Q", "--warn=no-no-parallel-support",
"--jobs", str(jobs),
"--sconstruct", join(fs.get_source_dir(), "builder", "main.py")
"-Q",
"--warn=no-no-parallel-support",
"--jobs",
str(jobs),
"--sconstruct",
join(fs.get_source_dir(), "builder", "main.py"),
] # yapf: disable
args.append("PIOVERBOSE=%d" % (1 if self.verbose else 0))
# pylint: disable=protected-access
args.append("ISATTY=%d" %
(1 if click._compat.isatty(sys.stdout) else 0))
args.append("ISATTY=%d" % (1 if click._compat.isatty(sys.stdout) else 0))
args += targets
# encode and append variables
@ -428,10 +446,13 @@ class PlatformRunMixin(object):
args,
stdout=BuildAsyncPipe(
line_callback=self._on_stdout_line,
data_callback=lambda data: _write_and_flush(sys.stdout, data)),
data_callback=lambda data: _write_and_flush(sys.stdout, data),
),
stderr=BuildAsyncPipe(
line_callback=self._on_stderr_line,
data_callback=lambda data: _write_and_flush(sys.stderr, data)))
data_callback=lambda data: _write_and_flush(sys.stderr, data),
),
)
return result
def _on_stdout_line(self, line):
@ -447,7 +468,7 @@ class PlatformRunMixin(object):
b_pos = line.rfind(": No such file or directory")
if a_pos == -1 or b_pos == -1:
return
self._echo_missed_dependency(line[a_pos + 12:b_pos].strip())
self._echo_missed_dependency(line[a_pos + 12 : b_pos].strip())
def _echo_line(self, line, level):
if line.startswith("scons: "):
@ -472,18 +493,22 @@ class PlatformRunMixin(object):
* Web > {link}
*
{dots}
""".format(filename=filename,
filename_styled=click.style(filename, fg="cyan"),
link=click.style(
"https://platformio.org/lib/search?query=header:%s" %
quote(filename, safe=""),
fg="blue"),
dots="*" * (56 + len(filename)))
""".format(
filename=filename,
filename_styled=click.style(filename, fg="cyan"),
link=click.style(
"https://platformio.org/lib/search?query=header:%s"
% quote(filename, safe=""),
fg="blue",
),
dots="*" * (56 + len(filename)),
)
click.echo(banner, err=True)
class PlatformBase( # pylint: disable=too-many-public-methods
PlatformPackagesMixin, PlatformRunMixin):
PlatformPackagesMixin, PlatformRunMixin
):
PIO_VERSION = semantic_version.Version(util.pepver_to_semver(__version__))
_BOARDS_CACHE = {}
@ -497,8 +522,7 @@ class PlatformBase( # pylint: disable=too-many-public-methods
self._manifest = fs.load_json(manifest_path)
self._custom_packages = None
self.pm = PackageManager(get_project_packages_dir(),
self.package_repositories)
self.pm = PackageManager(get_project_packages_dir(), self.package_repositories)
# if self.engines and "platformio" in self.engines:
# if self.PIO_VERSION not in semantic_version.SimpleSpec(
@ -508,19 +532,19 @@ class PlatformBase( # pylint: disable=too-many-public-methods
@property
def name(self):
return self._manifest['name']
return self._manifest["name"]
@property
def title(self):
return self._manifest['title']
return self._manifest["title"]
@property
def description(self):
return self._manifest['description']
return self._manifest["description"]
@property
def version(self):
return self._manifest['version']
return self._manifest["version"]
@property
def homepage(self):
@ -561,7 +585,7 @@ class PlatformBase( # pylint: disable=too-many-public-methods
@property
def packages(self):
packages = self._manifest.get("packages", {})
for item in (self._custom_packages or []):
for item in self._custom_packages or []:
name = item
version = "*"
if "@" in item:
@ -569,10 +593,7 @@ class PlatformBase( # pylint: disable=too-many-public-methods
name = name.strip()
if name not in packages:
packages[name] = {}
packages[name].update({
"version": version.strip(),
"optional": False
})
packages[name].update({"version": version.strip(), "optional": False})
return packages
def get_dir(self):
@ -591,15 +612,13 @@ class PlatformBase( # pylint: disable=too-many-public-methods
return False
def get_boards(self, id_=None):
def _append_board(board_id, manifest_path):
config = PlatformBoardConfig(manifest_path)
if "platform" in config and config.get("platform") != self.name:
return
if "platforms" in config \
and self.name not in config.get("platforms"):
if "platforms" in config and self.name not in config.get("platforms"):
return
config.manifest['platform'] = self.name
config.manifest["platform"] = self.name
self._BOARDS_CACHE[board_id] = config
bdirs = [
@ -649,28 +668,28 @@ class PlatformBase( # pylint: disable=too-many-public-methods
continue
_pkg_name = self.frameworks[framework].get("package")
if _pkg_name:
self.packages[_pkg_name]['optional'] = False
self.packages[_pkg_name]["optional"] = False
# enable upload tools for upload targets
if any(["upload" in t for t in targets] + ["program" in targets]):
for name, opts in self.packages.items():
if opts.get("type") == "uploader":
self.packages[name]['optional'] = False
self.packages[name]["optional"] = False
# skip all packages in "nobuild" mode
# allow only upload tools and frameworks
elif "nobuild" in targets and opts.get("type") != "framework":
self.packages[name]['optional'] = True
self.packages[name]["optional"] = True
def get_lib_storages(self):
storages = []
for opts in (self.frameworks or {}).values():
if "package" not in opts:
continue
pkg_dir = self.get_package_dir(opts['package'])
pkg_dir = self.get_package_dir(opts["package"])
if not pkg_dir or not isdir(join(pkg_dir, "libraries")):
continue
libs_dir = join(pkg_dir, "libraries")
storages.append({"name": opts['package'], "path": libs_dir})
storages.append({"name": opts["package"], "path": libs_dir})
libcores_dir = join(libs_dir, "__cores__")
if not isdir(libcores_dir):
continue
@ -678,16 +697,17 @@ class PlatformBase( # pylint: disable=too-many-public-methods
libcore_dir = join(libcores_dir, item)
if not isdir(libcore_dir):
continue
storages.append({
"name": "%s-core-%s" % (opts['package'], item),
"path": libcore_dir
})
storages.append(
{
"name": "%s-core-%s" % (opts["package"], item),
"path": libcore_dir,
}
)
return storages
class PlatformBoardConfig(object):
def __init__(self, manifest_path):
self._id = basename(manifest_path)[:-5]
assert isfile(manifest_path)
@ -698,8 +718,8 @@ class PlatformBoardConfig(object):
raise exception.InvalidBoardManifest(manifest_path)
if not set(["name", "url", "vendor"]) <= set(self._manifest):
raise exception.PlatformioException(
"Please specify name, url and vendor fields for " +
manifest_path)
"Please specify name, url and vendor fields for " + manifest_path
)
def get(self, path, default=None):
try:
@ -751,41 +771,33 @@ class PlatformBoardConfig(object):
def get_brief_data(self):
return {
"id":
self.id,
"name":
self._manifest['name'],
"platform":
self._manifest.get("platform"),
"mcu":
self._manifest.get("build", {}).get("mcu", "").upper(),
"fcpu":
int("".join([
c for c in str(
self._manifest.get("build", {}).get("f_cpu", "0L"))
if c.isdigit()
])),
"ram":
self._manifest.get("upload", {}).get("maximum_ram_size", 0),
"rom":
self._manifest.get("upload", {}).get("maximum_size", 0),
"connectivity":
self._manifest.get("connectivity"),
"frameworks":
self._manifest.get("frameworks"),
"debug":
self.get_debug_data(),
"vendor":
self._manifest['vendor'],
"url":
self._manifest['url']
"id": self.id,
"name": self._manifest["name"],
"platform": self._manifest.get("platform"),
"mcu": self._manifest.get("build", {}).get("mcu", "").upper(),
"fcpu": int(
"".join(
[
c
for c in str(self._manifest.get("build", {}).get("f_cpu", "0L"))
if c.isdigit()
]
)
),
"ram": self._manifest.get("upload", {}).get("maximum_ram_size", 0),
"rom": self._manifest.get("upload", {}).get("maximum_size", 0),
"connectivity": self._manifest.get("connectivity"),
"frameworks": self._manifest.get("frameworks"),
"debug": self.get_debug_data(),
"vendor": self._manifest["vendor"],
"url": self._manifest["url"],
}
def get_debug_data(self):
if not self._manifest.get("debug", {}).get("tools"):
return None
tools = {}
for name, options in self._manifest['debug']['tools'].items():
for name, options in self._manifest["debug"]["tools"].items():
tools[name] = {}
for key, value in options.items():
if key in ("default", "onboard"):
@ -798,22 +810,23 @@ class PlatformBoardConfig(object):
if tool_name == "custom":
return tool_name
if not debug_tools:
raise exception.DebugSupportError(self._manifest['name'])
raise exception.DebugSupportError(self._manifest["name"])
if tool_name:
if tool_name in debug_tools:
return tool_name
raise exception.DebugInvalidOptions(
"Unknown debug tool `%s`. Please use one of `%s` or `custom`" %
(tool_name, ", ".join(sorted(list(debug_tools)))))
"Unknown debug tool `%s`. Please use one of `%s` or `custom`"
% (tool_name, ", ".join(sorted(list(debug_tools))))
)
# automatically select best tool
data = {"default": [], "onboard": [], "external": []}
for key, value in debug_tools.items():
if value.get("default"):
data['default'].append(key)
data["default"].append(key)
elif value.get("onboard"):
data['onboard'].append(key)
data['external'].append(key)
data["onboard"].append(key)
data["external"].append(key)
for key, value in data.items():
if not value:


@ -23,7 +23,6 @@ from platformio.compat import WINDOWS, string_types
class AsyncPipeBase(object):
def __init__(self):
self._fd_read, self._fd_write = os.pipe()
self._pipe_reader = os.fdopen(self._fd_read)
@ -53,7 +52,6 @@ class AsyncPipeBase(object):
class BuildAsyncPipe(AsyncPipeBase):
def __init__(self, line_callback, data_callback):
self.line_callback = line_callback
self.data_callback = data_callback
@ -88,7 +86,6 @@ class BuildAsyncPipe(AsyncPipeBase):
class LineBufferedAsyncPipe(AsyncPipeBase):
def __init__(self, line_callback):
self.line_callback = line_callback
super(LineBufferedAsyncPipe, self).__init__()
@ -109,8 +106,8 @@ def exec_command(*args, **kwargs):
p = subprocess.Popen(*args, **kwargs)
try:
result['out'], result['err'] = p.communicate()
result['returncode'] = p.returncode
result["out"], result["err"] = p.communicate()
result["returncode"] = p.returncode
except KeyboardInterrupt:
raise exception.AbortedByUser()
finally:
@ -160,24 +157,22 @@ def copy_pythonpath_to_osenv():
for p in os.sys.path:
conditions = [p not in _PYTHONPATH]
if not WINDOWS:
conditions.append(
isdir(join(p, "click")) or isdir(join(p, "platformio")))
conditions.append(isdir(join(p, "click")) or isdir(join(p, "platformio")))
if all(conditions):
_PYTHONPATH.append(p)
os.environ['PYTHONPATH'] = os.pathsep.join(_PYTHONPATH)
os.environ["PYTHONPATH"] = os.pathsep.join(_PYTHONPATH)
def where_is_program(program, envpath=None):
env = os.environ
if envpath:
env['PATH'] = envpath
env["PATH"] = envpath
# try OS's built-in commands
try:
result = exec_command(["where" if WINDOWS else "which", program],
env=env)
if result['returncode'] == 0 and isfile(result['out'].strip()):
return result['out'].strip()
result = exec_command(["where" if WINDOWS else "which", program], env=env)
if result["returncode"] == 0 and isfile(result["out"].strip()):
return result["out"].strip()
except OSError:
pass


@ -119,23 +119,26 @@ class ProjectConfig(object):
def _maintain_renaimed_options(self):
# legacy `lib_extra_dirs` in [platformio]
if (self._parser.has_section("platformio")
and self._parser.has_option("platformio", "lib_extra_dirs")):
if self._parser.has_section("platformio") and self._parser.has_option(
"platformio", "lib_extra_dirs"
):
if not self._parser.has_section("env"):
self._parser.add_section("env")
self._parser.set("env", "lib_extra_dirs",
self._parser.get("platformio", "lib_extra_dirs"))
self._parser.set(
"env",
"lib_extra_dirs",
self._parser.get("platformio", "lib_extra_dirs"),
)
self._parser.remove_option("platformio", "lib_extra_dirs")
self.warnings.append(
"`lib_extra_dirs` configuration option is deprecated in "
"section [platformio]! Please move it to global `env` section")
"section [platformio]! Please move it to global `env` section"
)
renamed_options = {}
for option in ProjectOptions.values():
if option.oldnames:
renamed_options.update(
{name: option.name
for name in option.oldnames})
renamed_options.update({name: option.name for name in option.oldnames})
for section in self._parser.sections():
scope = section.split(":", 1)[0]
@ -146,29 +149,34 @@ class ProjectConfig(object):
self.warnings.append(
"`%s` configuration option in section [%s] is "
"deprecated and will be removed in the next release! "
"Please use `%s` instead" %
(option, section, renamed_options[option]))
"Please use `%s` instead"
% (option, section, renamed_options[option])
)
# rename on-the-fly
self._parser.set(section, renamed_options[option],
self._parser.get(section, option))
self._parser.set(
section,
renamed_options[option],
self._parser.get(section, option),
)
self._parser.remove_option(section, option)
continue
# unknown
unknown_conditions = [
("%s.%s" % (scope, option)) not in ProjectOptions,
scope != "env" or
not option.startswith(("custom_", "board_"))
scope != "env" or not option.startswith(("custom_", "board_")),
] # yapf: disable
if all(unknown_conditions):
self.warnings.append(
"Ignore unknown configuration option `%s` "
"in section [%s]" % (option, section))
"in section [%s]" % (option, section)
)
return True
def walk_options(self, root_section):
extends_queue = (["env", root_section] if
root_section.startswith("env:") else [root_section])
extends_queue = (
["env", root_section] if root_section.startswith("env:") else [root_section]
)
extends_done = []
while extends_queue:
section = extends_queue.pop()
@ -179,8 +187,8 @@ class ProjectConfig(object):
yield (section, option)
if self._parser.has_option(section, "extends"):
extends_queue.extend(
self.parse_multi_values(
self._parser.get(section, "extends"))[::-1])
self.parse_multi_values(self._parser.get(section, "extends"))[::-1]
)
def options(self, section=None, env=None):
result = []
@ -213,11 +221,9 @@ class ProjectConfig(object):
section = "env:" + env
if as_dict:
return {
option: self.get(section, option)
for option in self.options(section)
option: self.get(section, option) for option in self.options(section)
}
return [(option, self.get(section, option))
for option in self.options(section)]
return [(option, self.get(section, option)) for option in self.options(section)]
def set(self, section, option, value):
if isinstance(value, (list, tuple)):
@ -260,8 +266,7 @@ class ProjectConfig(object):
except ConfigParser.Error as e:
raise exception.InvalidProjectConf(self.path, str(e))
option_meta = ProjectOptions.get("%s.%s" %
(section.split(":", 1)[0], option))
option_meta = ProjectOptions.get("%s.%s" % (section.split(":", 1)[0], option))
if not option_meta:
return value or default
@ -288,8 +293,7 @@ class ProjectConfig(object):
try:
return self._cast_to(value, option_meta.type)
except click.BadParameter as e:
raise exception.ProjectOptionValueError(e.format_message(), option,
section)
raise exception.ProjectOptionValueError(e.format_message(), option, section)
@staticmethod
def _cast_to(value, to_type):
@ -317,8 +321,7 @@ class ProjectConfig(object):
raise exception.ProjectEnvsNotAvailable()
unknown = set(list(envs or []) + self.default_envs()) - known
if unknown:
raise exception.UnknownEnvNames(", ".join(unknown),
", ".join(known))
raise exception.UnknownEnvNames(", ".join(unknown), ", ".join(known))
if not silent:
for warning in self.warnings:
click.secho("Warning! %s" % warning, fg="yellow")


@ -16,8 +16,16 @@ import json
import os
from hashlib import sha1
from os import walk
from os.path import (basename, dirname, expanduser, isdir, isfile, join,
realpath, splitdrive)
from os.path import (
basename,
dirname,
expanduser,
isdir,
isfile,
join,
realpath,
splitdrive,
)
from click.testing import CliRunner
@ -56,9 +64,13 @@ def get_project_optional_dir(name, default=None):
if "$PROJECT_HASH" in optional_dir:
optional_dir = optional_dir.replace(
"$PROJECT_HASH", "%s-%s" %
(basename(project_dir), sha1(
hashlib_encode_data(project_dir)).hexdigest()[:10]))
"$PROJECT_HASH",
"%s-%s"
% (
basename(project_dir),
sha1(hashlib_encode_data(project_dir)).hexdigest()[:10],
),
)
if optional_dir.startswith("~"):
optional_dir = expanduser(optional_dir)
@ -69,7 +81,8 @@ def get_project_optional_dir(name, default=None):
def get_project_core_dir():
default = join(expanduser("~"), ".platformio")
core_dir = get_project_optional_dir(
"core_dir", get_project_optional_dir("home_dir", default))
"core_dir", get_project_optional_dir("home_dir", default)
)
win_core_dir = None
if WINDOWS and core_dir == default:
win_core_dir = splitdrive(core_dir)[0] + "\\.platformio"
@ -91,33 +104,35 @@ def get_project_core_dir():
def get_project_global_lib_dir():
return get_project_optional_dir("globallib_dir",
join(get_project_core_dir(), "lib"))
return get_project_optional_dir(
"globallib_dir", join(get_project_core_dir(), "lib")
)
def get_project_platforms_dir():
return get_project_optional_dir("platforms_dir",
join(get_project_core_dir(), "platforms"))
return get_project_optional_dir(
"platforms_dir", join(get_project_core_dir(), "platforms")
)
def get_project_packages_dir():
return get_project_optional_dir("packages_dir",
join(get_project_core_dir(), "packages"))
return get_project_optional_dir(
"packages_dir", join(get_project_core_dir(), "packages")
)
def get_project_cache_dir():
return get_project_optional_dir("cache_dir",
join(get_project_core_dir(), ".cache"))
return get_project_optional_dir("cache_dir", join(get_project_core_dir(), ".cache"))
def get_project_workspace_dir():
return get_project_optional_dir("workspace_dir",
join(get_project_dir(), ".pio"))
return get_project_optional_dir("workspace_dir", join(get_project_dir(), ".pio"))
def get_project_build_dir(force=False):
path = get_project_optional_dir("build_dir",
join(get_project_workspace_dir(), "build"))
path = get_project_optional_dir(
"build_dir", join(get_project_workspace_dir(), "build")
)
try:
if not isdir(path):
os.makedirs(path)
@ -129,7 +144,8 @@ def get_project_build_dir(force=False):
def get_project_libdeps_dir():
return get_project_optional_dir(
"libdeps_dir", join(get_project_workspace_dir(), "libdeps"))
"libdeps_dir", join(get_project_workspace_dir(), "libdeps")
)
def get_project_lib_dir():
@ -137,8 +153,7 @@ def get_project_lib_dir():
def get_project_include_dir():
return get_project_optional_dir("include_dir",
join(get_project_dir(), "include"))
return get_project_optional_dir("include_dir", join(get_project_dir(), "include"))
def get_project_src_dir():
@ -146,23 +161,19 @@ def get_project_src_dir():
def get_project_test_dir():
return get_project_optional_dir("test_dir", join(get_project_dir(),
"test"))
return get_project_optional_dir("test_dir", join(get_project_dir(), "test"))
def get_project_boards_dir():
return get_project_optional_dir("boards_dir",
join(get_project_dir(), "boards"))
return get_project_optional_dir("boards_dir", join(get_project_dir(), "boards"))
def get_project_data_dir():
return get_project_optional_dir("data_dir", join(get_project_dir(),
"data"))
return get_project_optional_dir("data_dir", join(get_project_dir(), "data"))
def get_project_shared_dir():
return get_project_optional_dir("shared_dir",
join(get_project_dir(), "shared"))
return get_project_optional_dir("shared_dir", join(get_project_dir(), "shared"))
def compute_project_checksum(config):
@ -174,8 +185,7 @@ def compute_project_checksum(config):
# project file structure
check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
for d in (get_project_include_dir(), get_project_src_dir(),
get_project_lib_dir()):
for d in (get_project_include_dir(), get_project_src_dir(), get_project_lib_dir()):
if not isdir(d):
continue
chunks = []
@ -196,6 +206,7 @@ def compute_project_checksum(config):
def load_project_ide_data(project_dir, env_or_envs):
from platformio.commands.run import cli as cmd_run
assert env_or_envs
envs = env_or_envs
if not isinstance(envs, list):
@ -204,8 +215,9 @@ def load_project_ide_data(project_dir, env_or_envs):
for env in envs:
args.extend(["-e", env])
result = CliRunner().invoke(cmd_run, args)
if result.exit_code != 0 and not isinstance(result.exception,
exception.ReturnErrorCode):
if result.exit_code != 0 and not isinstance(
result.exception, exception.ReturnErrorCode
):
raise result.exception
if '"includes":' not in result.output:
raise exception.PlatformioException(result.output)
@ -213,11 +225,10 @@ def load_project_ide_data(project_dir, env_or_envs):
data = {}
for line in result.output.split("\n"):
line = line.strip()
if (line.startswith('{"') and line.endswith("}")
and "env_name" in line):
if line.startswith('{"') and line.endswith("}") and "env_name" in line:
_data = json.loads(line)
if "env_name" in _data:
data[_data['env_name']] = _data
data[_data["env_name"]] = _data
if not isinstance(env_or_envs, list) and env_or_envs in data:
return data[env_or_envs]
return data or None


@ -18,20 +18,24 @@ from collections import OrderedDict, namedtuple
import click
ConfigOptionClass = namedtuple("ConfigOption", [
"scope", "name", "type", "multiple", "sysenvvar", "buildenvvar", "oldnames"
])
ConfigOptionClass = namedtuple(
"ConfigOption",
["scope", "name", "type", "multiple", "sysenvvar", "buildenvvar", "oldnames"],
)
def ConfigOption(scope,
name,
type=str,
multiple=False,
sysenvvar=None,
buildenvvar=None,
oldnames=None):
return ConfigOptionClass(scope, name, type, multiple, sysenvvar,
buildenvvar, oldnames)
def ConfigOption(
scope,
name,
type=str,
multiple=False,
sysenvvar=None,
buildenvvar=None,
oldnames=None,
):
return ConfigOptionClass(
scope, name, type, multiple, sysenvvar, buildenvvar, oldnames
)
def ConfigPlatformioOption(*args, **kwargs):
@ -42,171 +46,202 @@ def ConfigEnvOption(*args, **kwargs):
return ConfigOption("env", *args, **kwargs)
ProjectOptions = OrderedDict([
("%s.%s" % (option.scope, option.name), option) for option in [
#
# [platformio]
#
ConfigPlatformioOption(name="description"),
ConfigPlatformioOption(name="default_envs",
oldnames=["env_default"],
multiple=True,
sysenvvar="PLATFORMIO_DEFAULT_ENVS"),
ConfigPlatformioOption(name="extra_configs", multiple=True),
# Dirs
ConfigPlatformioOption(name="core_dir",
oldnames=["home_dir"],
sysenvvar="PLATFORMIO_CORE_DIR"),
ConfigPlatformioOption(name="globallib_dir",
sysenvvar="PLATFORMIO_GLOBALLIB_DIR"),
ConfigPlatformioOption(name="platforms_dir",
sysenvvar="PLATFORMIO_PLATFORMS_DIR"),
ConfigPlatformioOption(name="packages_dir",
sysenvvar="PLATFORMIO_PACKAGES_DIR"),
ConfigPlatformioOption(name="cache_dir",
sysenvvar="PLATFORMIO_CACHE_DIR"),
ConfigPlatformioOption(name="build_cache_dir",
sysenvvar="PLATFORMIO_BUILD_CACHE_DIR"),
ConfigPlatformioOption(name="workspace_dir",
sysenvvar="PLATFORMIO_WORKSPACE_DIR"),
ConfigPlatformioOption(name="build_dir",
sysenvvar="PLATFORMIO_BUILD_DIR"),
ConfigPlatformioOption(name="libdeps_dir",
sysenvvar="PLATFORMIO_LIBDEPS_DIR"),
ConfigPlatformioOption(name="lib_dir", sysenvvar="PLATFORMIO_LIB_DIR"),
ConfigPlatformioOption(name="include_dir",
sysenvvar="PLATFORMIO_INCLUDE_DIR"),
ConfigPlatformioOption(name="src_dir", sysenvvar="PLATFORMIO_SRC_DIR"),
ConfigPlatformioOption(name="test_dir",
sysenvvar="PLATFORMIO_TEST_DIR"),
ConfigPlatformioOption(name="boards_dir",
sysenvvar="PLATFORMIO_BOARDS_DIR"),
ConfigPlatformioOption(name="data_dir",
sysenvvar="PLATFORMIO_DATA_DIR"),
ConfigPlatformioOption(name="shared_dir",
sysenvvar="PLATFORMIO_SHARED_DIR"),
#
# [env]
#
ConfigEnvOption(name="extends", multiple=True),
# Generic
ConfigEnvOption(name="platform", buildenvvar="PIOPLATFORM"),
ConfigEnvOption(name="platform_packages", multiple=True),
ConfigEnvOption(
name="framework", multiple=True, buildenvvar="PIOFRAMEWORK"),
# Board
ConfigEnvOption(name="board", buildenvvar="BOARD"),
ConfigEnvOption(name="board_build.mcu",
oldnames=["board_mcu"],
buildenvvar="BOARD_MCU"),
ConfigEnvOption(name="board_build.f_cpu",
oldnames=["board_f_cpu"],
buildenvvar="BOARD_F_CPU"),
ConfigEnvOption(name="board_build.f_flash",
oldnames=["board_f_flash"],
buildenvvar="BOARD_F_FLASH"),
ConfigEnvOption(name="board_build.flash_mode",
oldnames=["board_flash_mode"],
buildenvvar="BOARD_FLASH_MODE"),
# Build
ConfigEnvOption(name="build_type",
type=click.Choice(["release", "debug"])),
ConfigEnvOption(name="build_flags",
multiple=True,
sysenvvar="PLATFORMIO_BUILD_FLAGS",
buildenvvar="BUILD_FLAGS"),
ConfigEnvOption(name="src_build_flags",
multiple=True,
sysenvvar="PLATFORMIO_SRC_BUILD_FLAGS",
buildenvvar="SRC_BUILD_FLAGS"),
ConfigEnvOption(name="build_unflags",
multiple=True,
sysenvvar="PLATFORMIO_BUILD_UNFLAGS",
buildenvvar="BUILD_UNFLAGS"),
ConfigEnvOption(name="src_filter",
multiple=True,
sysenvvar="PLATFORMIO_SRC_FILTER",
buildenvvar="SRC_FILTER"),
ConfigEnvOption(name="targets", multiple=True),
# Upload
ConfigEnvOption(name="upload_port",
sysenvvar="PLATFORMIO_UPLOAD_PORT",
buildenvvar="UPLOAD_PORT"),
ConfigEnvOption(name="upload_protocol", buildenvvar="UPLOAD_PROTOCOL"),
ConfigEnvOption(
name="upload_speed", type=click.INT, buildenvvar="UPLOAD_SPEED"),
ConfigEnvOption(name="upload_flags",
multiple=True,
sysenvvar="PLATFORMIO_UPLOAD_FLAGS",
buildenvvar="UPLOAD_FLAGS"),
ConfigEnvOption(name="upload_resetmethod",
buildenvvar="UPLOAD_RESETMETHOD"),
ConfigEnvOption(name="upload_command", buildenvvar="UPLOADCMD"),
# Monitor
ConfigEnvOption(name="monitor_port"),
ConfigEnvOption(name="monitor_speed", oldnames=["monitor_baud"]),
ConfigEnvOption(name="monitor_rts", type=click.IntRange(0, 1)),
ConfigEnvOption(name="monitor_dtr", type=click.IntRange(0, 1)),
ConfigEnvOption(name="monitor_flags", multiple=True),
# Library
ConfigEnvOption(name="lib_deps",
oldnames=["lib_use", "lib_force", "lib_install"],
multiple=True),
ConfigEnvOption(name="lib_ignore", multiple=True),
ConfigEnvOption(name="lib_extra_dirs",
multiple=True,
sysenvvar="PLATFORMIO_LIB_EXTRA_DIRS"),
ConfigEnvOption(name="lib_ldf_mode",
type=click.Choice(
["off", "chain", "deep", "chain+", "deep+"])),
ConfigEnvOption(name="lib_compat_mode",
type=click.Choice(["off", "soft", "strict"])),
ConfigEnvOption(name="lib_archive", type=click.BOOL),
# Test
ConfigEnvOption(name="test_filter", multiple=True),
ConfigEnvOption(name="test_ignore", multiple=True),
ConfigEnvOption(name="test_port"),
ConfigEnvOption(name="test_speed", type=click.INT),
ConfigEnvOption(name="test_transport"),
ConfigEnvOption(name="test_build_project_src", type=click.BOOL),
# Debug
ConfigEnvOption(name="debug_tool"),
ConfigEnvOption(name="debug_init_break"),
ConfigEnvOption(name="debug_init_cmds", multiple=True),
ConfigEnvOption(name="debug_extra_cmds", multiple=True),
ConfigEnvOption(name="debug_load_cmds",
oldnames=["debug_load_cmd"],
multiple=True),
ConfigEnvOption(name="debug_load_mode",
type=click.Choice(["always", "modified", "manual"])),
ConfigEnvOption(name="debug_server", multiple=True),
ConfigEnvOption(name="debug_port"),
ConfigEnvOption(name="debug_svd_path",
type=click.Path(
exists=True, file_okay=True, dir_okay=False)),
# Check
ConfigEnvOption(name="check_tool", multiple=True),
ConfigEnvOption(name="check_filter", multiple=True),
ConfigEnvOption(name="check_flags", multiple=True),
ConfigEnvOption(name="check_severity",
multiple=True,
type=click.Choice(["low", "medium", "high"])),
# Other
ConfigEnvOption(name="extra_scripts",
oldnames=["extra_script"],
multiple=True,
sysenvvar="PLATFORMIO_EXTRA_SCRIPTS")
ProjectOptions = OrderedDict(
[
("%s.%s" % (option.scope, option.name), option)
for option in [
#
# [platformio]
#
ConfigPlatformioOption(name="description"),
ConfigPlatformioOption(
name="default_envs",
oldnames=["env_default"],
multiple=True,
sysenvvar="PLATFORMIO_DEFAULT_ENVS",
),
ConfigPlatformioOption(name="extra_configs", multiple=True),
# Dirs
ConfigPlatformioOption(
name="core_dir", oldnames=["home_dir"], sysenvvar="PLATFORMIO_CORE_DIR"
),
ConfigPlatformioOption(
name="globallib_dir", sysenvvar="PLATFORMIO_GLOBALLIB_DIR"
),
ConfigPlatformioOption(
name="platforms_dir", sysenvvar="PLATFORMIO_PLATFORMS_DIR"
),
ConfigPlatformioOption(
name="packages_dir", sysenvvar="PLATFORMIO_PACKAGES_DIR"
),
ConfigPlatformioOption(name="cache_dir", sysenvvar="PLATFORMIO_CACHE_DIR"),
ConfigPlatformioOption(
name="build_cache_dir", sysenvvar="PLATFORMIO_BUILD_CACHE_DIR"
),
ConfigPlatformioOption(
name="workspace_dir", sysenvvar="PLATFORMIO_WORKSPACE_DIR"
),
ConfigPlatformioOption(name="build_dir", sysenvvar="PLATFORMIO_BUILD_DIR"),
ConfigPlatformioOption(
name="libdeps_dir", sysenvvar="PLATFORMIO_LIBDEPS_DIR"
),
ConfigPlatformioOption(name="lib_dir", sysenvvar="PLATFORMIO_LIB_DIR"),
ConfigPlatformioOption(
name="include_dir", sysenvvar="PLATFORMIO_INCLUDE_DIR"
),
ConfigPlatformioOption(name="src_dir", sysenvvar="PLATFORMIO_SRC_DIR"),
ConfigPlatformioOption(name="test_dir", sysenvvar="PLATFORMIO_TEST_DIR"),
ConfigPlatformioOption(
name="boards_dir", sysenvvar="PLATFORMIO_BOARDS_DIR"
),
ConfigPlatformioOption(name="data_dir", sysenvvar="PLATFORMIO_DATA_DIR"),
ConfigPlatformioOption(
name="shared_dir", sysenvvar="PLATFORMIO_SHARED_DIR"
),
#
# [env]
#
ConfigEnvOption(name="extends", multiple=True),
# Generic
ConfigEnvOption(name="platform", buildenvvar="PIOPLATFORM"),
ConfigEnvOption(name="platform_packages", multiple=True),
ConfigEnvOption(
name="framework", multiple=True, buildenvvar="PIOFRAMEWORK"
),
# Board
ConfigEnvOption(name="board", buildenvvar="BOARD"),
ConfigEnvOption(
name="board_build.mcu", oldnames=["board_mcu"], buildenvvar="BOARD_MCU"
),
ConfigEnvOption(
name="board_build.f_cpu",
oldnames=["board_f_cpu"],
buildenvvar="BOARD_F_CPU",
),
ConfigEnvOption(
name="board_build.f_flash",
oldnames=["board_f_flash"],
buildenvvar="BOARD_F_FLASH",
),
ConfigEnvOption(
name="board_build.flash_mode",
oldnames=["board_flash_mode"],
buildenvvar="BOARD_FLASH_MODE",
),
# Build
ConfigEnvOption(name="build_type", type=click.Choice(["release", "debug"])),
ConfigEnvOption(
name="build_flags",
multiple=True,
sysenvvar="PLATFORMIO_BUILD_FLAGS",
buildenvvar="BUILD_FLAGS",
),
ConfigEnvOption(
name="src_build_flags",
multiple=True,
sysenvvar="PLATFORMIO_SRC_BUILD_FLAGS",
buildenvvar="SRC_BUILD_FLAGS",
),
ConfigEnvOption(
name="build_unflags",
multiple=True,
sysenvvar="PLATFORMIO_BUILD_UNFLAGS",
buildenvvar="BUILD_UNFLAGS",
),
ConfigEnvOption(
name="src_filter",
multiple=True,
sysenvvar="PLATFORMIO_SRC_FILTER",
buildenvvar="SRC_FILTER",
),
ConfigEnvOption(name="targets", multiple=True),
# Upload
ConfigEnvOption(
name="upload_port",
sysenvvar="PLATFORMIO_UPLOAD_PORT",
buildenvvar="UPLOAD_PORT",
),
ConfigEnvOption(name="upload_protocol", buildenvvar="UPLOAD_PROTOCOL"),
ConfigEnvOption(
name="upload_speed", type=click.INT, buildenvvar="UPLOAD_SPEED"
),
ConfigEnvOption(
name="upload_flags",
multiple=True,
sysenvvar="PLATFORMIO_UPLOAD_FLAGS",
buildenvvar="UPLOAD_FLAGS",
),
ConfigEnvOption(
name="upload_resetmethod", buildenvvar="UPLOAD_RESETMETHOD"
),
ConfigEnvOption(name="upload_command", buildenvvar="UPLOADCMD"),
# Monitor
ConfigEnvOption(name="monitor_port"),
ConfigEnvOption(name="monitor_speed", oldnames=["monitor_baud"]),
ConfigEnvOption(name="monitor_rts", type=click.IntRange(0, 1)),
ConfigEnvOption(name="monitor_dtr", type=click.IntRange(0, 1)),
ConfigEnvOption(name="monitor_flags", multiple=True),
# Library
ConfigEnvOption(
name="lib_deps",
oldnames=["lib_use", "lib_force", "lib_install"],
multiple=True,
),
ConfigEnvOption(name="lib_ignore", multiple=True),
ConfigEnvOption(
name="lib_extra_dirs",
multiple=True,
sysenvvar="PLATFORMIO_LIB_EXTRA_DIRS",
),
ConfigEnvOption(
name="lib_ldf_mode",
type=click.Choice(["off", "chain", "deep", "chain+", "deep+"]),
),
ConfigEnvOption(
name="lib_compat_mode", type=click.Choice(["off", "soft", "strict"])
),
ConfigEnvOption(name="lib_archive", type=click.BOOL),
# Test
ConfigEnvOption(name="test_filter", multiple=True),
ConfigEnvOption(name="test_ignore", multiple=True),
ConfigEnvOption(name="test_port"),
ConfigEnvOption(name="test_speed", type=click.INT),
ConfigEnvOption(name="test_transport"),
ConfigEnvOption(name="test_build_project_src", type=click.BOOL),
# Debug
ConfigEnvOption(name="debug_tool"),
ConfigEnvOption(name="debug_init_break"),
ConfigEnvOption(name="debug_init_cmds", multiple=True),
ConfigEnvOption(name="debug_extra_cmds", multiple=True),
ConfigEnvOption(
name="debug_load_cmds", oldnames=["debug_load_cmd"], multiple=True
),
ConfigEnvOption(
name="debug_load_mode",
type=click.Choice(["always", "modified", "manual"]),
),
ConfigEnvOption(name="debug_server", multiple=True),
ConfigEnvOption(name="debug_port"),
ConfigEnvOption(
name="debug_svd_path",
type=click.Path(exists=True, file_okay=True, dir_okay=False),
),
# Check
ConfigEnvOption(name="check_tool", multiple=True),
ConfigEnvOption(name="check_filter", multiple=True),
ConfigEnvOption(name="check_flags", multiple=True),
ConfigEnvOption(
name="check_severity",
multiple=True,
type=click.Choice(["low", "medium", "high"]),
),
# Other
ConfigEnvOption(
name="extra_scripts",
oldnames=["extra_script"],
multiple=True,
sysenvvar="PLATFORMIO_EXTRA_SCRIPTS",
),
]
]
])
)


@ -38,7 +38,6 @@ except ImportError:
class TelemetryBase(object):
def __init__(self):
self._params = {}
@ -64,17 +63,17 @@ class MeasurementProtocol(TelemetryBase):
"event_category": "ec",
"event_action": "ea",
"event_label": "el",
"event_value": "ev"
"event_value": "ev",
}
def __init__(self):
super(MeasurementProtocol, self).__init__()
self['v'] = 1
self['tid'] = self.TID
self['cid'] = app.get_cid()
self["v"] = 1
self["tid"] = self.TID
self["cid"] = app.get_cid()
try:
self['sr'] = "%dx%d" % click.get_terminal_size()
self["sr"] = "%dx%d" % click.get_terminal_size()
except ValueError:
pass
@ -93,7 +92,7 @@ class MeasurementProtocol(TelemetryBase):
super(MeasurementProtocol, self).__setitem__(name, value)
def _prefill_appinfo(self):
self['av'] = __version__
self["av"] = __version__
# gather dependent packages
dpdata = []
@ -102,10 +101,9 @@ class MeasurementProtocol(TelemetryBase):
dpdata.append("Caller/%s" % app.get_session_var("caller_id"))
if getenv("PLATFORMIO_IDE"):
dpdata.append("IDE/%s" % getenv("PLATFORMIO_IDE"))
self['an'] = " ".join(dpdata)
self["an"] = " ".join(dpdata)
def _prefill_custom_data(self):
def _filter_args(items):
result = []
stop = False
@ -119,17 +117,16 @@ class MeasurementProtocol(TelemetryBase):
return result
caller_id = str(app.get_session_var("caller_id"))
self['cd1'] = util.get_systype()
self['cd2'] = "Python/%s %s" % (platform.python_version(),
platform.platform())
self["cd1"] = util.get_systype()
self["cd2"] = "Python/%s %s" % (platform.python_version(), platform.platform())
# self['cd3'] = " ".join(_filter_args(sys.argv[1:]))
self['cd4'] = 1 if (not util.is_ci() and
(caller_id or not is_container())) else 0
self["cd4"] = (
1 if (not util.is_ci() and (caller_id or not is_container())) else 0
)
if caller_id:
self['cd5'] = caller_id.lower()
self["cd5"] = caller_id.lower()
def _prefill_screen_name(self):
def _first_arg_from_list(args_, list_):
for _arg in args_:
if _arg in list_:
@ -146,12 +143,27 @@ class MeasurementProtocol(TelemetryBase):
return
cmd_path = args[:1]
if args[0] in ("platform", "platforms", "serialports", "device",
"settings", "account"):
if args[0] in (
"platform",
"platforms",
"serialports",
"device",
"settings",
"account",
):
cmd_path = args[:2]
if args[0] == "lib" and len(args) > 1:
lib_subcmds = ("builtin", "install", "list", "register", "search",
"show", "stats", "uninstall", "update")
lib_subcmds = (
"builtin",
"install",
"list",
"register",
"search",
"show",
"stats",
"uninstall",
"update",
)
sub_cmd = _first_arg_from_list(args[1:], lib_subcmds)
if sub_cmd:
cmd_path.append(sub_cmd)
@ -165,24 +177,25 @@ class MeasurementProtocol(TelemetryBase):
sub_cmd = _first_arg_from_list(args[2:], remote2_subcmds)
if sub_cmd:
cmd_path.append(sub_cmd)
self['screen_name'] = " ".join([p.title() for p in cmd_path])
self["screen_name"] = " ".join([p.title() for p in cmd_path])
@staticmethod
def _ignore_hit():
if not app.get_setting("enable_telemetry"):
return True
if app.get_session_var("caller_id") and \
all(c in sys.argv for c in ("run", "idedata")):
if app.get_session_var("caller_id") and all(
c in sys.argv for c in ("run", "idedata")
):
return True
return False
def send(self, hittype):
if self._ignore_hit():
return
self['t'] = hittype
self["t"] = hittype
# correct queue time
if "qt" in self._params and isinstance(self['qt'], float):
self['qt'] = int((time() - self['qt']) * 1000)
if "qt" in self._params and isinstance(self["qt"], float):
self["qt"] = int((time() - self["qt"]) * 1000)
MPDataPusher().push(self._params)
@ -202,7 +215,7 @@ class MPDataPusher(object):
# if network is off-line
if self._http_offline:
if "qt" not in item:
item['qt'] = time()
item["qt"] = time()
self._failedque.append(item)
return
@ -243,7 +256,7 @@ class MPDataPusher(object):
item = self._queue.get()
_item = item.copy()
if "qt" not in _item:
_item['qt'] = time()
_item["qt"] = time()
self._failedque.append(_item)
if self._send_data(item):
self._failedque.remove(_item)
@ -259,7 +272,8 @@ class MPDataPusher(object):
"https://ssl.google-analytics.com/collect",
data=data,
headers=util.get_request_defheaders(),
timeout=1)
timeout=1,
)
r.raise_for_status()
return True
except requests.exceptions.HTTPError as e:
@ -284,11 +298,10 @@ def on_command():
def measure_ci():
event = {"category": "CI", "action": "NoName", "label": None}
known_cis = ("TRAVIS", "APPVEYOR", "GITLAB_CI", "CIRCLECI", "SHIPPABLE",
"DRONE")
known_cis = ("TRAVIS", "APPVEYOR", "GITLAB_CI", "CIRCLECI", "SHIPPABLE", "DRONE")
for name in known_cis:
if getenv(name, "false").lower() == "true":
event['action'] = name
event["action"] = name
break
on_event(**event)
@ -307,32 +320,37 @@ def on_run_environment(options, targets):
def on_event(category, action, label=None, value=None, screen_name=None):
mp = MeasurementProtocol()
mp['event_category'] = category[:150]
mp['event_action'] = action[:500]
mp["event_category"] = category[:150]
mp["event_action"] = action[:500]
if label:
mp['event_label'] = label[:500]
mp["event_label"] = label[:500]
if value:
mp['event_value'] = int(value)
mp["event_value"] = int(value)
if screen_name:
mp['screen_name'] = screen_name[:2048]
mp["screen_name"] = screen_name[:2048]
mp.send("event")
def on_exception(e):
def _cleanup_description(text):
text = text.replace("Traceback (most recent call last):", "")
text = re.sub(r'File "([^"]+)"',
lambda m: join(*m.group(1).split(sep)[-2:]),
text,
flags=re.M)
text = re.sub(
r'File "([^"]+)"',
lambda m: join(*m.group(1).split(sep)[-2:]),
text,
flags=re.M,
)
text = re.sub(r"\s+", " ", text, flags=re.M)
return text.strip()
skip_conditions = [
isinstance(e, cls) for cls in (IOError, exception.ReturnErrorCode,
exception.UserSideException,
exception.PlatformIOProjectException)
isinstance(e, cls)
for cls in (
IOError,
exception.ReturnErrorCode,
exception.UserSideException,
exception.PlatformIOProjectException,
)
]
try:
skip_conditions.append("[API] Account: " in str(e))
@ -340,14 +358,16 @@ def on_exception(e):
e = ue
if any(skip_conditions):
return
is_crash = any([
not isinstance(e, exception.PlatformioException),
"Error" in e.__class__.__name__
])
is_crash = any(
[
not isinstance(e, exception.PlatformioException),
"Error" in e.__class__.__name__,
]
)
mp = MeasurementProtocol()
description = _cleanup_description(format_exc() if is_crash else str(e))
mp['exd'] = ("%s: %s" % (type(e).__name__, description))[:2048]
mp['exf'] = 1 if is_crash else 0
mp["exd"] = ("%s: %s" % (type(e).__name__, description))[:2048]
mp["exf"] = 1 if is_crash else 0
mp.send("exception")
@ -373,7 +393,7 @@ def backup_reports(items):
KEEP_MAX_REPORTS = 100
tm = app.get_state_item("telemetry", {})
if "backup" not in tm:
tm['backup'] = []
tm["backup"] = []
for params in items:
# skip static options
@ -383,28 +403,28 @@ def backup_reports(items):
# store time in UNIX format
if "qt" not in params:
params['qt'] = time()
elif not isinstance(params['qt'], float):
params['qt'] = time() - (params['qt'] / 1000)
params["qt"] = time()
elif not isinstance(params["qt"], float):
params["qt"] = time() - (params["qt"] / 1000)
tm['backup'].append(params)
tm["backup"].append(params)
tm['backup'] = tm['backup'][KEEP_MAX_REPORTS * -1:]
tm["backup"] = tm["backup"][KEEP_MAX_REPORTS * -1 :]
app.set_state_item("telemetry", tm)
def resend_backuped_reports():
tm = app.get_state_item("telemetry", {})
if "backup" not in tm or not tm['backup']:
if "backup" not in tm or not tm["backup"]:
return False
for report in tm['backup']:
for report in tm["backup"]:
mp = MeasurementProtocol()
for key, value in report.items():
mp[key] = value
mp.send(report['t'])
mp.send(report["t"])
# clean
tm['backup'] = []
tm["backup"] = []
app.set_state_item("telemetry", tm)
return True


@ -24,7 +24,6 @@ from platformio import exception, util
class ArchiveBase(object):
def __init__(self, arhfileobj):
self._afo = arhfileobj
@ -46,7 +45,6 @@ class ArchiveBase(object):
class TARArchive(ArchiveBase):
def __init__(self, archpath):
super(TARArchive, self).__init__(tarfile_open(archpath))
@ -62,7 +60,6 @@ class TARArchive(ArchiveBase):
class ZIPArchive(ArchiveBase):
def __init__(self, archpath):
super(ZIPArchive, self).__init__(ZipFile(archpath))
@ -74,8 +71,10 @@ class ZIPArchive(ArchiveBase):
@staticmethod
def preserve_mtime(item, dest_dir):
util.change_filemtime(join(dest_dir, item.filename),
mktime(tuple(item.date_time) + tuple([0, 0, 0])))
util.change_filemtime(
join(dest_dir, item.filename),
mktime(tuple(item.date_time) + tuple([0, 0, 0])),
)
def get_items(self):
return self._afo.infolist()
@ -92,7 +91,6 @@ class ZIPArchive(ArchiveBase):
class FileUnpacker(object):
def __init__(self, archpath):
self.archpath = archpath
self._unpacker = None


@ -40,7 +40,6 @@ from platformio.proc import is_ci # pylint: disable=unused-import
class memoized(object):
def __init__(self, expire=0):
expire = str(expire)
if expire.isdigit():
@ -51,13 +50,12 @@ class memoized(object):
self.cache = {}
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
key = str(args) + str(kwargs)
if (key not in self.cache
or (self.expire > 0
and self.cache[key][0] < time.time() - self.expire)):
if key not in self.cache or (
self.expire > 0 and self.cache[key][0] < time.time() - self.expire
):
self.cache[key] = (time.time(), func(*args, **kwargs))
return self.cache[key][1]
@ -69,13 +67,11 @@ class memoized(object):
class throttle(object):
def __init__(self, threshhold):
self.threshhold = threshhold # milliseconds
self.last = 0
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
diff = int(round((time.time() - self.last) * 1000))
@ -166,17 +162,14 @@ def get_logical_devices():
if WINDOWS:
try:
result = exec_command(
["wmic", "logicaldisk", "get",
"name,VolumeName"]).get("out", "")
["wmic", "logicaldisk", "get", "name,VolumeName"]
).get("out", "")
devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
for line in result.split("\n"):
match = devicenamere.match(line.strip())
if not match:
continue
items.append({
"path": match.group(1) + "\\",
"name": match.group(2)
})
items.append({"path": match.group(1) + "\\", "name": match.group(2)})
return items
except WindowsError: # pylint: disable=undefined-variable
pass
@ -192,10 +185,7 @@ def get_logical_devices():
match = devicenamere.match(line.strip())
if not match:
continue
items.append({
"path": match.group(1),
"name": os.path.basename(match.group(1))
})
items.append({"path": match.group(1), "name": os.path.basename(match.group(1))})
return items
@ -205,22 +195,20 @@ def get_mdns_services():
except ImportError:
from site import addsitedir
from platformio.managers.core import get_core_package_dir
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
addsitedir(contrib_pysite_dir)
sys.path.insert(0, contrib_pysite_dir)
import zeroconf
class mDNSListener(object):
def __init__(self):
self._zc = zeroconf.Zeroconf(
interfaces=zeroconf.InterfaceChoice.All)
self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
self._found_types = []
self._found_services = []
def __enter__(self):
zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.",
self)
zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.", self)
return self
def __exit__(self, etype, value, traceback):
@ -233,8 +221,7 @@ def get_mdns_services():
try:
assert zeroconf.service_type_name(name)
assert str(name)
except (AssertionError, UnicodeError,
zeroconf.BadTypeInNameException):
except (AssertionError, UnicodeError, zeroconf.BadTypeInNameException):
return
if name not in self._found_types:
self._found_types.append(name)
@ -255,29 +242,29 @@ def get_mdns_services():
if service.properties:
try:
properties = {
k.decode("utf8"):
v.decode("utf8") if isinstance(v, bytes) else v
k.decode("utf8"): v.decode("utf8")
if isinstance(v, bytes)
else v
for k, v in service.properties.items()
}
json.dumps(properties)
except UnicodeDecodeError:
properties = None
items.append({
"type":
service.type,
"name":
service.name,
"ip":
".".join([
str(c if isinstance(c, int) else ord(c))
for c in service.address
]),
"port":
service.port,
"properties":
properties
})
items.append(
{
"type": service.type,
"name": service.name,
"ip": ".".join(
[
str(c if isinstance(c, int) else ord(c))
for c in service.address
]
),
"port": service.port,
"properties": properties,
}
)
return items
@ -293,10 +280,8 @@ def _api_request_session():
@throttle(500)
def _get_api_result(
url, # pylint: disable=too-many-branches
params=None,
data=None,
auth=None):
url, params=None, data=None, auth=None # pylint: disable=too-many-branches
):
from platformio.app import get_setting
result = {}
@ -311,30 +296,29 @@ def _get_api_result(
try:
if data:
r = _api_request_session().post(url,
params=params,
data=data,
headers=headers,
auth=auth,
verify=verify_ssl)
r = _api_request_session().post(
url,
params=params,
data=data,
headers=headers,
auth=auth,
verify=verify_ssl,
)
else:
r = _api_request_session().get(url,
params=params,
headers=headers,
auth=auth,
verify=verify_ssl)
r = _api_request_session().get(
url, params=params, headers=headers, auth=auth, verify=verify_ssl
)
result = r.json()
r.raise_for_status()
return r.text
except requests.exceptions.HTTPError as e:
if result and "message" in result:
raise exception.APIRequestError(result['message'])
raise exception.APIRequestError(result["message"])
if result and "errors" in result:
raise exception.APIRequestError(result['errors'][0]['title'])
raise exception.APIRequestError(result["errors"][0]["title"])
raise exception.APIRequestError(e)
except ValueError:
raise exception.APIRequestError("Invalid response: %s" %
r.text.encode("utf-8"))
raise exception.APIRequestError("Invalid response: %s" % r.text.encode("utf-8"))
finally:
if r:
r.close()
@ -343,10 +327,12 @@ def _get_api_result(
def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
from platformio.app import ContentCache
total = 0
max_retries = 5
cache_key = (ContentCache.key_from_args(url, params, data, auth)
if cache_valid else None)
cache_key = (
ContentCache.key_from_args(url, params, data, auth) if cache_valid else None
)
while total < max_retries:
try:
with ContentCache() as cc:
@ -363,24 +349,24 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
return json.loads(result)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout) as e:
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
total += 1
if not PlatformioCLI.in_silence():
click.secho(
"[API] ConnectionError: {0} (incremented retry: max={1}, "
"total={2})".format(e, max_retries, total),
fg="yellow")
fg="yellow",
)
time.sleep(2 * total)
raise exception.APIRequestError(
"Could not connect to PlatformIO API Service. "
"Please try later.")
"Could not connect to PlatformIO API Service. " "Please try later."
)
PING_INTERNET_IPS = [
"192.30.253.113", # github.com
"193.222.52.25" # dl.platformio.org
"193.222.52.25", # dl.platformio.org
]
@ -391,12 +377,9 @@ def _internet_on():
for ip in PING_INTERNET_IPS:
try:
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
requests.get("http://%s" % ip,
allow_redirects=False,
timeout=timeout)
requests.get("http://%s" % ip, allow_redirects=False, timeout=timeout)
else:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
(ip, 80))
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, 80))
return True
except: # pylint: disable=bare-except
pass
@ -438,8 +421,7 @@ def merge_dicts(d1, d2, path=None):
if path is None:
path = []
for key in d2:
if (key in d1 and isinstance(d1[key], dict)
and isinstance(d2[key], dict)):
if key in d1 and isinstance(d1[key], dict) and isinstance(d2[key], dict):
merge_dicts(d1[key], d2[key], path + [str(key)])
else:
d1[key] = d2[key]
@ -450,9 +432,7 @@ def print_labeled_bar(label, is_error=False, fg=None):
terminal_width, _ = click.get_terminal_size()
width = len(click.unstyle(label))
half_line = "=" * int((terminal_width - width - 2) / 2)
click.secho("%s %s %s" % (half_line, label, half_line),
fg=fg,
err=is_error)
click.secho("%s %s %s" % (half_line, label, half_line), fg=fg, err=is_error)
def humanize_duration_time(duration):


@ -27,7 +27,6 @@ except ImportError:
class VCSClientFactory(object):
@staticmethod
def newClient(src_dir, remote_url, silent=False):
result = urlparse(remote_url)
@ -38,15 +37,14 @@ class VCSClientFactory(object):
remote_url = remote_url[4:]
elif "+" in result.scheme:
type_, _ = result.scheme.split("+", 1)
remote_url = remote_url[len(type_) + 1:]
remote_url = remote_url[len(type_) + 1 :]
if "#" in remote_url:
remote_url, tag = remote_url.rsplit("#", 1)
if not type_:
raise PlatformioException("VCS: Unknown repository type %s" %
remote_url)
obj = getattr(modules[__name__],
"%sClient" % type_.title())(src_dir, remote_url, tag,
silent)
raise PlatformioException("VCS: Unknown repository type %s" % remote_url)
obj = getattr(modules[__name__], "%sClient" % type_.title())(
src_dir, remote_url, tag, silent
)
assert isinstance(obj, VCSClientBase)
return obj
@ -71,8 +69,8 @@ class VCSClientBase(object):
assert self.run_cmd(["--version"])
except (AssertionError, OSError, PlatformioException):
raise UserSideException(
"VCS: `%s` client is not installed in your system" %
self.command)
"VCS: `%s` client is not installed in your system" % self.command
)
return True
@property
@ -98,24 +96,23 @@ class VCSClientBase(object):
def run_cmd(self, args, **kwargs):
args = [self.command] + args
if "cwd" not in kwargs:
kwargs['cwd'] = self.src_dir
kwargs["cwd"] = self.src_dir
try:
check_call(args, **kwargs)
return True
except CalledProcessError as e:
raise PlatformioException("VCS: Could not process command %s" %
e.cmd)
raise PlatformioException("VCS: Could not process command %s" % e.cmd)
def get_cmd_output(self, args, **kwargs):
args = [self.command] + args
if "cwd" not in kwargs:
kwargs['cwd'] = self.src_dir
kwargs["cwd"] = self.src_dir
result = exec_command(args, **kwargs)
if result['returncode'] == 0:
return result['out'].strip()
if result["returncode"] == 0:
return result["out"].strip()
raise PlatformioException(
"VCS: Could not receive an output from `%s` command (%s)" %
(args, result))
"VCS: Could not receive an output from `%s` command (%s)" % (args, result)
)
class GitClient(VCSClientBase):
@ -127,7 +124,8 @@ class GitClient(VCSClientBase):
return VCSClientBase.check_client(self)
except UserSideException:
raise UserSideException(
"Please install Git client from https://git-scm.com/downloads")
"Please install Git client from https://git-scm.com/downloads"
)
def get_branches(self):
output = self.get_cmd_output(["branch"])
@ -232,7 +230,8 @@ class SvnClient(VCSClientBase):
def get_current_revision(self):
output = self.get_cmd_output(
["info", "--non-interactive", "--trust-server-cert", "-r", "HEAD"])
["info", "--non-interactive", "--trust-server-cert", "-r", "HEAD"]
)
for line in output.split("\n"):
line = line.strip()
if line.startswith("Revision:"):

View File

@ -23,7 +23,7 @@ def test_board_json_output(clirunner, validate_cliresult):
validate_cliresult(result)
boards = json.loads(result.output)
assert isinstance(boards, list)
assert any(["mbed" in b['frameworks'] for b in boards])
assert any(["mbed" in b["frameworks"] for b in boards])
def test_board_raw_output(clirunner, validate_cliresult):
@ -33,8 +33,7 @@ def test_board_raw_output(clirunner, validate_cliresult):
def test_board_options(clirunner, validate_cliresult):
required_opts = set(
["fcpu", "frameworks", "id", "mcu", "name", "platform"])
required_opts = set(["fcpu", "frameworks", "id", "mcu", "name", "platform"])
# fetch available platforms
result = clirunner.invoke(cmd_platform_search, ["--json-output"])
@ -42,7 +41,7 @@ def test_board_options(clirunner, validate_cliresult):
search_result = json.loads(result.output)
assert isinstance(search_result, list)
assert len(search_result)
platforms = [item['name'] for item in search_result]
platforms = [item["name"] for item in search_result]
result = clirunner.invoke(cmd_boards, ["mbed", "--json-output"])
validate_cliresult(result)
@ -50,4 +49,4 @@ def test_board_options(clirunner, validate_cliresult):
for board in boards:
assert required_opts.issubset(set(board))
assert board['platform'] in platforms
assert board["platform"] in platforms

View File

@ -90,47 +90,43 @@ def test_check_cli_output(clirunner, check_dir):
errors, warnings, style = count_defects(result.output)
assert (result.exit_code != 0)
assert (errors + warnings + style == EXPECTED_DEFECTS)
assert result.exit_code != 0
assert errors + warnings + style == EXPECTED_DEFECTS
def test_check_json_output(clirunner, check_dir):
result = clirunner.invoke(
cmd_check,
["--project-dir", str(check_dir), "--json-output"])
cmd_check, ["--project-dir", str(check_dir), "--json-output"]
)
output = json.loads(result.stdout.strip())
assert isinstance(output, list)
assert (len(output[0].get("defects", [])) == EXPECTED_DEFECTS)
assert len(output[0].get("defects", [])) == EXPECTED_DEFECTS
def test_check_tool_defines_passed(clirunner, check_dir):
result = clirunner.invoke(
cmd_check,
["--project-dir", str(check_dir), "--verbose"])
result = clirunner.invoke(cmd_check, ["--project-dir", str(check_dir), "--verbose"])
output = result.output
assert ("PLATFORMIO=" in output)
assert ("__GNUC__" in output)
assert "PLATFORMIO=" in output
assert "__GNUC__" in output
def test_check_severity_threshold(clirunner, check_dir):
result = clirunner.invoke(
cmd_check,
["--project-dir", str(check_dir), "--severity=high"])
cmd_check, ["--project-dir", str(check_dir), "--severity=high"]
)
errors, warnings, style = count_defects(result.output)
assert (result.exit_code != 0)
assert (errors == EXPECTED_ERRORS)
assert (warnings == 0)
assert (style == 0)
assert result.exit_code != 0
assert errors == EXPECTED_ERRORS
assert warnings == 0
assert style == 0
def test_check_includes_passed(clirunner, check_dir):
result = clirunner.invoke(
cmd_check,
["--project-dir", str(check_dir), "--verbose"])
result = clirunner.invoke(cmd_check, ["--project-dir", str(check_dir), "--verbose"])
output = result.output
inc_count = 0
@ -139,13 +135,11 @@ def test_check_includes_passed(clirunner, check_dir):
inc_count = l.count("-I")
# at least 1 include path for default mode
assert (inc_count > 1)
assert inc_count > 1
def test_check_silent_mode(clirunner, check_dir):
result = clirunner.invoke(
cmd_check,
["--project-dir", str(check_dir), "--silent"])
result = clirunner.invoke(cmd_check, ["--project-dir", str(check_dir), "--silent"])
errors, warnings, style = count_defects(result.output)
@ -159,9 +153,8 @@ def test_check_filter_sources(clirunner, check_dir):
check_dir.mkdir(join("src", "app")).join("additional.cpp").write(TEST_CODE)
result = clirunner.invoke(
cmd_check,
["--project-dir",
str(check_dir), "--filter=-<*> +<src/app/>"])
cmd_check, ["--project-dir", str(check_dir), "--filter=-<*> +<src/app/>"]
)
errors, warnings, style = count_defects(result.output)
@ -187,8 +180,8 @@ def test_check_failed_if_no_source_files(clirunner, tmpdir):
def test_check_failed_if_bad_flag_passed(clirunner, check_dir):
result = clirunner.invoke(
cmd_check, ["--project-dir",
str(check_dir), '"--flags=--UNKNOWN"'])
cmd_check, ["--project-dir", str(check_dir), '"--flags=--UNKNOWN"']
)
errors, warnings, style = count_defects(result.output)
@ -200,7 +193,8 @@ def test_check_failed_if_bad_flag_passed(clirunner, check_dir):
def test_check_success_if_no_errors(clirunner, tmpdir):
tmpdir.join("platformio.ini").write(DEFAULT_CONFIG)
tmpdir.mkdir("src").join("main.c").write("""
tmpdir.mkdir("src").join("main.c").write(
"""
#include <stdlib.h>
void unused_functin(){
@ -211,7 +205,8 @@ void unused_functin(){
int main() {
}
""")
"""
)
result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
@ -243,14 +238,17 @@ def test_check_individual_flags_passed(clirunner, tmpdir):
def test_check_cppcheck_misra_addon(clirunner, check_dir):
check_dir.join("misra.json").write("""
check_dir.join("misra.json").write(
"""
{
"script": "addons/misra.py",
"args": ["--rule-texts=rules.txt"]
}
""")
"""
)
check_dir.join("rules.txt").write("""
check_dir.join("rules.txt").write(
"""
Appendix A Summary of guidelines
Rule 3.1 Required
R3.1 text.
@ -272,12 +270,12 @@ Rule 21.3 Required
R21.3 Found MISRA defect
Rule 21.4
R21.4 text.
""")
"""
)
result = clirunner.invoke(
cmd_check,
["--project-dir",
str(check_dir), "--flags=--addon=misra.json"])
cmd_check, ["--project-dir", str(check_dir), "--flags=--addon=misra.json"]
)
assert result.exit_code != 0
assert "R21.3 Found MISRA defect" in result.output

View File

@ -25,37 +25,63 @@ def test_ci_empty(clirunner):
def test_ci_boards(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_ci, [
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno", "-b",
"leonardo"
])
result = clirunner.invoke(
cmd_ci,
[
join("examples", "wiring-blink", "src", "main.cpp"),
"-b",
"uno",
"-b",
"leonardo",
],
)
validate_cliresult(result)
def test_ci_build_dir(clirunner, tmpdir_factory, validate_cliresult):
build_dir = str(tmpdir_factory.mktemp("ci_build_dir"))
result = clirunner.invoke(cmd_ci, [
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno",
"--build-dir", build_dir
])
result = clirunner.invoke(
cmd_ci,
[
join("examples", "wiring-blink", "src", "main.cpp"),
"-b",
"uno",
"--build-dir",
build_dir,
],
)
validate_cliresult(result)
assert not isfile(join(build_dir, "platformio.ini"))
def test_ci_keep_build_dir(clirunner, tmpdir_factory, validate_cliresult):
build_dir = str(tmpdir_factory.mktemp("ci_build_dir"))
result = clirunner.invoke(cmd_ci, [
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno",
"--build-dir", build_dir, "--keep-build-dir"
])
result = clirunner.invoke(
cmd_ci,
[
join("examples", "wiring-blink", "src", "main.cpp"),
"-b",
"uno",
"--build-dir",
build_dir,
"--keep-build-dir",
],
)
validate_cliresult(result)
assert isfile(join(build_dir, "platformio.ini"))
# 2nd attempt
result = clirunner.invoke(cmd_ci, [
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "metro",
"--build-dir", build_dir, "--keep-build-dir"
])
result = clirunner.invoke(
cmd_ci,
[
join("examples", "wiring-blink", "src", "main.cpp"),
"-b",
"metro",
"--build-dir",
build_dir,
"--keep-build-dir",
],
)
validate_cliresult(result)
assert "board: uno" in result.output
@ -64,10 +90,14 @@ def test_ci_keep_build_dir(clirunner, tmpdir_factory, validate_cliresult):
def test_ci_project_conf(clirunner, validate_cliresult):
project_dir = join("examples", "wiring-blink")
result = clirunner.invoke(cmd_ci, [
join(project_dir, "src", "main.cpp"), "--project-conf",
join(project_dir, "platformio.ini")
])
result = clirunner.invoke(
cmd_ci,
[
join(project_dir, "src", "main.cpp"),
"--project-conf",
join(project_dir, "platformio.ini"),
],
)
validate_cliresult(result)
assert "uno" in result.output
@ -75,12 +105,24 @@ def test_ci_project_conf(clirunner, validate_cliresult):
def test_ci_lib_and_board(clirunner, tmpdir_factory, validate_cliresult):
storage_dir = str(tmpdir_factory.mktemp("lib"))
result = clirunner.invoke(
cmd_lib, ["--storage-dir", storage_dir, "install", "1@2.3.2"])
cmd_lib, ["--storage-dir", storage_dir, "install", "1@2.3.2"]
)
validate_cliresult(result)
result = clirunner.invoke(cmd_ci, [
join(storage_dir, "OneWire_ID1", "examples", "DS2408_Switch",
"DS2408_Switch.pde"), "-l",
join(storage_dir, "OneWire_ID1"), "-b", "uno"
])
result = clirunner.invoke(
cmd_ci,
[
join(
storage_dir,
"OneWire_ID1",
"examples",
"DS2408_Switch",
"DS2408_Switch.pde",
),
"-l",
join(storage_dir, "OneWire_ID1"),
"-b",
"uno",
],
)
validate_cliresult(result)
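
test_ci.py shows what happens when the over-long argument is itself a list literal: the outer call is exploded first, then the list's elements go one per line, again with a trailing comma, so each nested bracket closes on its own dedented line. A hypothetical invocation in the same shape; invoke() here only mirrors the clirunner.invoke(...) calls above:

    # hypothetical command runner, mirroring the clirunner.invoke(...) calls above
    def invoke(command, arguments):
        return command, arguments

    result = invoke(
        "ci",
        [
            "examples/wiring-blink/src/main.cpp",
            "-b",
            "uno",
            "--build-dir",
            "/tmp/ci_build",
        ],
    )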

View File

@ -25,8 +25,7 @@ from platformio.project.config import ProjectConfig
def validate_pioproject(pioproject_dir):
pioconf_path = join(pioproject_dir, "platformio.ini")
assert isfile(pioconf_path) and getsize(pioconf_path) > 0
assert isdir(join(pioproject_dir, "src")) and isdir(
join(pioproject_dir, "lib"))
assert isdir(join(pioproject_dir, "src")) and isdir(join(pioproject_dir, "lib"))
def test_init_default(clirunner, validate_cliresult):
@ -66,18 +65,17 @@ def test_init_ide_without_board(clirunner, tmpdir):
def test_init_ide_atom(clirunner, validate_cliresult, tmpdir):
with tmpdir.as_cwd():
result = clirunner.invoke(
cmd_init, ["--ide", "atom", "-b", "uno", "-b", "teensy31"])
cmd_init, ["--ide", "atom", "-b", "uno", "-b", "teensy31"]
)
validate_cliresult(result)
validate_pioproject(str(tmpdir))
assert all([
tmpdir.join(f).check()
for f in (".clang_complete", ".gcc-flags.json")
])
assert all(
[tmpdir.join(f).check() for f in (".clang_complete", ".gcc-flags.json")]
)
assert "arduinoavr" in tmpdir.join(".clang_complete").read()
# switch to NodeMCU
result = clirunner.invoke(cmd_init,
["--ide", "atom", "-b", "nodemcuv2"])
result = clirunner.invoke(cmd_init, ["--ide", "atom", "-b", "nodemcuv2"])
validate_cliresult(result)
validate_pioproject(str(tmpdir))
assert "arduinoespressif" in tmpdir.join(".clang_complete").read()
@ -110,46 +108,49 @@ def test_init_special_board(clirunner, validate_cliresult):
config = ProjectConfig(join(getcwd(), "platformio.ini"))
config.validate()
expected_result = dict(platform=str(boards[0]['platform']),
board="uno",
framework=[str(boards[0]['frameworks'][0])])
expected_result = dict(
platform=str(boards[0]["platform"]),
board="uno",
framework=[str(boards[0]["frameworks"][0])],
)
assert config.has_section("env:uno")
assert sorted(config.items(env="uno", as_dict=True).items()) == sorted(
expected_result.items())
expected_result.items()
)
def test_init_enable_auto_uploading(clirunner, validate_cliresult):
with clirunner.isolated_filesystem():
result = clirunner.invoke(
cmd_init, ["-b", "uno", "--project-option", "targets=upload"])
cmd_init, ["-b", "uno", "--project-option", "targets=upload"]
)
validate_cliresult(result)
validate_pioproject(getcwd())
config = ProjectConfig(join(getcwd(), "platformio.ini"))
config.validate()
expected_result = dict(targets=["upload"],
platform="atmelavr",
board="uno",
framework=["arduino"])
expected_result = dict(
targets=["upload"], platform="atmelavr", board="uno", framework=["arduino"]
)
assert config.has_section("env:uno")
assert sorted(config.items(env="uno", as_dict=True).items()) == sorted(
expected_result.items())
expected_result.items()
)
def test_init_custom_framework(clirunner, validate_cliresult):
with clirunner.isolated_filesystem():
result = clirunner.invoke(
cmd_init, ["-b", "teensy31", "--project-option", "framework=mbed"])
cmd_init, ["-b", "teensy31", "--project-option", "framework=mbed"]
)
validate_cliresult(result)
validate_pioproject(getcwd())
config = ProjectConfig(join(getcwd(), "platformio.ini"))
config.validate()
expected_result = dict(platform="teensy",
board="teensy31",
framework=["mbed"])
expected_result = dict(platform="teensy", board="teensy31", framework=["mbed"])
assert config.has_section("env:teensy31")
assert sorted(config.items(env="teensy31",
as_dict=True).items()) == sorted(
expected_result.items())
assert sorted(config.items(env="teensy31", as_dict=True).items()) == sorted(
expected_result.items()
)
def test_init_incorrect_board(clirunner):

View File

@ -28,19 +28,25 @@ def test_search(clirunner, validate_cliresult):
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
assert int(match.group(1)) > 2
result = clirunner.invoke(cmd_lib,
["search", "DHT22", "--platform=timsp430"])
result = clirunner.invoke(cmd_lib, ["search", "DHT22", "--platform=timsp430"])
validate_cliresult(result)
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
assert int(match.group(1)) > 1
def test_global_install_registry(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cmd_lib, [
"-g", "install", "64", "ArduinoJson@~5.10.0", "547@2.2.4",
"AsyncMqttClient@<=0.8.2", "999@77d4eb3f8a"
])
def test_global_install_registry(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(
cmd_lib,
[
"-g",
"install",
"64",
"ArduinoJson@~5.10.0",
"547@2.2.4",
"AsyncMqttClient@<=0.8.2",
"999@77d4eb3f8a",
],
)
validate_cliresult(result)
# install unknown library
@ -50,29 +56,40 @@ def test_global_install_registry(clirunner, validate_cliresult,
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"ArduinoJson_ID64", "ArduinoJson_ID64@5.10.1", "NeoPixelBus_ID547",
"AsyncMqttClient_ID346", "ESPAsyncTCP_ID305", "AsyncTCP_ID1826",
"RFcontrol_ID999"
"ArduinoJson_ID64",
"ArduinoJson_ID64@5.10.1",
"NeoPixelBus_ID547",
"AsyncMqttClient_ID346",
"ESPAsyncTCP_ID305",
"AsyncTCP_ID1826",
"RFcontrol_ID999",
]
assert set(items1) == set(items2)
def test_global_install_archive(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
"SomeLib=http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip"
])
def test_global_install_archive(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(
cmd_lib,
[
"-g",
"install",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
"SomeLib=http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
],
)
validate_cliresult(result)
# incorrect requirements
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@1.2.3"
])
result = clirunner.invoke(
cmd_lib,
[
"-g",
"install",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@1.2.3",
],
)
assert result.exit_code != 0
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
@ -80,8 +97,7 @@ def test_global_install_archive(clirunner, validate_cliresult,
assert set(items1) >= set(items2)
def test_global_install_repository(clirunner, validate_cliresult,
isolated_pio_home):
def test_global_install_repository(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(
cmd_lib,
[
@ -93,24 +109,28 @@ def test_global_install_repository(clirunner, validate_cliresult,
"https://gitlab.com/ivankravets/rs485-nodeproto.git",
"https://github.com/platformio/platformio-libmirror.git",
# "https://developer.mbed.org/users/simon/code/TextLCD/",
"knolleary/pubsubclient#bef58148582f956dfa772687db80c44e2279a163"
])
"knolleary/pubsubclient#bef58148582f956dfa772687db80c44e2279a163",
],
)
validate_cliresult(result)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"PJON", "PJON@src-79de467ebe19de18287becff0a1fb42d",
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", "rs485-nodeproto",
"platformio-libmirror", "PubSubClient"
"PJON",
"PJON@src-79de467ebe19de18287becff0a1fb42d",
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
"rs485-nodeproto",
"platformio-libmirror",
"PubSubClient",
]
assert set(items1) >= set(items2)
def test_install_duplicates(clirunner, validate_cliresult, without_internet):
# registry
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://dl.platformio.org/libraries/archives/0/9540.tar.gz"
])
result = clirunner.invoke(
cmd_lib,
["-g", "install", "http://dl.platformio.org/libraries/archives/0/9540.tar.gz"],
)
validate_cliresult(result)
assert "is already installed" in result.output
@ -120,18 +140,22 @@ def test_install_duplicates(clirunner, validate_cliresult, without_internet):
assert "is already installed" in result.output
# archive
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip"
])
result = clirunner.invoke(
cmd_lib,
[
"-g",
"install",
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
],
)
validate_cliresult(result)
assert "is already installed" in result.output
# repository
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"https://github.com/platformio/platformio-libmirror.git"
])
result = clirunner.invoke(
cmd_lib,
["-g", "install", "https://github.com/platformio/platformio-libmirror.git"],
)
validate_cliresult(result)
assert "is already installed" in result.output
@ -139,27 +163,48 @@ def test_install_duplicates(clirunner, validate_cliresult, without_internet):
def test_global_lib_list(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["-g", "list"])
validate_cliresult(result)
assert all([
n in result.output for n in
("Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
"Version: 5.10.1",
"Source: git+https://github.com/gioblu/PJON.git#3.0",
"Version: 1fb26fd")
])
assert all(
[
n in result.output
for n in (
"Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
"Version: 5.10.1",
"Source: git+https://github.com/gioblu/PJON.git#3.0",
"Version: 1fb26fd",
)
]
)
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
assert all([
n in result.output for n in
("__pkg_dir",
'"__src_url": "git+https://gitlab.com/ivankravets/rs485-nodeproto.git"',
'"version": "5.10.1"')
])
items1 = [i['name'] for i in json.loads(result.output)]
assert all(
[
n in result.output
for n in (
"__pkg_dir",
'"__src_url": "git+https://gitlab.com/ivankravets/rs485-nodeproto.git"',
'"version": "5.10.1"',
)
]
)
items1 = [i["name"] for i in json.loads(result.output)]
items2 = [
"ESP32WebServer", "ArduinoJson", "ArduinoJson", "ArduinoJson",
"ArduinoJson", "AsyncMqttClient", "AsyncTCP", "SomeLib", "ESPAsyncTCP",
"NeoPixelBus", "OneWire", "PJON", "PJON", "PubSubClient", "RFcontrol",
"platformio-libmirror", "rs485-nodeproto"
"ESP32WebServer",
"ArduinoJson",
"ArduinoJson",
"ArduinoJson",
"ArduinoJson",
"AsyncMqttClient",
"AsyncTCP",
"SomeLib",
"ESPAsyncTCP",
"NeoPixelBus",
"OneWire",
"PJON",
"PJON",
"PubSubClient",
"RFcontrol",
"platformio-libmirror",
"rs485-nodeproto",
]
assert sorted(items1) == sorted(items2)
@ -167,33 +212,37 @@ def test_global_lib_list(clirunner, validate_cliresult):
"{name}@{version}".format(**item) for item in json.loads(result.output)
]
versions2 = [
"ArduinoJson@5.8.2", "ArduinoJson@5.10.1", "AsyncMqttClient@0.8.2",
"NeoPixelBus@2.2.4", "PJON@07fe9aa", "PJON@1fb26fd",
"PubSubClient@bef5814", "RFcontrol@77d4eb3f8a"
"ArduinoJson@5.8.2",
"ArduinoJson@5.10.1",
"AsyncMqttClient@0.8.2",
"NeoPixelBus@2.2.4",
"PJON@07fe9aa",
"PJON@1fb26fd",
"PubSubClient@bef5814",
"RFcontrol@77d4eb3f8a",
]
assert set(versions1) >= set(versions2)
def test_global_lib_update_check(clirunner, validate_cliresult):
result = clirunner.invoke(
cmd_lib, ["-g", "update", "--only-check", "--json-output"])
cmd_lib, ["-g", "update", "--only-check", "--json-output"]
)
validate_cliresult(result)
output = json.loads(result.output)
assert set(["RFcontrol",
"NeoPixelBus"]) == set([l['name'] for l in output])
assert set(["RFcontrol", "NeoPixelBus"]) == set([l["name"] for l in output])
def test_global_lib_update(clirunner, validate_cliresult):
# update library using package directory
result = clirunner.invoke(
cmd_lib,
["-g", "update", "NeoPixelBus", "--only-check", "--json-output"])
cmd_lib, ["-g", "update", "NeoPixelBus", "--only-check", "--json-output"]
)
validate_cliresult(result)
oudated = json.loads(result.output)
assert len(oudated) == 1
assert "__pkg_dir" in oudated[0]
result = clirunner.invoke(cmd_lib,
["-g", "update", oudated[0]['__pkg_dir']])
result = clirunner.invoke(cmd_lib, ["-g", "update", oudated[0]["__pkg_dir"]])
validate_cliresult(result)
assert "Uninstalling NeoPixelBus @ 2.2.4" in result.output
@ -210,31 +259,43 @@ def test_global_lib_update(clirunner, validate_cliresult):
assert isinstance(result.exception, exception.UnknownPackage)
def test_global_lib_uninstall(clirunner, validate_cliresult,
isolated_pio_home):
def test_global_lib_uninstall(clirunner, validate_cliresult, isolated_pio_home):
# uninstall using package directory
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
validate_cliresult(result)
items = json.loads(result.output)
result = clirunner.invoke(cmd_lib,
["-g", "uninstall", items[5]['__pkg_dir']])
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", items[5]["__pkg_dir"]])
validate_cliresult(result)
assert "Uninstalling AsyncTCP" in result.output
# uninstall the rest libraries
result = clirunner.invoke(cmd_lib, [
"-g", "uninstall", "1", "https://github.com/bblanchon/ArduinoJson.git",
"ArduinoJson@!=5.6.7", "RFcontrol"
])
result = clirunner.invoke(
cmd_lib,
[
"-g",
"uninstall",
"1",
"https://github.com/bblanchon/ArduinoJson.git",
"ArduinoJson@!=5.6.7",
"RFcontrol",
],
)
validate_cliresult(result)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"rs485-nodeproto", "platformio-libmirror",
"PubSubClient", "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
"ESPAsyncTCP_ID305", "SomeLib_ID54", "NeoPixelBus_ID547", "PJON",
"AsyncMqttClient_ID346", "ArduinoJson_ID64",
"PJON@src-79de467ebe19de18287becff0a1fb42d", "ESP32WebServer"
"rs485-nodeproto",
"platformio-libmirror",
"PubSubClient",
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
"ESPAsyncTCP_ID305",
"SomeLib_ID54",
"NeoPixelBus_ID547",
"PJON",
"AsyncMqttClient_ID346",
"ArduinoJson_ID64",
"PJON@src-79de467ebe19de18287becff0a1fb42d",
"ESP32WebServer",
]
assert set(items1) == set(items2)
@ -247,8 +308,7 @@ def test_global_lib_uninstall(clirunner, validate_cliresult,
def test_lib_show(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["show", "64"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
assert all([s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
result = clirunner.invoke(cmd_lib, ["show", "OneWire", "--json-output"])
validate_cliresult(result)
assert "OneWire" in result.output
@ -264,14 +324,23 @@ def test_lib_builtin(clirunner, validate_cliresult):
def test_lib_stats(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["stats"])
validate_cliresult(result)
assert all([
s in result.output
for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
])
assert all(
[
s in result.output
for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
]
)
result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
validate_cliresult(result)
assert set([
"dlweek", "added", "updated", "topkeywords", "dlmonth", "dlday",
"lastkeywords"
]) == set(json.loads(result.output).keys())
assert set(
[
"dlweek",
"added",
"updated",
"topkeywords",
"dlmonth",
"dlday",
"lastkeywords",
]
) == set(json.loads(result.output).keys())
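
One limit of the tool is visible in the test_lib.py hunks: Black does not break string literals, so a line dominated by a long URL stays longer than 88 characters instead of being wrapped. An illustrative snippet reusing a URL from the hunk above; the variable name is not part of the commit:

    expected_sources = [
        "Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
    ]  # the long literal is left intact; only the surrounding brackets are re-wrapped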

View File

@ -19,13 +19,14 @@ from platformio.commands import platform as cli_platform
def test_search_json_output(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_search,
["arduino", "--json-output"])
result = clirunner.invoke(
cli_platform.platform_search, ["arduino", "--json-output"]
)
validate_cliresult(result)
search_result = json.loads(result.output)
assert isinstance(search_result, list)
assert search_result
platforms = [item['name'] for item in search_result]
platforms = [item["name"] for item in search_result]
assert "atmelsam" in platforms
@ -36,25 +37,22 @@ def test_search_raw_output(clirunner, validate_cliresult):
def test_install_unknown_version(clirunner):
result = clirunner.invoke(cli_platform.platform_install,
["atmelavr@99.99.99"])
result = clirunner.invoke(cli_platform.platform_install, ["atmelavr@99.99.99"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.UndefinedPackageVersion)
def test_install_unknown_from_registry(clirunner):
result = clirunner.invoke(cli_platform.platform_install,
["unknown-platform"])
result = clirunner.invoke(cli_platform.platform_install, ["unknown-platform"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.UnknownPackage)
def test_install_known_version(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_install, [
"atmelavr@1.2.0", "--skip-default-package", "--with-package",
"tool-avrdude"
])
def test_install_known_version(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(
cli_platform.platform_install,
["atmelavr@1.2.0", "--skip-default-package", "--with-package", "tool-avrdude"],
)
validate_cliresult(result)
assert "atmelavr @ 1.2.0" in result.output
assert "Installing tool-avrdude @" in result.output
@ -62,10 +60,13 @@ def test_install_known_version(clirunner, validate_cliresult,
def test_install_from_vcs(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_install, [
"https://github.com/platformio/"
"platform-espressif8266.git#feature/stage", "--skip-default-package"
])
result = clirunner.invoke(
cli_platform.platform_install,
[
"https://github.com/platformio/" "platform-espressif8266.git#feature/stage",
"--skip-default-package",
],
)
validate_cliresult(result)
assert "espressif8266" in result.output
assert len(isolated_pio_home.join("packages").listdir()) == 1
@ -77,24 +78,24 @@ def test_list_json_output(clirunner, validate_cliresult):
list_result = json.loads(result.output)
assert isinstance(list_result, list)
assert list_result
platforms = [item['name'] for item in list_result]
platforms = [item["name"] for item in list_result]
assert set(["atmelavr", "espressif8266"]) == set(platforms)
def test_list_raw_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list)
validate_cliresult(result)
assert all(
[s in result.output for s in ("atmelavr", "espressif8266")])
assert all([s in result.output for s in ("atmelavr", "espressif8266")])
def test_update_check(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_update,
["--only-check", "--json-output"])
result = clirunner.invoke(
cli_platform.platform_update, ["--only-check", "--json-output"]
)
validate_cliresult(result)
output = json.loads(result.output)
assert len(output) == 1
assert output[0]['name'] == "atmelavr"
assert output[0]["name"] == "atmelavr"
assert len(isolated_pio_home.join("packages").listdir()) == 1
@ -107,7 +108,8 @@ def test_update_raw(clirunner, validate_cliresult, isolated_pio_home):
def test_uninstall(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_uninstall,
["atmelavr", "espressif8266"])
result = clirunner.invoke(
cli_platform.platform_uninstall, ["atmelavr", "espressif8266"]
)
validate_cliresult(result)
assert not isolated_pio_home.join("platforms").listdir()

View File

@ -20,11 +20,18 @@ from platformio import util
def test_local_env():
result = util.exec_command([
"platformio", "test", "-d",
join("examples", "unit-testing", "calculator"), "-e", "native"
])
if result['returncode'] != 1:
result = util.exec_command(
[
"platformio",
"test",
"-d",
join("examples", "unit-testing", "calculator"),
"-e",
"native",
]
)
if result["returncode"] != 1:
pytest.fail(result)
assert all([s in result['err']
for s in ("PASSED", "IGNORED", "FAILED")]), result['out']
assert all([s in result["err"] for s in ("PASSED", "IGNORED", "FAILED")]), result[
"out"
]
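
Black occasionally wraps at a surprising point, as in the assert above where the failure message ends up split across its subscript. The output is valid, but when readability matters an intermediate variable keeps both the condition and the message on clean lines. A hypothetical rewrite under that assumption:

    # "result" here is a stand-in for the dict returned by util.exec_command()
    result = {"err": "PASSED IGNORED FAILED", "out": ""}
    expected_markers = ("PASSED", "IGNORED", "FAILED")
    assert all(s in result["err"] for s in expected_markers), result["out"]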

View File

@ -22,12 +22,9 @@ from platformio import util
@pytest.fixture(scope="session")
def validate_cliresult():
def decorator(result):
assert result.exit_code == 0, "{} => {}".format(
result.exception, result.output)
assert not result.exception, "{} => {}".format(result.exception,
result.output)
assert result.exit_code == 0, "{} => {}".format(result.exception, result.output)
assert not result.exception, "{} => {}".format(result.exception, result.output)
return decorator
@ -40,10 +37,10 @@ def clirunner():
@pytest.fixture(scope="module")
def isolated_pio_home(request, tmpdir_factory):
home_dir = tmpdir_factory.mktemp(".platformio")
os.environ['PLATFORMIO_CORE_DIR'] = str(home_dir)
os.environ["PLATFORMIO_CORE_DIR"] = str(home_dir)
def fin():
del os.environ['PLATFORMIO_CORE_DIR']
del os.environ["PLATFORMIO_CORE_DIR"]
request.addfinalizer(fin)
return home_dir

View File

@ -16,27 +16,34 @@ from platformio.commands.run import cli as cmd_run
def test_build_flags(clirunner, validate_cliresult, tmpdir):
build_flags = [("-D TEST_INT=13", "-DTEST_INT=13"),
("-DTEST_SINGLE_MACRO", "-DTEST_SINGLE_MACRO"),
('-DTEST_STR_SPACE="Andrew Smith"',
'"-DTEST_STR_SPACE=Andrew Smith"')]
build_flags = [
("-D TEST_INT=13", "-DTEST_INT=13"),
("-DTEST_SINGLE_MACRO", "-DTEST_SINGLE_MACRO"),
('-DTEST_STR_SPACE="Andrew Smith"', '"-DTEST_STR_SPACE=Andrew Smith"'),
]
tmpdir.join("platformio.ini").write("""
tmpdir.join("platformio.ini").write(
"""
[env:native]
platform = native
extra_scripts = extra.py
build_flags =
; -DCOMMENTED_MACRO
%s ; inline comment
""" % " ".join([f[0] for f in build_flags]))
"""
% " ".join([f[0] for f in build_flags])
)
tmpdir.join("extra.py").write("""
tmpdir.join("extra.py").write(
"""
Import("projenv")
projenv.Append(CPPDEFINES="POST_SCRIPT_MACRO")
""")
"""
)
tmpdir.mkdir("src").join("main.cpp").write("""
tmpdir.mkdir("src").join("main.cpp").write(
"""
#if !defined(TEST_INT) || TEST_INT != 13
#error "TEST_INT"
#endif
@ -55,26 +62,28 @@ projenv.Append(CPPDEFINES="POST_SCRIPT_MACRO")
int main() {
}
""")
"""
)
result = clirunner.invoke(
cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
result = clirunner.invoke(cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
validate_cliresult(result)
build_output = result.output[result.output.find(
"Scanning dependencies..."):]
build_output = result.output[result.output.find("Scanning dependencies...") :]
for flag in build_flags:
assert flag[1] in build_output, flag
def test_build_unflags(clirunner, validate_cliresult, tmpdir):
tmpdir.join("platformio.ini").write("""
tmpdir.join("platformio.ini").write(
"""
[env:native]
platform = native
build_unflags = -DTMP_MACRO1=45 -I. -DNON_EXISTING_MACRO -lunknownLib -Os
extra_scripts = pre:extra.py
""")
"""
)
tmpdir.join("extra.py").write("""
tmpdir.join("extra.py").write(
"""
Import("env")
env.Append(CPPPATH="%s")
env.Append(CPPDEFINES="TMP_MACRO1")
@ -82,22 +91,24 @@ env.Append(CPPDEFINES=["TMP_MACRO2"])
env.Append(CPPDEFINES=("TMP_MACRO3", 13))
env.Append(CCFLAGS=["-Os"])
env.Append(LIBS=["unknownLib"])
""" % str(tmpdir))
"""
% str(tmpdir)
)
tmpdir.mkdir("src").join("main.c").write("""
tmpdir.mkdir("src").join("main.c").write(
"""
#ifdef TMP_MACRO1
#error "TMP_MACRO1 should be removed"
#endif
int main() {
}
""")
"""
)
result = clirunner.invoke(
cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
result = clirunner.invoke(cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
validate_cliresult(result)
build_output = result.output[result.output.find(
"Scanning dependencies..."):]
build_output = result.output[result.output.find("Scanning dependencies...") :]
assert "-DTMP_MACRO1" not in build_output
assert "-Os" not in build_output
assert str(tmpdir) not in build_output

View File

@ -35,12 +35,12 @@ def pytest_generate_tests(metafunc):
# dev/platforms
for manifest in PlatformManager().get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
ignore_conds = [
not p.is_embedded(),
p.name == "ststm8",
# issue with "version `CXXABI_1.3.9' not found (required by sdcc)"
"linux" in util.get_systype() and p.name == "intel_mcs51"
"linux" in util.get_systype() and p.name == "intel_mcs51",
]
if any(ignore_conds):
continue
@ -61,10 +61,9 @@ def pytest_generate_tests(metafunc):
candidates[group] = []
candidates[group].append(root)
project_dirs.extend([
random.choice(examples) for examples in candidates.values()
if examples
])
project_dirs.extend(
[random.choice(examples) for examples in candidates.values() if examples]
)
metafunc.parametrize("pioproject_dir", sorted(project_dirs))
@ -76,12 +75,11 @@ def test_run(pioproject_dir):
if isdir(build_dir):
util.rmtree_(build_dir)
env_names = ProjectConfig(join(pioproject_dir,
"platformio.ini")).envs()
env_names = ProjectConfig(join(pioproject_dir, "platformio.ini")).envs()
result = util.exec_command(
["platformio", "run", "-e",
random.choice(env_names)])
if result['returncode'] != 0:
["platformio", "run", "-e", random.choice(env_names)]
)
if result["returncode"] != 0:
pytest.fail(str(result))
assert isdir(build_dir)

View File

@ -37,14 +37,10 @@ def test_example(clirunner, validate_cliresult, piotest_dir):
def test_warning_line(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_ci,
[join(INOTEST_DIR, "basic"), "-b", "uno"])
result = clirunner.invoke(cmd_ci, [join(INOTEST_DIR, "basic"), "-b", "uno"])
validate_cliresult(result)
assert ('basic.ino:16:14: warning: #warning "Line number is 16"' in
result.output)
assert ('basic.ino:46:2: warning: #warning "Line number is 46"' in
result.output)
result = clirunner.invoke(
cmd_ci, [join(INOTEST_DIR, "strmultilines"), "-b", "uno"])
assert 'basic.ino:16:14: warning: #warning "Line number is 16"' in result.output
assert 'basic.ino:46:2: warning: #warning "Line number is 46"' in result.output
result = clirunner.invoke(cmd_ci, [join(INOTEST_DIR, "strmultilines"), "-b", "uno"])
validate_cliresult(result)
assert ('main.ino:75:2: warning: #warning "Line 75"' in result.output)
assert 'main.ino:75:2: warning: #warning "Line 75"' in result.output

View File

@ -23,7 +23,6 @@ from platformio.managers.platform import PlatformManager
def test_check_pio_upgrade(clirunner, isolated_pio_home, validate_cliresult):
def _patch_pio_version(version):
maintenance.__version__ = version
cmd_upgrade.VERSION = version.split(".", 3)
@ -54,8 +53,7 @@ def test_check_pio_upgrade(clirunner, isolated_pio_home, validate_cliresult):
def test_check_lib_updates(clirunner, isolated_pio_home, validate_cliresult):
# install obsolete library
result = clirunner.invoke(cli_pio,
["lib", "-g", "install", "ArduinoJson@<5.7"])
result = clirunner.invoke(cli_pio, ["lib", "-g", "install", "ArduinoJson@<5.7"])
validate_cliresult(result)
# reset check time
@ -65,15 +63,14 @@ def test_check_lib_updates(clirunner, isolated_pio_home, validate_cliresult):
result = clirunner.invoke(cli_pio, ["lib", "-g", "list"])
validate_cliresult(result)
assert ("There are the new updates for libraries (ArduinoJson)" in
result.output)
assert "There are the new updates for libraries (ArduinoJson)" in result.output
def test_check_and_update_libraries(clirunner, isolated_pio_home,
validate_cliresult):
def test_check_and_update_libraries(clirunner, isolated_pio_home, validate_cliresult):
# enable library auto-updates
result = clirunner.invoke(
cli_pio, ["settings", "set", "auto_update_libraries", "Yes"])
cli_pio, ["settings", "set", "auto_update_libraries", "Yes"]
)
# reset check time
interval = int(app.get_setting("check_libraries_interval")) * 3600 * 24
@ -89,27 +86,23 @@ def test_check_and_update_libraries(clirunner, isolated_pio_home,
# initiate auto-updating
result = clirunner.invoke(cli_pio, ["lib", "-g", "show", "ArduinoJson"])
validate_cliresult(result)
assert ("There are the new updates for libraries (ArduinoJson)" in
result.output)
assert "There are the new updates for libraries (ArduinoJson)" in result.output
assert "Please wait while updating libraries" in result.output
assert re.search(r"Updating ArduinoJson\s+@ 5.6.7\s+\[[\d\.]+\]",
result.output)
assert re.search(r"Updating ArduinoJson\s+@ 5.6.7\s+\[[\d\.]+\]", result.output)
# check updated version
result = clirunner.invoke(cli_pio, ["lib", "-g", "list", "--json-output"])
validate_cliresult(result)
assert prev_data[0]['version'] != json.loads(result.output)[0]['version']
assert prev_data[0]["version"] != json.loads(result.output)[0]["version"]
def test_check_platform_updates(clirunner, isolated_pio_home,
validate_cliresult):
def test_check_platform_updates(clirunner, isolated_pio_home, validate_cliresult):
# install obsolete platform
result = clirunner.invoke(cli_pio, ["platform", "install", "native"])
validate_cliresult(result)
manifest_path = isolated_pio_home.join("platforms", "native",
"platform.json")
manifest_path = isolated_pio_home.join("platforms", "native", "platform.json")
manifest = json.loads(manifest_path.read())
manifest['version'] = "0.0.0"
manifest["version"] = "0.0.0"
manifest_path.write(json.dumps(manifest))
# reset cached manifests
PlatformManager().cache_reset()
@ -124,11 +117,11 @@ def test_check_platform_updates(clirunner, isolated_pio_home,
assert "There are the new updates for platforms (native)" in result.output
def test_check_and_update_platforms(clirunner, isolated_pio_home,
validate_cliresult):
def test_check_and_update_platforms(clirunner, isolated_pio_home, validate_cliresult):
# enable library auto-updates
result = clirunner.invoke(
cli_pio, ["settings", "set", "auto_update_platforms", "Yes"])
cli_pio, ["settings", "set", "auto_update_platforms", "Yes"]
)
# reset check time
interval = int(app.get_setting("check_platforms_interval")) * 3600 * 24
@ -151,4 +144,4 @@ def test_check_and_update_platforms(clirunner, isolated_pio_home,
# check updated version
result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"])
validate_cliresult(result)
assert prev_data[0]['version'] != json.loads(result.output)[0]['version']
assert prev_data[0]["version"] != json.loads(result.output)[0]["version"]

View File

@ -29,126 +29,134 @@ def test_pkg_input_parser():
["id=13@~1.2.3", ("id=13", "~1.2.3", None)],
[
get_project_core_dir(),
(".platformio", None, "file://" + get_project_core_dir())
(".platformio", None, "file://" + get_project_core_dir()),
],
[
"LocalName=" + get_project_core_dir(),
("LocalName", None, "file://" + get_project_core_dir())
("LocalName", None, "file://" + get_project_core_dir()),
],
[
"LocalName=%s@>2.3.0" % get_project_core_dir(),
("LocalName", ">2.3.0", "file://" + get_project_core_dir())
("LocalName", ">2.3.0", "file://" + get_project_core_dir()),
],
[
"https://github.com/user/package.git",
("package", None, "git+https://github.com/user/package.git")
("package", None, "git+https://github.com/user/package.git"),
],
[
"MyPackage=https://gitlab.com/user/package.git",
("MyPackage", None, "git+https://gitlab.com/user/package.git")
("MyPackage", None, "git+https://gitlab.com/user/package.git"),
],
[
"MyPackage=https://gitlab.com/user/package.git@3.2.1,!=2",
("MyPackage", "3.2.1,!=2",
"git+https://gitlab.com/user/package.git")
("MyPackage", "3.2.1,!=2", "git+https://gitlab.com/user/package.git"),
],
[
"https://somedomain.com/path/LibraryName-1.2.3.zip",
("LibraryName-1.2.3", None,
"https://somedomain.com/path/LibraryName-1.2.3.zip")
(
"LibraryName-1.2.3",
None,
"https://somedomain.com/path/LibraryName-1.2.3.zip",
),
],
[
"https://github.com/user/package/archive/branch.zip",
("branch", None,
"https://github.com/user/package/archive/branch.zip")
("branch", None, "https://github.com/user/package/archive/branch.zip"),
],
[
"https://github.com/user/package/archive/branch.zip@~1.2.3",
("branch", "~1.2.3",
"https://github.com/user/package/archive/branch.zip")
("branch", "~1.2.3", "https://github.com/user/package/archive/branch.zip"),
],
[
"https://github.com/user/package/archive/branch.tar.gz",
("branch.tar", None,
"https://github.com/user/package/archive/branch.tar.gz")
(
"branch.tar",
None,
"https://github.com/user/package/archive/branch.tar.gz",
),
],
[
"https://github.com/user/package/archive/branch.tar.gz@!=5",
("branch.tar", "!=5",
"https://github.com/user/package/archive/branch.tar.gz")
(
"branch.tar",
"!=5",
"https://github.com/user/package/archive/branch.tar.gz",
),
],
[
"https://developer.mbed.org/users/user/code/package/",
("package", None,
"hg+https://developer.mbed.org/users/user/code/package/")
("package", None, "hg+https://developer.mbed.org/users/user/code/package/"),
],
[
"https://os.mbed.com/users/user/code/package/",
("package", None,
"hg+https://os.mbed.com/users/user/code/package/")
("package", None, "hg+https://os.mbed.com/users/user/code/package/"),
],
[
"https://github.com/user/package#v1.2.3",
("package", None, "git+https://github.com/user/package#v1.2.3")
("package", None, "git+https://github.com/user/package#v1.2.3"),
],
[
"https://github.com/user/package.git#branch",
("package", None, "git+https://github.com/user/package.git#branch")
("package", None, "git+https://github.com/user/package.git#branch"),
],
[
"PkgName=https://github.com/user/package.git#a13d344fg56",
("PkgName", None,
"git+https://github.com/user/package.git#a13d344fg56")
],
[
"user/package",
("package", None, "git+https://github.com/user/package")
("PkgName", None, "git+https://github.com/user/package.git#a13d344fg56"),
],
["user/package", ("package", None, "git+https://github.com/user/package")],
[
"PkgName=user/package",
("PkgName", None, "git+https://github.com/user/package")
("PkgName", None, "git+https://github.com/user/package"),
],
[
"PkgName=user/package#master",
("PkgName", None, "git+https://github.com/user/package#master")
("PkgName", None, "git+https://github.com/user/package#master"),
],
[
"git+https://github.com/user/package",
("package", None, "git+https://github.com/user/package")
("package", None, "git+https://github.com/user/package"),
],
[
"hg+https://example.com/user/package",
("package", None, "hg+https://example.com/user/package")
("package", None, "hg+https://example.com/user/package"),
],
[
"git@github.com:user/package.git",
("package", None, "git+git@github.com:user/package.git")
("package", None, "git+git@github.com:user/package.git"),
],
[
"git@github.com:user/package.git#v1.2.0",
("package", None, "git+git@github.com:user/package.git#v1.2.0")
("package", None, "git+git@github.com:user/package.git#v1.2.0"),
],
[
"LocalName=git@github.com:user/package.git#v1.2.0@~1.2.0",
("LocalName", "~1.2.0",
"git+git@github.com:user/package.git#v1.2.0")
("LocalName", "~1.2.0", "git+git@github.com:user/package.git#v1.2.0"),
],
[
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0",
("package", None,
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0")
(
"package",
None,
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0",
),
],
[
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0",
("package", None,
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0")
(
"package",
None,
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0",
),
],
[
"LocalName=git+ssh://user@gitlab.private-server.com:1234"
"/package#1.2.0@!=13",
("LocalName", "!=13",
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0")
]
(
"LocalName",
"!=13",
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0",
),
],
]
for params, result in items:
if isinstance(params, tuple):
@ -165,62 +173,55 @@ def test_install_packages(isolated_pio_home, tmpdir):
dict(id=1, name="name_1", version="1.2"),
dict(id=1, name="name_1", version="1.0.0"),
dict(name="name_2", version="1.0.0"),
dict(name="name_2",
version="2.0.0",
__src_url="git+https://github.com"),
dict(name="name_2",
version="3.0.0",
__src_url="git+https://github2.com"),
dict(name="name_2",
version="4.0.0",
__src_url="git+https://github2.com")
dict(name="name_2", version="2.0.0", __src_url="git+https://github.com"),
dict(name="name_2", version="3.0.0", __src_url="git+https://github2.com"),
dict(name="name_2", version="4.0.0", __src_url="git+https://github2.com"),
]
pm = PackageManager(join(get_project_core_dir(), "packages"))
for package in packages:
tmp_dir = tmpdir.mkdir("tmp-package")
tmp_dir.join("package.json").write(json.dumps(package))
pm._install_from_url(package['name'], "file://%s" % str(tmp_dir))
pm._install_from_url(package["name"], "file://%s" % str(tmp_dir))
tmp_dir.remove(rec=1)
assert len(pm.get_installed()) == len(packages) - 1
pkg_dirnames = [
'name_1_ID1', 'name_1_ID1@1.0.0', 'name_1_ID1@1.2', 'name_1_ID1@2.0.0',
'name_1_ID1@shasum', 'name_2',
'name_2@src-177cbce1f0705580d17790fda1cc2ef5',
'name_2@src-f863b537ab00f4c7b5011fc44b120e1f'
"name_1_ID1",
"name_1_ID1@1.0.0",
"name_1_ID1@1.2",
"name_1_ID1@2.0.0",
"name_1_ID1@shasum",
"name_2",
"name_2@src-177cbce1f0705580d17790fda1cc2ef5",
"name_2@src-f863b537ab00f4c7b5011fc44b120e1f",
]
assert set([
p.basename for p in isolated_pio_home.join("packages").listdir()
]) == set(pkg_dirnames)
assert set(
[p.basename for p in isolated_pio_home.join("packages").listdir()]
) == set(pkg_dirnames)
def test_get_package():
tests = [
[("unknown", ), None],
[("1", ), None],
[("id=1", "shasum"),
dict(id=1, name="name_1", version="shasum")],
[("id=1", "*"),
dict(id=1, name="name_1", version="2.1.0")],
[("id=1", "^1"),
dict(id=1, name="name_1", version="1.2")],
[("id=1", "^1"),
dict(id=1, name="name_1", version="1.2")],
[("name_1", "<2"),
dict(id=1, name="name_1", version="1.2")],
[("unknown",), None],
[("1",), None],
[("id=1", "shasum"), dict(id=1, name="name_1", version="shasum")],
[("id=1", "*"), dict(id=1, name="name_1", version="2.1.0")],
[("id=1", "^1"), dict(id=1, name="name_1", version="1.2")],
[("id=1", "^1"), dict(id=1, name="name_1", version="1.2")],
[("name_1", "<2"), dict(id=1, name="name_1", version="1.2")],
[("name_1", ">2"), None],
[("name_1", "2-0-0"), None],
[("name_2", ), dict(name="name_2", version="4.0.0")],
[("url_has_higher_priority", None, "git+https://github.com"),
dict(name="name_2",
version="2.0.0",
__src_url="git+https://github.com")],
[("name_2", None, "git+https://github.com"),
dict(name="name_2",
version="2.0.0",
__src_url="git+https://github.com")],
[("name_2",), dict(name="name_2", version="4.0.0")],
[
("url_has_higher_priority", None, "git+https://github.com"),
dict(name="name_2", version="2.0.0", __src_url="git+https://github.com"),
],
[
("name_2", None, "git+https://github.com"),
dict(name="name_2", version="2.0.0", __src_url="git+https://github.com"),
],
]
pm = PackageManager(join(get_project_core_dir(), "packages"))

View File

@ -20,8 +20,8 @@ from platformio import exception, util
def test_platformio_cli():
result = util.exec_command(["pio", "--help"])
assert result['returncode'] == 0
assert "Usage: pio [OPTIONS] COMMAND [ARGS]..." in result['out']
assert result["returncode"] == 0
assert "Usage: pio [OPTIONS] COMMAND [ARGS]..." in result["out"]
def test_ping_internet_ips():
@ -38,5 +38,5 @@ def test_api_cache(monkeypatch, isolated_pio_home):
api_kwargs = {"url": "/stats", "cache_valid": "10s"}
result = util.get_api_result(**api_kwargs)
assert result and "boards" in result
monkeypatch.setattr(util, '_internet_on', lambda: False)
monkeypatch.setattr(util, "_internet_on", lambda: False)
assert util.get_api_result(**api_kwargs) == result

View File

@ -18,14 +18,14 @@ import requests
def validate_response(r):
assert r.status_code == 200, r.url
assert int(r.headers['Content-Length']) > 0, r.url
assert r.headers['Content-Type'] in ("application/gzip",
"application/octet-stream")
assert int(r.headers["Content-Length"]) > 0, r.url
assert r.headers["Content-Type"] in ("application/gzip", "application/octet-stream")
def test_packages():
pkgs_manifest = requests.get(
"https://dl.bintray.com/platformio/dl-packages/manifest.json").json()
"https://dl.bintray.com/platformio/dl-packages/manifest.json"
).json()
assert isinstance(pkgs_manifest, dict)
items = []
for _, variants in pkgs_manifest.items():
@ -33,12 +33,12 @@ def test_packages():
items.append(item)
for item in items:
assert item['url'].endswith(".tar.gz"), item
assert item["url"].endswith(".tar.gz"), item
r = requests.head(item['url'], allow_redirects=True)
r = requests.head(item["url"], allow_redirects=True)
validate_response(r)
if "X-Checksum-Sha1" not in r.headers:
return pytest.skip("X-Checksum-Sha1 is not provided")
assert item['sha1'] == r.headers.get("X-Checksum-Sha1")[0:40], item
assert item["sha1"] == r.headers.get("X-Checksum-Sha1")[0:40], item

View File

@ -117,8 +117,16 @@ def test_sections(config):
config.getraw("unknown_section", "unknown_option")
assert config.sections() == [
"platformio", "env", "strict_ldf", "monitor_custom", "strict_settings",
"custom", "env:base", "env:test_extends", "env:extra_1", "env:extra_2"
"platformio",
"env",
"strict_ldf",
"monitor_custom",
"strict_settings",
"custom",
"env:base",
"env:test_extends",
"env:extra_1",
"env:extra_2",
]
@ -129,11 +137,20 @@ def test_envs(config):
def test_options(config):
assert config.options(env="base") == [
"build_flags", "targets", "monitor_speed", "lib_deps", "lib_ignore"
"build_flags",
"targets",
"monitor_speed",
"lib_deps",
"lib_ignore",
]
assert config.options(env="test_extends") == [
"extends", "build_flags", "lib_ldf_mode", "lib_compat_mode",
"monitor_speed", "lib_deps", "lib_ignore"
"extends",
"build_flags",
"lib_ldf_mode",
"lib_compat_mode",
"monitor_speed",
"lib_deps",
"lib_ignore",
]
@ -154,15 +171,22 @@ def test_sysenv_options(config):
os.environ["__PIO_TEST_CNF_EXTRA_FLAGS"] = "-L /usr/local/lib"
assert config.get("custom", "extra_flags") == "-L /usr/local/lib"
assert config.get("env:base", "build_flags") == [
"-D DEBUG=1 -L /usr/local/lib", "-DSYSENVDEPS1 -DSYSENVDEPS2"
"-D DEBUG=1 -L /usr/local/lib",
"-DSYSENVDEPS1 -DSYSENVDEPS2",
]
assert config.get("env:base", "upload_port") == "/dev/sysenv/port"
assert config.get("env:extra_2", "upload_port") == "/dev/extra_2/port"
# env var as option
assert config.options(env="test_extends") == [
"extends", "build_flags", "lib_ldf_mode", "lib_compat_mode",
"monitor_speed", "lib_deps", "lib_ignore", "upload_port"
"extends",
"build_flags",
"lib_ldf_mode",
"lib_compat_mode",
"monitor_speed",
"lib_deps",
"lib_ignore",
"upload_port",
]
# sysenv
@ -208,7 +232,7 @@ def test_items(config):
("debug_flags", "-D DEBUG=1"),
("lib_flags", "-lc -lm"),
("extra_flags", None),
("lib_ignore", "LibIgnoreCustom")
("lib_ignore", "LibIgnoreCustom"),
] # yapf: disable
assert config.items(env="base") == [
("build_flags", ["-D DEBUG=1"]),
@ -228,7 +252,7 @@ def test_items(config):
("lib_ignore", ["LibIgnoreCustom", "Lib3"]),
("upload_port", "/dev/extra_2/port"),
("monitor_speed", "115200"),
("lib_deps", ["Lib1", "Lib2"])
("lib_deps", ["Lib1", "Lib2"]),
] # yapf: disable
assert config.items(env="test_extends") == [
("extends", ["strict_settings"]),
@ -237,5 +261,5 @@ def test_items(config):
("lib_compat_mode", "strict"),
("monitor_speed", "9600"),
("lib_deps", ["Lib1", "Lib2"]),
("lib_ignore", ["LibIgnoreCustom"])
("lib_ignore", ["LibIgnoreCustom"]),
] # yapf: disable
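
Note that the trailing # yapf: disable markers kept in test_projectconf.py no longer do anything: Black does not recognize yapf pragmas and only skips regions fenced by its own # fmt: off / # fmt: on comments. If the hand-aligned expected lists were ever meant to stay untouched, the equivalent under Black would look roughly like this; EXPECTED_ITEMS is an illustrative name, not from the commit:

    # fmt: off
    EXPECTED_ITEMS = [  # hypothetical hand-aligned data preserved verbatim by Black
        ("build_flags",  ["-D DEBUG=1"]),
        ("lib_deps",     ["Lib1", "Lib2"]),
        ("lib_ignore",   ["LibIgnoreCustom"]),
    ]
    # fmt: on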

View File

@ -20,7 +20,7 @@ passenv = *
usedevelop = True
deps =
isort
yapf
black
pylint
pytest
pytest-xdist