Merge branch 'release/v5.2.4'

Ivan Kravets
2021-12-15 12:19:59 +02:00
41 changed files with 398 additions and 232 deletions


@@ -8,6 +8,21 @@ PlatformIO Core 5

**A professional collaborative platform for embedded development**

5.2.4 (2021-12-15)
~~~~~~~~~~~~~~~~~~

- Added support for a new ``headers`` field in `library.json <https://docs.platformio.org/en/latest/librarymanager/config.html>`__ (declare a list of header files that can be included in project source files using the ``#include <...>`` directive; a minimal sketch follows this list)
- Improved tab completion support for Bash, ZSH, and Fish shells (`issue #4114 <https://github.com/platformio/platformio-core/issues/4114>`_)
- Improved support for projects located on a network share (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_, `issue #3926 <https://github.com/platformio/platformio-core/issues/3926>`_, `issue #4099 <https://github.com/platformio/platformio-core/issues/4099>`_)
- Improved PIO Remote setup on credit-card sized computers (Raspberry Pi, BeagleBone, etc.) (`issue #3865 <https://github.com/platformio/platformio-core/issues/3865>`_)
- Upgraded build engine to SCons 4.3 (`release notes <https://github.com/SCons/scons/blob/rel_4.3.0/CHANGES.txt>`__)
- Fixed an issue with the CLion project generator when a macro contains a space (`issue #4102 <https://github.com/platformio/platformio-core/issues/4102>`_)
- Fixed an issue with the NetBeans project generator when the path to PlatformIO contains a space (`issue #4096 <https://github.com/platformio/platformio-core/issues/4096>`_)
- Fixed an issue when a system environment variable did not override a project configuration option (`issue #4125 <https://github.com/platformio/platformio-core/issues/4125>`_)
- Fixed an issue when referencing a ``*_dir`` option from a custom project configuration environment (`issue #4110 <https://github.com/platformio/platformio-core/issues/4110>`_)
- Fixed an issue with the CLion template that generated a broken CMake file if the user's home directory contained an unescaped backslash (`issue #4071 <https://github.com/platformio/platformio-core/issues/4071>`_)
- Fixed an issue with incorrectly detected Windows architecture when a 32-bit Python is used (`issue #4134 <https://github.com/platformio/platformio-core/issues/4134>`_)
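For illustration, a minimal ``library.json`` sketch using the new ``headers`` field (the library and header names below are hypothetical; per the manifest parser in this commit, the value may also be given as a single comma-separated string):

    {
      "name": "FooSensor",
      "version": "1.0.0",
      "headers": ["FooSensor.h", "FooSensorBus.hpp"]
    }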
5.2.3 (2021-11-05)
~~~~~~~~~~~~~~~~~~


@@ -1,5 +1,5 @@
-PlatformIO
-==========
+PlatformIO Core
+===============

.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
    :target: https://docs.platformio.org/page/core/index.html


Submodule docs updated: fed771ae8d...ba3fca21ea


@@ -14,7 +14,7 @@

import sys

-VERSION = (5, 2, 3)
+VERSION = (5, 2, 4)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"

@@ -50,7 +50,7 @@ __core_packages__ = {
    "contrib-piohome": "~3.4.0",
    "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
    "tool-unity": "~1.20500.0",
-    "tool-scons": "~4.40200.0",
+    "tool-scons": "~4.40300.0",
    "tool-cppcheck": "~1.260.0",
    "tool-clangtidy": "~1.120001.0",
    "tool-pvs-studio": "~7.14.0",


@@ -24,13 +24,6 @@ from platformio import __version__, exception
from platformio.commands import PlatformioCLI
from platformio.compat import IS_CYGWIN, ensure_python3

-try:
-    import click_completion  # pylint: disable=import-error
-
-    click_completion.init()
-except:  # pylint: disable=bare-except
-    pass


@click.command(
    cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])

@@ -74,7 +67,6 @@ try:
    def process_result(ctx, result, *_, **__):
        _process_result(ctx, result)

except (AttributeError, TypeError):  # legacy support for Click > 8.0.1

    @cli.resultcallback()


@@ -31,7 +31,7 @@ from platformio.project.helpers import get_default_projects_dir

def projects_dir_validate(projects_dir):
    assert os.path.isdir(projects_dir)
-    return os.path.realpath(projects_dir)
+    return os.path.abspath(projects_dir)


DEFAULT_SETTINGS = {


@@ -115,10 +115,10 @@ env.Replace(
    PROJECT_LIBDEPS_DIR=config.get("platformio", "libdeps_dir"),
    PROJECT_INCLUDE_DIR=config.get("platformio", "include_dir"),
    PROJECT_SRC_DIR=config.get("platformio", "src_dir"),
-    PROJECTSRC_DIR=config.get("platformio", "src_dir"),  # legacy for dev/platform
+    PROJECTSRC_DIR="$PROJECT_SRC_DIR",  # legacy for dev/platform
    PROJECT_TEST_DIR=config.get("platformio", "test_dir"),
    PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
-    PROJECTDATA_DIR=config.get("platformio", "data_dir"),  # legacy for dev/platform
+    PROJECTDATA_DIR="$PROJECT_DATA_DIR",  # legacy for dev/platform
    PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
    BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
    LIBSOURCE_DIRS=[

@@ -128,15 +128,20 @@ env.Replace(
    ],
)

-if (
-    compat.IS_WINDOWS
-    and sys.version_info >= (3, 8)
-    and env["PROJECT_DIR"].startswith("\\\\")
-):
-    click.secho(
-        "There is a known issue with Python 3.8+ and mapped network drives on "
-        "Windows.\nSee a solution at:\n"
-        "https://github.com/platformio/platformio-core/issues/3417",
-        fg="yellow",
-    )
+if int(ARGUMENTS.get("ISATTY", 0)):
+    # pylint: disable=protected-access
+    click._compat.isatty = lambda stream: True
+
+if compat.IS_WINDOWS and sys.version_info >= (3, 8) and os.getcwd().startswith("\\\\"):
+    click.secho("!!! WARNING !!!\t\t" * 3, fg="red")
+    click.secho(
+        "Your project is located on a mapped network drive but the "
+        "current command-line shell does not support the UNC paths.",
+        fg="yellow",
+    )
+    click.secho(
+        "Please move your project to a physical drive or check this workaround: "
+        "https://bit.ly/3kuU5mP\n",
+        fg="yellow",
+    )

@@ -145,10 +150,6 @@ if env.subst("$BUILD_CACHE_DIR"):
        os.makedirs(env.subst("$BUILD_CACHE_DIR"))
    env.CacheDir("$BUILD_CACHE_DIR")

-if int(ARGUMENTS.get("ISATTY", 0)):
-    # pylint: disable=protected-access
-    click._compat.isatty = lambda stream: True

is_clean_all = "cleanall" in COMMAND_LINE_TARGETS
if env.GetOption("clean") or is_clean_all:
    env.PioClean(is_clean_all)


@@ -32,14 +32,14 @@ def _dump_includes(env):
        env.subst("$PROJECT_SRC_DIR"),
    ]
    includes["build"].extend(
-        [os.path.realpath(env.subst(item)) for item in env.get("CPPPATH", [])]
+        [os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
    )

    # installed libs
    includes["compatlib"] = []
    for lb in env.GetLibBuilders():
        includes["compatlib"].extend(
-            [os.path.realpath(inc) for inc in lb.get_include_dirs()]
+            [os.path.abspath(inc) for inc in lb.get_include_dirs()]
        )

    # includes from toolchains

@@ -56,9 +56,7 @@ def _dump_includes(env):
        os.path.join(toolchain_dir, "*", "include*"),
    ]
    for g in toolchain_incglobs:
-        includes["toolchain"].extend(
-            [os.path.realpath(inc) for inc in glob.glob(g)]
-        )
+        includes["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)])

    # include Unity framework if there are tests in project
    includes["unity"] = []

@@ -132,7 +130,7 @@ def _dump_defines(env):

def _get_svd_path(env):
    svd_path = env.GetProjectOption("debug_svd_path")
    if svd_path:
-        return os.path.realpath(svd_path)
+        return os.path.abspath(svd_path)

    if "BOARD" not in env:
        return None

@@ -147,7 +145,7 @@ def _get_svd_path(env):
    # default file from ./platform/misc/svd folder
    p = env.PioPlatform()
    if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
-        return os.path.realpath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
+        return os.path.abspath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
    return None


@@ -125,7 +125,7 @@ class LibBuilderBase(object):
    def __init__(self, env, path, manifest=None, verbose=False):
        self.env = env.Clone()
        self.envorigin = env.Clone()
-        self.path = os.path.realpath(env.subst(path))
+        self.path = os.path.abspath(env.subst(path))
        self.verbose = verbose

        try:

@@ -290,7 +290,7 @@ class LibBuilderBase(object):
        if self.extra_script:
            self.env.SConscriptChdir(1)
            self.env.SConscript(
-                os.path.realpath(self.extra_script),
+                os.path.abspath(self.extra_script),
                exports={"env": self.env, "pio_lib_builder": self},
            )

        self.env.ProcessUnFlags(self.build_unflags)

@@ -750,14 +750,14 @@ class PlatformIOLibBuilder(LibBuilderBase):
    def include_dir(self):
        if "includeDir" in self._manifest.get("build", {}):
            with fs.cd(self.path):
-                return os.path.realpath(self._manifest.get("build").get("includeDir"))
+                return os.path.abspath(self._manifest.get("build").get("includeDir"))
        return LibBuilderBase.include_dir.fget(self)  # pylint: disable=no-member

    @property
    def src_dir(self):
        if "srcDir" in self._manifest.get("build", {}):
            with fs.cd(self.path):
-                return os.path.realpath(self._manifest.get("build").get("srcDir"))
+                return os.path.abspath(self._manifest.get("build").get("srcDir"))
        return LibBuilderBase.src_dir.fget(self)  # pylint: disable=no-member

    @property

@@ -1024,7 +1024,7 @@ def GetLibBuilders(env):  # pylint: disable=too-many-branches
    found_incompat = False
    for storage_dir in env.GetLibSourceDirs():
-        storage_dir = os.path.realpath(storage_dir)
+        storage_dir = os.path.abspath(storage_dir)
        if not os.path.isdir(storage_dir):
            continue
        for item in sorted(os.listdir(storage_dir)):


@@ -376,7 +376,7 @@ def GetExtraScripts(env, scope):
    if not items:
        return items
    with fs.cd(env.subst("$PROJECT_DIR")):
-        return [os.path.realpath(item) for item in items]
+        return [os.path.abspath(item) for item in items]


def exists(_):


@@ -207,12 +207,12 @@ def ParseFlagsExtended(env, flags):  # pylint: disable=too-many-branches
    for k in ("CPPPATH", "LIBPATH"):
        for i, p in enumerate(result.get(k, [])):
            if os.path.isdir(p):
-                result[k][i] = os.path.realpath(p)
+                result[k][i] = os.path.abspath(p)

    # fix relative path for "-include"
    for i, f in enumerate(result.get("CCFLAGS", [])):
        if isinstance(f, tuple) and f[0] == "-include":
-            result["CCFLAGS"][i] = (f[0], env.File(os.path.realpath(f[1].get_path())))
+            result["CCFLAGS"][i] = (f[0], env.File(os.path.abspath(f[1].get_path())))

    return result


@@ -117,7 +117,7 @@ class RegistryClient(HTTPClient):
        if page:
            params["page"] = int(page)
        return self.fetch_json_data(
-            "get", "/v3/packages", params=params, cache_valid="1h"
+            "get", "/v3/search", params=params, cache_valid="1h"
        )

    def get_package(self, type_, owner, name, version=None):


@@ -86,7 +86,7 @@ class DefectItem(object):
            "severity": self.SEVERITY_LABELS[self.severity],
            "category": self.category,
            "message": self.message,
-            "file": os.path.realpath(self.file),
+            "file": os.path.abspath(self.file),
            "line": self.line,
            "column": self.column,
            "callstack": self.callstack,


@@ -201,11 +201,11 @@ class CheckToolBase(object):  # pylint: disable=too-many-instance-attributes
        def _add_file(path):
            if path.endswith(header_extensions):
-                result["headers"].append(os.path.realpath(path))
+                result["headers"].append(os.path.abspath(path))
            elif path.endswith(c_extension):
-                result["c"].append(os.path.realpath(path))
+                result["c"].append(os.path.abspath(path))
            elif path.endswith(cpp_extensions):
-                result["c++"].append(os.path.realpath(path))
+                result["c++"].append(os.path.abspath(path))

        for pattern in patterns:
            for item in glob.glob(pattern, recursive=True):


@@ -33,7 +33,7 @@ def validate_path(ctx, param, value):  # pylint: disable=unused-argument
    for i, p in enumerate(value):
        if p.startswith("~"):
            value[i] = fs.expanduser(p)
-        value[i] = os.path.realpath(value[i])
+        value[i] = os.path.abspath(value[i])
        if not glob.glob(value[i], recursive=True):
            invalid_path = p
            break

@@ -162,7 +162,7 @@ def _exclude_contents(dst_dir, patterns):
    for p in patterns:
        contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
    for path in contents:
-        path = os.path.realpath(path)
+        path = os.path.abspath(path)
        if os.path.isdir(path):
            fs.rmtree(path)
        elif os.path.isfile(path):


@@ -93,7 +93,7 @@ class ProjectRPC:
        # skip non existing folders and resolve full path
        for key in ("envLibdepsDirs", "libExtraDirs"):
            data[key] = [
-                fs.expanduser(d) if d.startswith("~") else os.path.realpath(d)
+                fs.expanduser(d) if d.startswith("~") else os.path.abspath(d)
                for d in data[key]
                if os.path.isdir(d)
            ]


@@ -92,6 +92,6 @@ class WebSocketJSONRPCServer(WebSocketEndpoint):
    async def _handle_rpc(self, websocket, data):
        # pylint: disable=no-member
        response = await self.factory.manager.get_response_for_payload(data)
-        if response.error:
+        if response.error and response.error.data:
            click.secho("Error: %s" % response.error.data, fg="red", err=True)
        await websocket.send_text(self.factory.manager.serialize(response.body))


@@ -37,7 +37,7 @@ from platformio.project.exception import NotPlatformIOProjectError
@click.pass_context
def cli(ctx, agent):
    ctx.obj = agent
-    inject_contrib_pysite(verify_openssl=True)
+    inject_contrib_pysite()


@cli.group("agent", short_help="Start a new agent or list active")


@@ -14,7 +14,6 @@

import json
import platform
-import subprocess
import sys

import click

@@ -22,6 +21,7 @@ from tabulate import tabulate

from platformio import __version__, compat, fs, proc, util
from platformio.commands.system.completion import (
+    ShellType,
    get_completion_install_path,
    install_completion_code,
    uninstall_completion_code,

@@ -150,23 +150,11 @@ def system_prune(force, dry_run, cache, core_packages, platform_packages):

@cli.group("completion", short_help="Shell completion support")
def completion():
-    # pylint: disable=import-error,import-outside-toplevel
-    try:
-        import click_completion  # pylint: disable=unused-import,unused-variable
-    except ImportError:
-        click.echo("Installing dependent packages...")
-        subprocess.check_call(
-            [proc.get_pythonexe_path(), "-m", "pip", "install", "click-completion"],
-        )
+    pass


@completion.command("install", short_help="Install shell completion files/code")
-@click.option(
-    "--shell",
-    default=None,
-    type=click.Choice(["fish", "bash", "zsh", "powershell", "auto"]),
-    help="The shell type, default=auto",
-)
+@click.argument("shell", type=click.Choice([t.value for t in ShellType]))
@click.option(
    "--path",
    type=click.Path(file_okay=True, dir_okay=False, readable=True, resolve_path=True),

@@ -174,26 +162,18 @@ def completion():
    "The standard installation path is used by default.",
)
def completion_install(shell, path):
-    import click_completion  # pylint: disable=import-outside-toplevel,import-error
-
-    shell = shell or click_completion.get_auto_shell()
+    shell = ShellType(shell)
    path = path or get_completion_install_path(shell)
    install_completion_code(shell, path)
    click.echo(
        "PlatformIO CLI completion has been installed for %s shell to %s \n"
        "Please restart a current shell session."
-        % (click.style(shell, fg="cyan"), click.style(path, fg="blue"))
+        % (click.style(shell.name, fg="cyan"), click.style(path, fg="blue"))
    )


@completion.command("uninstall", short_help="Uninstall shell completion files/code")
-@click.option(
-    "--shell",
-    default=None,
-    type=click.Choice(["fish", "bash", "zsh", "powershell", "auto"]),
-    help="The shell type, default=auto",
-)
+@click.argument("shell", type=click.Choice([t.value for t in ShellType]))
@click.option(
    "--path",
    type=click.Path(file_okay=True, dir_okay=False, readable=True, resolve_path=True),

@@ -201,14 +181,11 @@ def completion_install(shell, path):
    "The standard installation path is used by default.",
)
def completion_uninstall(shell, path):
-    import click_completion  # pylint: disable=import-outside-toplevel,import-error
-
-    shell = shell or click_completion.get_auto_shell()
+    shell = ShellType(shell)
    path = path or get_completion_install_path(shell)
    uninstall_completion_code(shell, path)
    click.echo(
        "PlatformIO CLI completion has been uninstalled for %s shell from %s \n"
        "Please restart a current shell session."
-        % (click.style(shell, fg="cyan"), click.style(path, fg="blue"))
+        % (click.style(shell.name, fg="cyan"), click.style(path, fg="blue"))
    )
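With the reworked commands above, the shell type is now a required positional argument instead of the old ``--shell`` option, and only the values defined by ``ShellType`` (``bash``, ``zsh``, ``fish``) are accepted; the ``--path`` option remains optional. A usage sketch, assuming the ``pio`` entry point:

    pio system completion install bash
    pio system completion uninstall bash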


@@ -13,61 +13,75 @@
# limitations under the License.

import os
-import subprocess
+from enum import Enum

import click

+from platformio.compat import IS_MACOS
+
+
+class ShellType(Enum):
+    FISH = "fish"
+    ZSH = "zsh"
+    BASH = "bash"
+

def get_completion_install_path(shell):
    home_dir = os.path.expanduser("~")
    prog_name = click.get_current_context().find_root().info_name
-    if shell == "fish":
+    if shell == ShellType.FISH:
        return os.path.join(
            home_dir, ".config", "fish", "completions", "%s.fish" % prog_name
        )
-    if shell == "bash":
-        return os.path.join(home_dir, ".bash_completion")
-    if shell == "zsh":
+    if shell == ShellType.ZSH:
        return os.path.join(home_dir, ".zshrc")
-    if shell == "powershell":
-        return subprocess.check_output(
-            ["powershell", "-NoProfile", "echo $profile"]
-        ).strip()
+    if shell == ShellType.BASH:
+        return os.path.join(home_dir, ".bash_completion")
+    raise click.ClickException("%s is not supported." % shell)
+
+
+def get_completion_code(shell):
+    if shell == ShellType.FISH:
+        return "eval (env _PIO_COMPLETE=fish_source pio)"
+    if shell == ShellType.ZSH:
+        code = "autoload -Uz compinit\ncompinit\n" if IS_MACOS else ""
+        return code + 'eval "$(_PIO_COMPLETE=zsh_source pio)"'
+    if shell == ShellType.BASH:
+        return 'eval "$(_PIO_COMPLETE=bash_source pio)"'
    raise click.ClickException("%s is not supported." % shell)


def is_completion_code_installed(shell, path):
-    if shell == "fish" or not os.path.exists(path):
+    if shell == ShellType.FISH or not os.path.exists(path):
        return False

-    import click_completion  # pylint: disable=import-error,import-outside-toplevel
-
    with open(path, encoding="utf8") as fp:
-        return click_completion.get_code(shell=shell) in fp.read()
+        return get_completion_code(shell) in fp.read()


def install_completion_code(shell, path):
-    import click_completion  # pylint: disable=import-error,import-outside-toplevel
-
    if is_completion_code_installed(shell, path):
        return None

-    return click_completion.install(shell=shell, path=path, append=shell != "fish")
+    append = shell != ShellType.FISH
+    with open(path, mode="a" if append else "w", encoding="utf8") as fp:
+        if append:
+            fp.write("\n\n# Begin: PlatformIO Core completion support\n")
+        fp.write(get_completion_code(shell))
+        if append:
+            fp.write("\n# End: PlatformIO Core completion support\n\n")
+    return True


def uninstall_completion_code(shell, path):
    if not os.path.exists(path):
        return True
-    if shell == "fish":
+    if shell == ShellType.FISH:
        os.remove(path)
        return True

-    import click_completion  # pylint: disable=import-error,import-outside-toplevel
-
    with open(path, "r+", encoding="utf8") as fp:
        contents = fp.read()
        fp.seek(0)
        fp.truncate()
-        fp.write(contents.replace(click_completion.get_code(shell=shell), ""))
+        fp.write(contents.replace(get_completion_code(shell), ""))

    return True


@@ -61,7 +61,7 @@ class GDBClientProcess(DebugClientProcess):
    def _get_data_dir(gdb_path):
        if "msp430" in gdb_path:
            return None
-        gdb_data_dir = os.path.realpath(
+        gdb_data_dir = os.path.abspath(
            os.path.join(os.path.dirname(gdb_path), "..", "share", "gdb")
        )
        return gdb_data_dir if os.path.isdir(gdb_data_dir) else None


@@ -24,7 +24,7 @@ import sys

import click

-from platformio import exception
+from platformio import exception, proc
from platformio.compat import IS_WINDOWS

@@ -41,7 +41,7 @@ class cd(object):

def get_source_dir():
-    curpath = os.path.realpath(__file__)
+    curpath = os.path.abspath(__file__)
    if not os.path.isfile(curpath):
        for p in sys.path:
            if os.path.isfile(os.path.join(p, __file__)):

@@ -119,7 +119,7 @@ def ensure_udev_rules():
    if not any(os.path.isfile(p) for p in installed_rules):
        raise exception.MissedUdevRules

-    origin_path = os.path.realpath(
+    origin_path = os.path.abspath(
        os.path.join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules")
    )
    if not os.path.isfile(origin_path):

@@ -181,6 +181,25 @@ def to_unix_path(path):
    return re.sub(r"[\\]+", "/", path)


+def normalize_path(path):
+    path = os.path.abspath(path)
+    if not IS_WINDOWS or not path.startswith("\\\\"):
+        return path
+    try:
+        result = proc.exec_command(["net", "use"])
+        if result["returncode"] != 0:
+            return path
+        share_re = re.compile(r"\s([A-Z]\:)\s+(\\\\[^\s]+)")
+        for line in result["out"].split("\n"):
+            share = share_re.search(line)
+            if not share:
+                continue
+            path = path.replace(share.group(2), share.group(1))
+    except OSError:
+        pass
+    return path
+
+
def expanduser(path):
    """
    Be compatible with Python 3.8, on Windows skip HOME and check for USERPROFILE
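A rough sketch of what the new ``normalize_path()`` helper does (the share and drive letter below are hypothetical): on Windows it rewrites a UNC project path to the drive letter reported by ``net use``; on other platforms, or when ``net use`` fails, it simply returns the absolute path.

    from platformio import fs

    # assuming "net use" reports:  Z:  \\fileserver\projects
    fs.normalize_path(r"\\fileserver\projects\blink")  # -> 'Z:\\blink'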


@@ -48,7 +48,7 @@ class LockFile(object):
    def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
        self.timeout = timeout
        self.delay = delay
-        self._lock_path = os.path.realpath(path) + ".lock"
+        self._lock_path = os.path.abspath(path) + ".lock"
        self._fp = None

    def _lock(self):


@@ -252,9 +252,9 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods
        # external "URL" mismatch
        if spec.external:
            # local folder mismatch
-            if os.path.realpath(spec.url) == os.path.realpath(pkg.path) or (
+            if os.path.abspath(spec.url) == os.path.abspath(pkg.path) or (
                spec.url.startswith("file://")
-                and os.path.realpath(pkg.path) == os.path.realpath(spec.url[7:])
+                and os.path.abspath(pkg.path) == os.path.abspath(spec.url[7:])
            ):
                return True
            if spec.url != pkg.metadata.spec.url:


@@ -20,6 +20,7 @@ import sys
from datetime import date

from platformio import __core_packages__, exception, fs, util
+from platformio.exception import UserSideException
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageItem, PackageSpec

@@ -101,7 +102,7 @@ def remove_unnecessary_core_packages(dry_run=False):
    return candidates


-def inject_contrib_pysite(verify_openssl=False):
+def inject_contrib_pysite():
    # pylint: disable=import-outside-toplevel
    from site import addsitedir

@@ -119,12 +120,10 @@ def inject_contrib_pysite(verify_openssl=False):
    addsitedir(contrib_pysite_dir)
    sys.path.insert(0, contrib_pysite_dir)

-    if not verify_openssl:
-        return True
-
    try:
        # pylint: disable=import-error,unused-import,unused-variable
        from OpenSSL import SSL
    except:  # pylint: disable=bare-except
        build_contrib_pysite_package(contrib_pysite_dir)

@@ -152,8 +151,15 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
    ]
    if "linux" in systype:
        args.extend(["--no-binary", ":all:"])
-    for dep in get_contrib_pysite_deps():
-        subprocess.check_call(args + [dep])
+    try:
+        subprocess.run(args + get_contrib_pysite_deps(), check=True)
+    except subprocess.CalledProcessError as exc:
+        if "linux" in systype:
+            raise UserSideException(
+                "\n\nPlease ensure that the next packages are installed:\n\n"
+                "sudo apt install python3-dev libffi-dev libssl-dev\n"
+            )
+        raise exc

    # build manifests
    with open(

@@ -206,25 +212,18 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):

def get_contrib_pysite_deps():
-    sys_type = util.get_systype()
-    py_version = "%d%d" % (sys.version_info.major, sys.version_info.minor)
-
    twisted_version = "20.3.0"
    result = [
+        # twisted[tls], see setup.py for %twisted_version%
        "twisted == %s" % twisted_version,
+        # pyopenssl depends on it, use RUST-less version
+        "cryptography >= 3.3, < 35.0.0",
+        "pyopenssl >= 16.0.0, <= 21.0.0",
+        "service_identity >= 18.1.0, <= 21.1.0",
    ]

-    # twisted[tls], see setup.py for %twisted_version%
-    result.extend(
-        [
-            # pyopenssl depends on it, use RUST-less version
-            "cryptography >= 3.3, < 35.0.0",
-            "pyopenssl >= 16.0.0",
-            "service_identity >= 18.1.0",
-            "idna >= 0.6, != 2.3",
-        ]
-    )
+    sys_type = util.get_systype()
+    py_version = "%d%d" % (sys.version_info.major, sys.version_info.minor)

    if "windows" in sys_type:
        result.append("pypiwin32 == 223")
        # workaround for twisted wheels


@@ -167,7 +167,7 @@ class BaseManifestParser(object):
        return self._data

    @staticmethod
-    def str_to_list(value, sep=",", lowercase=True):
+    def str_to_list(value, sep=",", lowercase=False, unique=False):
        if isinstance(value, string_types):
            value = value.split(sep)
        assert isinstance(value, list)

@@ -178,6 +178,8 @@ class BaseManifestParser(object):
                continue
            if lowercase:
                item = item.lower()
+            if unique and item in result:
+                continue
            result.append(item)
        return result
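A quick illustration of the reworked helper, mirroring the ``"kw1, KW2, kw3, KW2"`` fixture used in the tests further below:

    from platformio.package.manifest.parser import BaseManifestParser

    BaseManifestParser.str_to_list("kw1, KW2, kw3, KW2", sep=",", lowercase=True, unique=True)
    # -> ["kw1", "kw2", "kw3"]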
@@ -323,12 +325,16 @@ class LibraryJsonManifestParser(BaseManifestParser):
        # normalize Union[str, list] fields
        for k in ("keywords", "platforms", "frameworks"):
            if k in data:
-                data[k] = self.str_to_list(data[k], sep=",")
+                data[k] = self.str_to_list(
+                    data[k], sep=",", lowercase=True, unique=True
+                )
+
+        if "headers" in data:
+            data["headers"] = self.str_to_list(data["headers"], sep=",", unique=True)

        if "authors" in data:
            data["authors"] = self._parse_authors(data["authors"])
        if "platforms" in data:
-            data["platforms"] = self._parse_platforms(data["platforms"]) or None
+            data["platforms"] = self._fix_platforms(data["platforms"]) or None
        if "export" in data:
            data["export"] = self._parse_export(data["export"])
        if "dependencies" in data:

@@ -361,15 +367,11 @@ class LibraryJsonManifestParser(BaseManifestParser):
        return [self.cleanup_author(author) for author in raw]

    @staticmethod
-    def _parse_platforms(raw):
-        assert isinstance(raw, list)
-        result = []
-        # renamed platforms
-        for item in raw:
-            if item == "espressif":
-                item = "espressif8266"
-            result.append(item)
-        return result
+    def _fix_platforms(items):
+        assert isinstance(items, list)
+        if "espressif" in items:
+            items[items.index("espressif")] = "espressif8266"
+        return items

    @staticmethod
    def _parse_export(raw):

@@ -430,7 +432,9 @@ class ModuleJsonManifestParser(BaseManifestParser):
        if "dependencies" in data:
            data["dependencies"] = self._parse_dependencies(data["dependencies"])
        if "keywords" in data:
-            data["keywords"] = self.str_to_list(data["keywords"], sep=",")
+            data["keywords"] = self.str_to_list(
+                data["keywords"], sep=",", lowercase=True, unique=True
+            )
        return data

    def _parse_authors(self, raw):
@@ -475,11 +479,13 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
                homepage=homepage,
                repository=repository or None,
                description=self._parse_description(data),
-                platforms=self._parse_platforms(data) or ["*"],
-                keywords=self._parse_keywords(data),
+                platforms=self._parse_platforms(data) or None,
+                keywords=self._parse_keywords(data) or None,
                export=self._parse_export(),
            )
        )
+        if "includes" in data:
+            data["headers"] = self.str_to_list(data["includes"], sep=",", unique=True)
        if "author" in data:
            data["authors"] = self._parse_authors(data)
        for key in ("author", "maintainer"):

@@ -511,22 +517,24 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
        for k in ("sentence", "paragraph"):
            if k in properties and properties[k] not in lines:
                lines.append(properties[k])
-        if len(lines) == 2 and not lines[0].endswith("."):
-            lines[0] += "."
+        if len(lines) == 2:
+            if not lines[0].endswith("."):
+                lines[0] += "."
+            if len(lines[0]) + len(lines[1]) >= 1000:
+                del lines[1]
        return " ".join(lines)

-    @staticmethod
-    def _parse_keywords(properties):
-        result = []
-        for item in re.split(r"[\s/]+", properties.get("category", "uncategorized")):
-            item = item.strip()
-            if not item:
-                continue
-            result.append(item.lower())
-        return result
+    def _parse_keywords(self, properties):
+        return self.str_to_list(
+            re.split(
+                r"[\s/]+",
+                properties.get("category", ""),
+            ),
+            lowercase=True,
+            unique=True,
+        )

-    @staticmethod
-    def _parse_platforms(properties):
+    def _parse_platforms(self, properties):
        result = []
        platforms_map = {
            "avr": "atmelavr",

@@ -547,7 +555,7 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
            return ["*"]
        if arch in platforms_map:
            result.append(platforms_map[arch])
-        return result
+        return self.str_to_list(result, lowercase=True, unique=True)

    def _parse_authors(self, properties):
        if "author" not in properties:
@@ -643,24 +651,31 @@ class PlatformJsonManifestParser(BaseManifestParser):
    def parse(self, contents):
        data = json.loads(contents)
        if "keywords" in data:
-            data["keywords"] = self.str_to_list(data["keywords"], sep=",")
+            data["keywords"] = self.str_to_list(
+                data["keywords"], sep=",", lowercase=True, unique=True
+            )
        if "frameworks" in data:
-            data["frameworks"] = self._parse_frameworks(data["frameworks"])
+            data["frameworks"] = (
+                self.str_to_list(
+                    list(data["frameworks"].keys()), lowercase=True, unique=True
+                )
+                if isinstance(data["frameworks"], dict)
+                else None
+            )
        if "packages" in data:
            data["dependencies"] = self._parse_dependencies(data["packages"])
        return data

-    @staticmethod
-    def _parse_frameworks(raw):
-        if not isinstance(raw, dict):
-            return None
-        return [name.lower() for name in raw.keys()]
-
    @staticmethod
    def _parse_dependencies(raw):
-        return [
-            dict(name=name, version=opts.get("version")) for name, opts in raw.items()
-        ]
+        result = []
+        for name, opts in raw.items():
+            item = {"name": name}
+            for k in ("owner", "version"):
+                if k in opts:
+                    item[k] = opts[k]
+            result.append(item)
+        return result


class PackageJsonManifestParser(BaseManifestParser):

@@ -669,22 +684,21 @@ class PackageJsonManifestParser(BaseManifestParser):
    def parse(self, contents):
        data = json.loads(contents)
        if "keywords" in data:
-            data["keywords"] = self.str_to_list(data["keywords"], sep=",")
+            data["keywords"] = self.str_to_list(
+                data["keywords"], sep=",", lowercase=True, unique=True
+            )
        data = self._parse_system(data)
        data = self._parse_homepage(data)
        data = self._parse_repository(data)
        return data

-    @staticmethod
-    def _parse_system(data):
+    def _parse_system(self, data):
        if "system" not in data:
            return data
        if data["system"] in ("*", ["*"], "all"):
            del data["system"]
            return data
-        if not isinstance(data["system"], list):
-            data["system"] = [data["system"]]
-        data["system"] = [s.strip().lower() for s in data["system"]]
+        data["system"] = self.str_to_list(data["system"], lowercase=True, unique=True)
        return data

    @staticmethod


@@ -33,7 +33,6 @@ if MARSHMALLOW_2:

    class CompatSchema(Schema):
        pass

else:

    class CompatSchema(Schema):

@@ -209,6 +208,13 @@ class ManifestSchema(BaseSchema):
            ]
        )
    )
+    headers = StrictListField(
+        fields.Str(
+            validate=[
+                validate.Length(min=1, max=255),
+            ]
+        )
+    )

    # platform.json specific
    title = fields.Str(validate=validate.Length(min=1, max=100))

@@ -253,7 +259,7 @@ class ManifestSchema(BaseSchema):
    @staticmethod
    @memoized(expire="1h")
    def load_spdx_licenses():
-        version = "3.14"
+        version = "3.15"
        spdx_data_url = (
            "https://raw.githubusercontent.com/spdx/license-list-data/"
            "v%s/json/licenses.json" % version


@@ -42,6 +42,7 @@ class PackagePacker(object):
        ".vscode",
        ".cache",
        "**/.cache",
+        "**/__pycache__",
        # VCS
        ".git/",
        ".hg/",


@@ -271,17 +271,16 @@ class ProjectConfigBase(object):
            if value == MISSING:
                value = ""
            value += ("\n" if value else "") + envvar_value
-        elif envvar_value and value == MISSING:
+        elif envvar_value:
            value = envvar_value

        if value == MISSING:
            value = default if default != MISSING else option_meta.default
+            if callable(value):
+                value = value()
        if value == MISSING:
            return None

-        if option_meta.validate:
-            value = option_meta.validate(value)
-
        return self._expand_interpolations(value)

    def _expand_interpolations(self, value):

@@ -318,6 +317,8 @@ class ProjectConfigBase(object):
        if not option_meta:
            return value

+        if option_meta.validate:
+            value = option_meta.validate(value)
        if option_meta.multiple:
            value = self.parse_multi_values(value or [])

        try:
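A rough sketch of the changed precedence (issue #4125): a scalar option supplied through its system environment variable now overrides the value stored in ``platformio.ini``. ``PLATFORMIO_UPLOAD_PORT`` is the documented ``sysenvvar`` for ``upload_port``; the project below is hypothetical.

    import os
    from platformio.project.config import ProjectConfig

    # platformio.ini:
    #   [env:uno]
    #   upload_port = COM3
    os.environ["PLATFORMIO_UPLOAD_PORT"] = "/dev/ttyUSB1"
    config = ProjectConfig("platformio.ini")
    # the new "elif envvar_value:" branch lets the environment variable win
    print(config.get("env:uno", "upload_port"))  # -> /dev/ttyUSB1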


@@ -82,7 +82,7 @@ class ProjectGenerator(object):
            "project_dir": self.project_dir,
            "original_env_name": self.original_env_name,
            "env_name": self.env_name,
-            "user_home_dir": os.path.realpath(fs.expanduser("~")),
+            "user_home_dir": os.path.abspath(fs.expanduser("~")),
            "platformio_path": sys.argv[0]
            if os.path.isfile(sys.argv[0])
            else where_is_program("platformio"),

@@ -125,7 +125,9 @@ class ProjectGenerator(object):
        with fs.cd(self.project_dir):
            for root, _, files in os.walk(self.config.get("platformio", "src_dir")):
                for f in files:
-                    result.append(os.path.relpath(os.path.join(root, f)))
+                    result.append(
+                        os.path.relpath(os.path.join(os.path.realpath(root), f))
+                    )
        return result

    def get_tpls(self):


@@ -24,7 +24,7 @@ from platformio.project.config import ProjectConfig

def get_project_dir():
-    return os.getcwd()
+    return fs.normalize_path(os.getcwd())


def is_platformio_project(project_dir=None):


@@ -60,7 +60,7 @@ class ConfigOption(object):  # pylint: disable=too-many-instance-attributes
            type="string",
            multiple=self.multiple,
            sysenvvar=self.sysenvvar,
-            default=self.default,
+            default=self.default() if callable(self.default) else self.default,
        )
        if isinstance(self.type, click.ParamType):
            result["type"] = self.type.name

@@ -114,17 +114,16 @@ def validate_dir(path):
        path = fs.expanduser(path)
    if "$" in path:
        path = expand_dir_templates(path)
-    return os.path.realpath(path)
+    return fs.normalize_path(path)


-def validate_core_dir(path):
-    default_dir = ProjectOptions["platformio.core_dir"].default
-    win_core_dir = None
-    if IS_WINDOWS and path == default_dir:
+def get_default_core_dir():
+    path = os.path.join(fs.expanduser("~"), ".platformio")
+    if IS_WINDOWS:
        win_core_dir = os.path.splitdrive(path)[0] + "\\.platformio"
        if os.path.isdir(win_core_dir):
-            path = win_core_dir
-    return validate_dir(path)
+            return win_core_dir
+    return path


ProjectOptions = OrderedDict(

@@ -169,8 +168,8 @@ ProjectOptions = OrderedDict(
        ),
        oldnames=["home_dir"],
        sysenvvar="PLATFORMIO_CORE_DIR",
-        default=os.path.join(fs.expanduser("~"), ".platformio"),
-        validate=validate_core_dir,
+        default=get_default_core_dir,
+        validate=validate_dir,
    ),
    ConfigPlatformioOption(
        group="directory",


@@ -8,15 +8,14 @@
% import os
% import re
%
-% from platformio.compat import WINDOWS
-% from platformio.project.helpers import (load_project_ide_data)
+% from platformio.project.helpers import load_project_ide_data
%
% def _normalize_path(path):
% if project_dir in path:
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
% elif user_home_dir in path:
% if "windows" in systype:
-% path = path.replace(user_home_dir, "$ENV{HOMEDRIVE}$ENV{HOMEPATH}")
+% path = path.replace(user_home_dir, "${ENV_HOME_PATH}")
% else:
% path = path.replace(user_home_dir, "$ENV{HOME}")
% end

@@ -54,6 +53,11 @@ set(CMAKE_CONFIGURATION_TYPES "{{ ";".join(envs) }};" CACHE STRING "Build Types
set(CMAKE_CONFIGURATION_TYPES "{{ env_name }}" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
% end

+# Convert "Home Directory" that may contain unescaped backslashes on Windows
+% if "windows" in systype:
+file(TO_CMAKE_PATH $ENV{HOMEDRIVE}$ENV{HOMEPATH} ENV_HOME_PATH)
+% end
+
% if svd_path:
set(CLION_SVD_FILE_PATH "{{ _normalize_path(svd_path) }}" CACHE FILEPATH "Peripheral Registers Definitions File" FORCE)
% end

@@ -75,7 +79,7 @@ set(CMAKE_CXX_STANDARD {{ cxx_stds[-1] }})
if (CMAKE_BUILD_TYPE MATCHES "{{ env_name }}")
% for define in defines:
-    add_definitions(-D{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}})
+    add_definitions(-D{{!re.sub(r"([\"\(\)\ #])", r"\\\1", define)}})
% end

% for include in filter_includes(includes):

@@ -99,7 +103,7 @@ endif()
% for env, data in ide_data.items():
if (CMAKE_BUILD_TYPE MATCHES "{{ env }}")
% for define in data["defines"]:
-    add_definitions(-D{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}})
+    add_definitions(-D{{!re.sub(r"([\"\(\)\ #])", r"\\\1", define)}})
% end

% for include in filter_includes(data["includes"]):


@@ -30,8 +30,8 @@
      <makefileType>
        <makeTool>
          <buildCommandWorkingDir>.</buildCommandWorkingDir>
-          <buildCommand>{{platformio_path}} -f -c netbeans run</buildCommand>
-          <cleanCommand>{{platformio_path}} -f -c netbeans run --target clean</cleanCommand>
+          <buildCommand>"{{platformio_path}}" -f -c netbeans run</buildCommand>
+          <cleanCommand>"{{platformio_path}}" -f -c netbeans run --target clean</cleanCommand>
          <executablePath></executablePath>
          <cTool>
% cleaned_includes = filter_includes(includes)


@@ -92,8 +92,8 @@ def singleton(cls):

def get_systype():
    type_ = platform.system().lower()
    arch = platform.machine().lower()
-    if type_ == "windows":
-        arch = "amd64" if platform.architecture()[0] == "64bit" else "x86"
+    if type_ == "windows" and "x86" in arch:
+        arch = "amd64" if "64" in arch else "x86"
    return "%s_%s" % (type_, arch) if arch else type_


@@ -28,7 +28,7 @@ from platformio.compat import PY2

minimal_requirements = [
    "bottle==0.12.*",
-    "click>=7.1.2,<9,!=8.0.2",
+    "click>=8,<9,!=8.0.2",
    "colorama",
    "marshmallow%s" % (">=2,<3" if PY2 else ">=2,<4"),
    "pyelftools>=0.27,<1",

@@ -39,13 +39,13 @@ minimal_requirements = [
]

if not PY2:
-    minimal_requirements.append("zeroconf==0.36.*")
+    minimal_requirements.append("zeroconf==0.37.*")

home_requirements = [
-    "aiofiles==0.7.*",
+    "aiofiles==0.8.*",
    "ajsonrpc==1.*",
    "starlette==0.17.*",
-    "uvicorn==0.15.*",
+    "uvicorn==0.16.*",
    "wsproto==1.0.*",
]


@@ -16,6 +16,10 @@ import json
from os import getcwd, makedirs
from os.path import getsize, isdir, isfile, join

+import pytest
+
+from platformio import proc
+from platformio.commands import platform as cli_platform
from platformio.commands.boards import cli as cmd_boards
from platformio.commands.project import project_init as cmd_init
from platformio.project.config import ProjectConfig

@@ -177,3 +181,83 @@ def test_init_incorrect_board(clirunner):
    assert result.exit_code == 2
    assert "Error: Invalid value for" in result.output
    assert isinstance(result.exception, SystemExit)
+
+
+@pytest.mark.skipif(not proc.is_ci(), reason="runs on CI")
+def test_init_ide_clion(clirunner, isolated_pio_core, validate_cliresult, tmpdir):
+    result = clirunner.invoke(
+        cli_platform.platform_install,
+        [
+            "ststm32",
+            "--skip-default-package",
+            "--with-package",
+            "tool-cmake",
+            "--with-package",
+            "tool-ninja",
+        ],
+    )
+
+    # Add extra libraries to cover cases with possible unwanted backslashes
+    lib_extra_dirs = isolated_pio_core.join("extra_libs").mkdir()
+    extra_lib = lib_extra_dirs.join("extra_lib").mkdir()
+    extra_lib.join("extra_lib.h").write(" ")
+    extra_lib.join("extra_lib.cpp").write(" ")
+
+    with tmpdir.as_cwd():
+        result = clirunner.invoke(
+            cmd_init,
+            [
+                "-b",
+                "nucleo_f401re",
+                "--ide",
+                "clion",
+                "--project-option",
+                "framework=arduino",
+                "--project-option",
+                "lib_extra_dirs=%s" % str(lib_extra_dirs),
+            ],
+        )
+        validate_cliresult(result)
+        assert all(isfile(f) for f in ("CMakeLists.txt", "CMakeListsPrivate.txt"))
+
+        tmpdir.join("src").join("main.cpp").write(
+            """#include <Arduino.h>
+#include "extra_lib.h"
+void setup(){}
+void loop(){}
+"""
+        )
+
+        cmake_path = str(
+            isolated_pio_core.join("packages")
+            .join("tool-cmake")
+            .join("bin")
+            .join("cmake")
+        )
+        tmpdir.join("build_dir").mkdir()
+        result = proc.exec_command(
+            [
+                cmake_path,
+                "-DCMAKE_BUILD_TYPE=nucleo_f401re",
+                "-DCMAKE_MAKE_PROGRAM=%s"
+                % str(
+                    isolated_pio_core.join("packages").join("tool-ninja").join("ninja")
+                ),
+                "-G",
+                "Ninja",
+                "-S",
+                str(tmpdir),
+                "-B",
+                "build_dir",
+            ]
+        )
+
+        # Check if CMake was able to generate a native project for Ninja
+        assert result["returncode"] == 0, result["out"]
+
+        result = proc.exec_command(
+            [cmake_path, "--build", "build_dir", "--target", "Debug"]
+        )
+        assert result["returncode"] == 0
+        assert "[SUCCESS]" in str(result["out"])


@@ -199,10 +199,10 @@ def test_install_from_registry(isolated_pio_core, tmpdir_factory):
    # Libraries
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
    # library with dependencies
-    lm.install("AsyncMqttClient-esphome @ 0.8.4", silent=True)
+    lm.install("AsyncMqttClient-esphome @ 0.8.6", silent=True)
    assert len(lm.get_installed()) == 3
    pkg = lm.get_package("AsyncTCP-esphome")
-    assert pkg.metadata.spec.owner == "ottowinter"
+    assert pkg.metadata.spec.owner == "esphome"
    assert not lm.get_package("non-existing-package")
    # mbed library
    assert lm.install("wolfSSL", silent=True)

@@ -214,8 +214,8 @@ def test_install_from_registry(isolated_pio_core, tmpdir_factory):

    # test conflicted names
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("conflicted-storage")))
-    lm.install("4@2.6.1", silent=True)
-    lm.install("5357@2.6.1", silent=True)
+    lm.install("z3t0/IRremote@2.6.1", silent=True)
+    lm.install("mbed-yuhki50/IRremote", silent=True)
    assert len(lm.get_installed()) == 2

    # Tools


@@ -28,7 +28,8 @@ def test_library_json_parser():
    contents = """
{
    "name": "TestPackage",
-    "keywords": "kw1, KW2, kw3",
+    "keywords": "kw1, KW2, kw3, KW2",
+    "headers": "include1.h, Include2.hpp",
    "platforms": ["atmelavr", "espressif"],
    "repository": {
        "type": "git",

@@ -62,6 +63,7 @@ def test_library_json_parser():
            },
            "export": {"exclude": [".gitignore", "tests"], "include": ["mylib"]},
            "keywords": ["kw1", "kw2", "kw3"],
+            "headers": ["include1.h", "Include2.hpp"],
            "homepage": "http://old.url.format",
            "build": {"flags": ["-DHELLO"]},
            "dependencies": [

@@ -76,8 +78,8 @@ def test_library_json_parser():
    contents = """
{
    "keywords": ["sound", "audio", "music", "SD", "card", "playback"],
+    "headers": ["include 1.h", "include Space.hpp"],
    "frameworks": "arduino",
-    "platforms": "atmelavr",
    "export": {
        "exclude": "audio_samples"
    },

@@ -94,9 +96,9 @@ def test_library_json_parser():
        raw_data,
        {
            "keywords": ["sound", "audio", "music", "sd", "card", "playback"],
+            "headers": ["include 1.h", "include Space.hpp"],
            "frameworks": ["arduino"],
            "export": {"exclude": ["audio_samples"]},
-            "platforms": ["atmelavr"],
            "dependencies": [
                {"name": "deps1", "version": "1.0.0"},
                {
@@ -202,9 +204,11 @@ version=1.2.3
author=SomeAuthor <info AT author.com>, Maintainer Author (nickname) <www.example.com>
maintainer=Maintainer Author (nickname) <www.example.com>
sentence=This is Arduino library
+category=Signal Input/Output
customField=Custom Value
depends=First Library (=2.0.0), Second Library (>=1.2.0), Third
ignore_empty_field=
+includes=Arduino.h, Arduino Space.hpp
"""
raw_data = parser.LibraryPropertiesManifestParser(contents).as_dict()
raw_data["dependencies"] = sorted(raw_data["dependencies"], key=lambda a: a["name"])
@@ -215,7 +219,6 @@ ignore_empty_field=
"version": "1.2.3",
"description": "This is Arduino library",
"sentence": "This is Arduino library",
-"platforms": ["*"],
"frameworks": ["arduino"],
"export": {
"exclude": ["extras", "docs", "tests", "test", "*.doxyfile", "*.pdf"]
@@ -224,7 +227,10 @@ ignore_empty_field=
{"name": "SomeAuthor", "email": "info@author.com"},
{"name": "Maintainer Author", "maintainer": True},
],
-"keywords": ["uncategorized"],
+"category": "Signal Input/Output",
+"keywords": ["signal", "input", "output"],
+"headers": ["Arduino.h", "Arduino Space.hpp"],
+"includes": "Arduino.h, Arduino Space.hpp",
"customField": "Custom Value",
"depends": "First Library (=2.0.0), Second Library (>=1.2.0), Third",
"dependencies": [
@@ -291,6 +297,7 @@ maintainer=Rocket Scream Electronics
assert data["authors"] == [
{"name": "Rocket Scream Electronics", "maintainer": True}
]
+assert "keywords" not in data

def test_library_json_schema():
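These hunks cover the library.properties flavor: the Arduino "includes" field is now mirrored into the normalized "headers" list, and "category" feeds the keywords instead of the old "uncategorized" placeholder. A short sketch built around the same parser call that appears in the hunk above (the minimal manifest content is illustrative):

    from platformio.package.manifest import parser

    contents = """
    name=TestPackage
    version=1.2.3
    category=Signal Input/Output
    includes=Arduino.h, Arduino Space.hpp
    """

    data = parser.LibraryPropertiesManifestParser(contents).as_dict()
    print(data["headers"])   # expected: ["Arduino.h", "Arduino Space.hpp"]
    print(data["keywords"])  # expected: ["signal", "input", "output"]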
@@ -434,7 +441,7 @@ sentence=A library for monochrome TFTs and OLEDs
paragraph=Supported display controller: SSD1306, SSD1309, SSD1322, SSD1325
category=Display
url=https://github.com/olikraus/u8glib
-architectures=avr,sam
+architectures=avr,sam,samd
depends=First Library (=2.0.0), Second Library (>=1.2.0), Third
"""
raw_data = parser.ManifestParserFactory.new(
@@ -530,6 +537,7 @@ includes=MozziGuts.h
},
"platforms": ["*"],
"frameworks": ["arduino"],
+"headers": ["MozziGuts.h"],
"export": {
"exclude": ["extras", "docs", "tests", "test", "*.doxyfile", "*.pdf"]
},
@@ -552,7 +560,7 @@ def test_platform_json_schema():
"name": "atmelavr",
"title": "Atmel AVR",
"description": "Atmel AVR 8- and 32-bit MCUs deliver a unique combination of performance, power efficiency and design flexibility. Optimized to speed time to market-and easily adapt to new ones-they are based on the industry's most code-efficient architecture for C and assembly programming.",
-"keywords": "arduino, atmel, avr",
+"keywords": "arduino, atmel, avr, MCU",
"homepage": "http://www.atmel.com/products/microcontrollers/avr/default.aspx",
"license": "Apache-2.0",
"engines": {
@@ -576,6 +584,7 @@ def test_platform_json_schema():
"packages": {
"toolchain-atmelavr": {
"type": "toolchain",
+"owner": "platformio",
"version": "~1.50400.0"
},
"framework-arduinoavr": {
@@ -611,7 +620,7 @@ def test_platform_json_schema():
"on the industry's most code-efficient architecture for C and "
"assembly programming."
),
-"keywords": ["arduino", "atmel", "avr"],
+"keywords": ["arduino", "atmel", "avr", "mcu"],
"homepage": "http://www.atmel.com/products/microcontrollers/avr/default.aspx",
"license": "Apache-2.0",
"repository": {
@@ -623,7 +632,11 @@ def test_platform_json_schema():
"dependencies": [
{"name": "framework-arduinoavr", "version": "~4.2.0"},
{"name": "tool-avrdude", "version": "~1.60300.0"},
-{"name": "toolchain-atmelavr", "version": "~1.50400.0"},
+{
+    "name": "toolchain-atmelavr",
+    "owner": "platformio",
+    "version": "~1.50400.0",
+},
],
},
)
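The platform.json changes above introduce a per-package "owner" field and show it being carried through into the parsed "dependencies" list. A compact sketch of the data shape only, with values copied from the test fixture above and no specific parser call assumed:

    # Input: a platform manifest "packages" entry may now pin the registry owner.
    packages = {
        "toolchain-atmelavr": {
            "type": "toolchain",
            "owner": "platformio",
            "version": "~1.50400.0",
        },
    }

    # Expected normalized form: the package surfaces as a dependency
    # that keeps its owner alongside the name and version requirement.
    expected_dependency = {
        "name": "toolchain-atmelavr",
        "owner": "platformio",
        "version": "~1.50400.0",
    }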
@@ -636,7 +649,7 @@ def test_package_json_schema():
"description": "SCons software construction tool",
"keywords": "SCons, build",
"homepage": "http://www.scons.org",
-"system": ["linux_armv6l", "linux_armv7l", "linux_armv8l"],
+"system": ["linux_armv6l", "linux_armv7l", "linux_armv8l", "LINUX_ARMV7L"],
"version": "3.30101.0"
}
"""

View File

@@ -26,7 +26,8 @@ from platformio.project.exception import InvalidProjectConfError, UnknownEnvName
BASE_CONFIG = """
[platformio]
env_default = base, extra_2
-build_dir = ~/tmp/pio-$PROJECT_HASH
+src_dir = ${custom.src_dir}
+build_dir = ${custom.build_dir}
extra_configs =
extra_envs.ini
extra_debug.ini
@@ -53,6 +54,8 @@ extends = strict_ldf, monitor_custom
build_flags = -D RELEASE

[custom]
+src_dir = source
+build_dir = ~/tmp/pio-$PROJECT_HASH
debug_flags = -D RELEASE
lib_flags = -lc -lm
extra_flags = ${sysenv.__PIO_TEST_CNF_EXTRA_FLAGS}
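BASE_CONFIG now resolves "src_dir" and "build_dir" through ${custom.*} interpolation instead of hard-coding them in the [platformio] section. A minimal sketch of the same idea outside the test fixture, assuming ProjectConfig accepts a path to an INI file (the file written here is hypothetical):

    from platformio.project.config import ProjectConfig

    with open("platformio.ini", "w") as fp:
        fp.write(
            "[platformio]\n"
            "src_dir = ${custom.src_dir}\n"
            "build_dir = ${custom.build_dir}\n"
            "\n"
            "[custom]\n"
            "src_dir = source\n"
            "build_dir = ~/tmp/pio-$PROJECT_HASH\n"
        )

    config = ProjectConfig("platformio.ini")
    # The ${custom.*} references are resolved when the options are read.
    print(config.get("platformio", "src_dir"))
    print(config.get("platformio", "build_dir"))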
@@ -226,7 +229,7 @@ def test_sysenv_options(config):
"-DSYSENVDEPS1 -DSYSENVDEPS2",
]
assert config.get("env:base", "upload_port") == "/dev/sysenv/port"
-assert config.get("env:extra_2", "upload_port") == "/dev/extra_2/port"
+assert config.get("env:extra_2", "upload_port") == "/dev/sysenv/port"
assert config.get("env:base", "build_unflags") == ["-DREMOVE_MACRO"]

# env var as option
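The flipped assertion above is the behavioral change: a PLATFORMIO_* system environment variable now overrides a project option even for an environment that defines its own value. A hedged, self-contained sketch of that precedence, with a hypothetical project file and the same ProjectConfig usage as above:

    import os

    from platformio.project.config import ProjectConfig

    with open("platformio.ini", "w") as fp:
        fp.write(
            "[env:extra_2]\n"
            "upload_port = /dev/extra_2/port\n"
        )

    os.environ["PLATFORMIO_UPLOAD_PORT"] = "/dev/sysenv/port"
    config = ProjectConfig("platformio.ini")
    # The system environment variable is expected to take precedence
    # over the value written in the [env:extra_2] section.
    print(config.get("env:extra_2", "upload_port"))

    del os.environ["PLATFORMIO_UPLOAD_PORT"]  # clean up, mirroring the test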
@@ -244,10 +247,22 @@ def test_sysenv_options(config):
"upload_port",
]

-# sysenv
-custom_core_dir = os.path.join(os.getcwd(), "custom")
+# sysenv dirs
+custom_core_dir = os.path.join(os.getcwd(), "custom-core")
+custom_src_dir = os.path.join(os.getcwd(), "custom-src")
+custom_build_dir = os.path.join(os.getcwd(), "custom-build")
os.environ["PLATFORMIO_HOME_DIR"] = custom_core_dir
-assert config.get("platformio", "core_dir") == os.path.realpath(custom_core_dir)
+os.environ["PLATFORMIO_SRC_DIR"] = custom_src_dir
+os.environ["PLATFORMIO_BUILD_DIR"] = custom_build_dir
+assert os.path.realpath(config.get("platformio", "core_dir")) == os.path.realpath(
+    custom_core_dir
+)
+assert os.path.realpath(config.get("platformio", "src_dir")) == os.path.realpath(
+    custom_src_dir
+)
+assert os.path.realpath(config.get("platformio", "build_dir")) == os.path.realpath(
+    custom_build_dir
+)

# cleanup system environment variables
del os.environ["PLATFORMIO_BUILD_FLAGS"]
@@ -255,6 +270,8 @@ def test_sysenv_options(config):
del os.environ["PLATFORMIO_UPLOAD_PORT"]
del os.environ["__PIO_TEST_CNF_EXTRA_FLAGS"]
del os.environ["PLATFORMIO_HOME_DIR"]
+del os.environ["PLATFORMIO_SRC_DIR"]
+del os.environ["PLATFORMIO_BUILD_DIR"]

def test_getraw_value(config):
@@ -289,6 +306,7 @@ def test_getraw_value(config):
config.getraw("custom", "debug_server")
== f"\n{packages_dir}/tool-openocd/openocd\n--help"
)
+assert config.getraw("platformio", "build_dir") == "~/tmp/pio-$PROJECT_HASH"

def test_get_value(config):
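The new assertion distinguishes getraw() from get(): getraw() resolves the ${custom.build_dir} reference but otherwise returns the value as written, while get() is expected to additionally expand the home directory and the $PROJECT_HASH placeholder for directory options. A tiny sketch, assuming the interpolated platformio.ini from the earlier hypothetical snippet:

    from platformio.project.config import ProjectConfig

    config = ProjectConfig("platformio.ini")  # hypothetical file from the earlier sketch
    print(config.getraw("platformio", "build_dir"))  # "~/tmp/pio-$PROJECT_HASH"
    print(config.get("platformio", "build_dir"))     # expected: fully expanded build directory path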
@@ -319,10 +337,16 @@ def test_get_value(config):
os.path.join(DEFAULT_CORE_DIR, "packages/tool-openocd/openocd"),
"--help",
]
+# test relative dir
+assert config.get("platformio", "src_dir") == os.path.abspath(
+    os.path.join(os.getcwd(), "source")
+)

def test_items(config):
assert config.items("custom") == [
+("src_dir", "source"),
+("build_dir", "~/tmp/pio-$PROJECT_HASH"),
("debug_flags", "-D DEBUG=1"),
("lib_flags", "-lc -lm"),
("extra_flags", ""),
@@ -465,7 +489,8 @@ def test_dump(tmpdir_factory):
(
"platformio",
[
-("build_dir", "~/tmp/pio-$PROJECT_HASH"),
+("src_dir", "${custom.src_dir}"),
+("build_dir", "${custom.build_dir}"),
("extra_configs", ["extra_envs.ini", "extra_debug.ini"]),
("default_envs", ["base", "extra_2"]),
],
@@ -489,6 +514,8 @@ def test_dump(tmpdir_factory):
(
"custom",
[
+("src_dir", "source"),
+("build_dir", "~/tmp/pio-$PROJECT_HASH"),
("debug_flags", "-D RELEASE"),
("lib_flags", "-lc -lm"),
("extra_flags", "${sysenv.__PIO_TEST_CNF_EXTRA_FLAGS}"),