diff --git a/.travis.yml b/.travis.yml index a0515311..07e019a2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,7 +20,8 @@ matrix: install: - git submodule update --init --recursive - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install -U tox; else pip install -U tox; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install tox; else pip install -U tox; fi # ChipKIT issue: install 32-bit support for GCC PIC32 - if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi diff --git a/HISTORY.rst b/HISTORY.rst index ad714683..666336b3 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,13 +4,82 @@ Release Notes PlatformIO 3.0 -------------- +3.5.0 (2017-12-28) +~~~~~~~~~~~~~~~~~~ + +* `PlatformIO Home `__ - + interact with PlatformIO ecosystem using modern and cross-platform GUI: + + - Library Manager: + + * Search for new libraries in PlatformIO Registry + * "1-click" library installation, per-project libraries, extra storages + * List installed libraries in multiple storages + * List built-in libraries (by frameworks) + * Updates for installed libraries + * Multiple examples, trending libraries, and more. + + - PlatformIO Projects + - PIO Account + - Development platforms, frameworks and board explorer + - Device Manager: serial, logical, and multicast DNS services + +* Integration with `Jenkins CI `_ +* New `include `__ + folder for project's header files + (`issue #1107 `_) +* Depend on development platform using VSC URL (Git, Mercurial and Subversion) + instead of a name in `Project Configuration File "platformio.ini" `__. + Drop support for ``*_stage`` dev/platform names (use VCS URL instead). 
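  (Editor's note, for illustration only: a minimal ``platformio.ini`` sketch of the
  VCS-based platform dependency described in the bullet above. The repository URL,
  board, and framework below are assumed examples, not taken from this changelog::

      [env:esp01]
      ; point the dev/platform at a Git repository instead of an ``*_stage`` name
      platform = https://github.com/platformio/platform-espressif8266.git
      framework = arduino
      board = esp01

  )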
+* Reinstall/redownload package with a new ``-f, --force`` option for + `platformio lib install `__ + and `platformio platform install `__ + commands + (`issue #778 `_) +* Handle missed dependencies and provide a solution based on PlatformIO Library + Registry + (`issue #781 `_) +* New setting `projects_dir `__ + that allows to override a default PIO Home Projects location + (`issue #1161 `_) + +* `Library Dependency Finder (LDF) `__: + + - Search for dependencies used in `PIO Unit Testing `__ + (`issue #953 `_) + - Parse library source file in pair with a header when they have the same name + (`issue #1175 `_) + - Handle library dependencies defined as VCS or SemVer in + `Project Configuration File "platformio.ini" `__ + (`issue #1155 `_) + - Added option to configure library `Compatible Mode `__ + using `library.json `__ + +* New options for `platformio device list `__ + command: + + - ``--serial`` list available serial ports (default) + - ``--logical`` list logical devices + - ``--mdns`` discover multicast DNS services + (`issue #463 `_) + +* Fixed platforms, packages, and libraries updating behind proxy + (`issue #1061 `_) +* Fixed missing toolchain include paths for project generator + (`issue #1154 `_) +* Fixed "Super-Quick (Mac / Linux)" installation in "get-platformio.py" script + (`issue #1017 `_) +* Fixed "get-platformio.py" script which hangs on Windows 10 + (`issue #1118 `_) +* Other bug fixes and performance improvements + 3.4.1 (2017-08-02) ~~~~~~~~~~~~~~~~~~ * Pre/Post extra scripting for advanced control of PIO Build System (`issue #891 `_) * New `lib_archive `_ - option to control library archiving and linking behaviour + option to control library archiving and linking behavior (`issue #993 `_) * Add "inc" folder automatically to CPPPATH when "src" is available (works for project and library) (`issue #1003 `_) @@ -104,7 +173,7 @@ PlatformIO 3.0 command (`issue #430 `_) * List supported frameworks, SDKs with a new - `pio platform frameworks `__ command + `pio platform frameworks `__ command * Visual Studio Code extension for PlatformIO (`issue #619 `_) * Added new options ``--no-reset``, ``--monitor-rts`` and ``--monitor-dtr`` @@ -222,7 +291,7 @@ PlatformIO 3.0 * `PlatformIO Plus `__ - + Local and Embedded `Unit Testing `__ + + Local and Embedded `Unit Testing `__ (`issue #408 `_, `issue #519 `_) @@ -893,7 +962,7 @@ PlatformIO 2.0 `windows_x86 `__ development platforms (`issue #263 `_) -* Added `PlatformIO Demo `_ +* Added `PlatformIO Demo `_ page to documentation * Simplified `installation `__ process of PlatformIO @@ -1252,7 +1321,7 @@ PlatformIO 1.0 (`issue #83 `_) * Added ``--json-output`` option to `platformio boards `__ - and `platformio search `__ + and `platformio search `__ commands which allows to return the output in `JSON `_ format (`issue #42 `_) * Allowed to ignore some libs from *Library Dependency Finder* via @@ -1293,7 +1362,7 @@ PlatformIO 0.0 ~~~~~~~~~~~~~~~~~~~ * Added ``--json-output`` option to - `platformio list `__, + `platformio list `__, `platformio serialports list `__ and `platformio lib list `__ commands which allows to return the output in `JSON `_ format @@ -1337,14 +1406,14 @@ PlatformIO 0.0 * Ask user to install platform (when it hasn't been installed yet) within `platformio run `__ - and `platformio show `_ commands + and `platformio show `_ commands * Improved main `documentation `_ * Fixed "*OSError: [Errno 2] No such file or directory*" within `platformio run `__ command when PlatformIO isn't installed properly -* Fixed example for 
`Eclipse IDE with Tiva board `_ +* Fixed example for Eclipse IDE with Tiva board (`issue #32 `_) -* Upgraded `Eclipse Project Examples `_ +* Upgraded Eclipse Project Examples to latest *Luna* and *PlatformIO* releases 0.9.0 (2014-12-01) @@ -1433,7 +1502,7 @@ PlatformIO 0.0 * Implemented (especially for `SmartAnthill `_) `platformio run -t uploadlazy `_ target (no dependencies to framework libs, ELF and etc.) -* Allowed to skip default packages via `platformio install --skip-default-package `_ +* Allowed to skip default packages via `platformio install --skip-default-package `_ option * Added tools for *Raspberry Pi* platform * Added support for *Microduino* and *Raspduino* boards in @@ -1442,7 +1511,7 @@ PlatformIO 0.0 0.3.1 (2014-06-21) ~~~~~~~~~~~~~~~~~~ -* Fixed auto-installer for Windows OS (bug with %PATH% customisations) +* Fixed auto-installer for Windows OS (bug with %PATH% custom installation) 0.3.0 (2014-06-21) diff --git a/Makefile b/Makefile index 51b3d166..0031d316 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,10 @@ isort: yapf: yapf --recursive --in-place platformio/ -before-commit: isort yapf lint +test: + py.test -v -s tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py + +before-commit: isort yapf lint test clean-docs: rm -rf docs/_build diff --git a/docs b/docs index ebd68b4b..c76ccaf3 160000 --- a/docs +++ b/docs @@ -1 +1 @@ -Subproject commit ebd68b4bac5ea3dcb526637dce1f782cc2ce570d +Subproject commit c76ccaf33786e93392c80decdb457794ac234380 diff --git a/examples b/examples index a2d7ba27..2d716306 160000 --- a/examples +++ b/examples @@ -1 +1 @@ -Subproject commit a2d7ba27c3f2730381d99e587837c6aca9269e33 +Subproject commit 2d716306f33cbaa3d9146e417d02e15747cadb2a diff --git a/platformio/__init__.py b/platformio/__init__.py index 6e3f3cc4..33de06c8 100644 --- a/platformio/__init__.py +++ b/platformio/__init__.py @@ -14,7 +14,7 @@ import sys -VERSION = (3, 4, 1) +VERSION = (3, 5, 0) __version__ = ".".join([str(s) for s in VERSION]) __title__ = "platformio" diff --git a/platformio/__main__.py b/platformio/__main__.py index b1872fd2..85cbd401 100644 --- a/platformio/__main__.py +++ b/platformio/__main__.py @@ -80,29 +80,45 @@ def process_result(ctx, result, force, caller): # pylint: disable=W0613 maintenance.on_platformio_end(ctx, result) -def main(): - try: - if "cygwin" in system().lower(): - raise exception.CygwinEnvDetected() +def configure(): + if "cygwin" in system().lower(): + raise exception.CygwinEnvDetected() - # https://urllib3.readthedocs.org - # /en/latest/security.html#insecureplatformwarning + # https://urllib3.readthedocs.org + # /en/latest/security.html#insecureplatformwarning + try: + import urllib3 + urllib3.disable_warnings() + except (AttributeError, ImportError): + pass + + # handle PLATFORMIO_FORCE_COLOR + if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true": try: - import urllib3 - urllib3.disable_warnings() - except (AttributeError, ImportError): + # pylint: disable=protected-access + click._compat.isatty = lambda stream: True + except: # pylint: disable=bare-except pass - # handle PLATFORMIO_FORCE_COLOR - if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true": - try: - # pylint: disable=protected-access - click._compat.isatty = lambda stream: True - except: # pylint: disable=bare-except - pass + # Handle IOError issue with VSCode's Terminal (Windows) + click_echo_origin = [click.echo, click.secho] + def _safe_echo(origin, *args, **kwargs): + try: + click_echo_origin[origin](*args, **kwargs) + except 
IOError: + (sys.stderr.write if kwargs.get("err") else + sys.stdout.write)("%s\n" % (args[0] if args else "")) + + click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs) + click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs) + + +def main(): + try: + configure() cli(None, None, None) - except Exception as e: # pylint: disable=W0703 + except Exception as e: # pylint: disable=broad-except if not isinstance(e, exception.ReturnErrorCode): maintenance.on_platformio_exception(e) error_str = "Error: " diff --git a/platformio/app.py b/platformio/app.py index f4e5ea2b..f6015d36 100644 --- a/platformio/app.py +++ b/platformio/app.py @@ -18,16 +18,33 @@ import os import uuid from copy import deepcopy from os import environ, getenv, listdir, remove -from os.path import dirname, getmtime, isdir, isfile, join +from os.path import abspath, dirname, expanduser, getmtime, isdir, isfile, join from time import time import requests from lockfile import LockFailed, LockFile from platformio import __version__, exception, util -from platformio.exception import InvalidSettingName, InvalidSettingValue + + +def projects_dir_validate(projects_dir): + assert isdir(projects_dir) + return abspath(projects_dir) + DEFAULT_SETTINGS = { + "auto_update_libraries": { + "description": "Automatically update libraries (Yes/No)", + "value": False + }, + "auto_update_platforms": { + "description": "Automatically update platforms (Yes/No)", + "value": False + }, + "check_libraries_interval": { + "description": "Check for the library updates interval (days)", + "value": 7 + }, "check_platformio_interval": { "description": "Check for the new PlatformIO interval (days)", "value": 3 @@ -36,37 +53,30 @@ DEFAULT_SETTINGS = { "description": "Check for the platform updates interval (days)", "value": 7 }, - "check_libraries_interval": { - "description": "Check for the library updates interval (days)", - "value": 7 - }, - "auto_update_platforms": { - "description": "Automatically update platforms (Yes/No)", - "value": False - }, - "auto_update_libraries": { - "description": "Automatically update libraries (Yes/No)", - "value": False - }, - "force_verbose": { - "description": "Force verbose output when processing environments", - "value": False + "enable_cache": { + "description": "Enable caching for API requests and Library Manager", + "value": True }, "enable_ssl": { "description": "Enable SSL for PlatformIO Services", "value": False }, - "enable_cache": { - "description": "Enable caching for API requests and Library Manager", - "value": True - }, "enable_telemetry": { "description": ("Telemetry service (Yes/No)"), "value": True - } + }, + "force_verbose": { + "description": "Force verbose output when processing environments", + "value": False + }, + "projects_dir": { + "description": "Default location for PlatformIO projects (PIO Home)", + "value": join(expanduser("~"), "Documents", "PlatformIO", "Projects"), + "validator": projects_dir_validate + }, } SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None} @@ -95,11 +105,14 @@ class State(object): def __exit__(self, type_, value, traceback): if self._prev_state != self._state: - with open(self.path, "w") as fp: - if "dev" in __version__: - json.dump(self._state, fp, indent=4) - else: - json.dump(self._state, fp) + try: + with open(self.path, "w") as fp: + if "dev" in __version__: + json.dump(self._state, fp, indent=4) + else: + json.dump(self._state, fp) + except IOError: + raise exception.HomeDirPermissionsError(util.get_home_dir()) 
self._unlock_state_file() def _lock_state_file(self): @@ -114,13 +127,7 @@ class State(object): try: self._lockfile.acquire() except LockFailed: - raise exception.PlatformioException( - "The directory `{0}` or its parent directory is not owned by " - "the current user and PlatformIO can not store configuration " - "data. \nPlease check the permissions and owner of that " - "directory. Otherwise, please remove manually `{0}` " - "directory and PlatformIO will create new from the current " - "user.".format(dirname(self.path))) + raise exception.HomeDirPermissionsError(dirname(self.path)) def _unlock_state_file(self): if self._lockfile: @@ -134,16 +141,10 @@ class ContentCache(object): self._db_path = None self._lockfile = None - if not get_setting("enable_cache"): - return - self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache") self._db_path = join(self.cache_dir, "db.data") def __enter__(self): - if not self._db_path or not isfile(self._db_path): - return self - self.delete() return self @@ -155,6 +156,7 @@ class ContentCache(object): os.makedirs(self.cache_dir) self._lockfile = LockFile(self.cache_dir) if self._lockfile.is_locked() and \ + isfile(self._lockfile.lock_file) and \ (time() - getmtime(self._lockfile.lock_file)) > 10: self._lockfile.break_lock() @@ -192,11 +194,13 @@ class ContentCache(object): return data def set(self, key, data, valid): + if not get_setting("enable_cache"): + return False cache_path = self.get_cache_path(key) if isfile(cache_path): self.delete(key) if not data: - return + return False if not isdir(self.cache_dir): os.makedirs(self.cache_dir) tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400} @@ -220,6 +224,8 @@ class ContentCache(object): def delete(self, keys=None): """ Keys=None, delete expired items """ + if not isfile(self._db_path): + return None if not keys: keys = [] if not isinstance(keys, list): @@ -266,19 +272,19 @@ def clean_cache(): def sanitize_setting(name, value): if name not in DEFAULT_SETTINGS: - raise InvalidSettingName(name) + raise exception.InvalidSettingName(name) defdata = DEFAULT_SETTINGS[name] try: if "validator" in defdata: - value = defdata['validator']() + value = defdata['validator'](value) elif isinstance(defdata['value'], bool): if not isinstance(value, bool): value = str(value).lower() in ("true", "yes", "y", "1") elif isinstance(defdata['value'], int): value = int(value) except Exception: - raise InvalidSettingValue(value, name) + raise exception.InvalidSettingValue(value, name) return value @@ -354,7 +360,9 @@ def get_cid(): except: # pylint: disable=bare-except pass cid = str( - uuid.UUID(bytes=hashlib.md5(str(_uid if _uid else uuid.getnode())) - .digest())) - set_state_item("cid", cid) + uuid.UUID( + bytes=hashlib.md5(str(_uid if _uid else uuid.getnode())) + .digest())) + if "windows" in util.get_systype() or os.getuid() > 0: + set_state_item("cid", cid) return cid diff --git a/platformio/builder/main.py b/platformio/builder/main.py index f811c13a..6e073cce 100644 --- a/platformio/builder/main.py +++ b/platformio/builder/main.py @@ -16,7 +16,7 @@ import base64 import json import sys from os import environ -from os.path import join +from os.path import expanduser, join from time import time from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS, @@ -87,6 +87,7 @@ DEFAULT_ENV_OPTIONS = dict( UNIX_TIME=int(time()), PIOHOME_DIR=util.get_home_dir(), PROJECT_DIR=util.get_project_dir(), + PROJECTINCLUDE_DIR=util.get_projectinclude_dir(), PROJECTSRC_DIR=util.get_projectsrc_dir(), 
PROJECTTEST_DIR=util.get_projecttest_dir(), PROJECTDATA_DIR=util.get_projectdata_dir(), @@ -138,9 +139,13 @@ for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS", # Configure extra library source directories for LDF if util.get_project_optional_dir("lib_extra_dirs"): - env.Prepend(LIBSOURCE_DIRS=util.parse_conf_multi_values( - util.get_project_optional_dir("lib_extra_dirs"))) + env.Prepend( + LIBSOURCE_DIRS=util.parse_conf_multi_values( + util.get_project_optional_dir("lib_extra_dirs"))) env.Prepend(LIBSOURCE_DIRS=env.get("LIB_EXTRA_DIRS", [])) +env['LIBSOURCE_DIRS'] = [ + expanduser(d) if d.startswith("~") else d for d in env['LIBSOURCE_DIRS'] +] env.LoadPioPlatform(commonvars) @@ -167,7 +172,8 @@ if "envdump" in COMMAND_LINE_TARGETS: if "idedata" in COMMAND_LINE_TARGETS: try: - print "\n%s\n" % json.dumps(env.DumpIDEData()) + print "\n%s\n" % util.path_to_unicode( + json.dumps(env.DumpIDEData(), ensure_ascii=False)) env.Exit(0) except UnicodeDecodeError: sys.stderr.write( diff --git a/platformio/builder/tools/pioide.py b/platformio/builder/tools/pioide.py index e05b2d79..726d52dd 100644 --- a/platformio/builder/tools/pioide.py +++ b/platformio/builder/tools/pioide.py @@ -15,6 +15,7 @@ from __future__ import absolute_import from glob import glob +from os import environ from os.path import join from SCons.Defaults import processDefines @@ -23,7 +24,7 @@ from platformio import util from platformio.managers.core import get_core_package_dir -def dump_includes(env): +def _dump_includes(env): includes = [] for item in env.get("CPPPATH", []): @@ -31,7 +32,7 @@ def dump_includes(env): # installed libs for lb in env.GetLibBuilders(): - includes.extend(lb.get_inc_dirs()) + includes.extend(lb.get_include_dirs()) # includes from toolchains p = env.PioPlatform() @@ -41,6 +42,8 @@ def dump_includes(env): toolchain_dir = util.glob_escape(p.get_package_dir(name)) toolchain_incglobs = [ join(toolchain_dir, "*", "include*"), + join(toolchain_dir, "*", "include", "c++", "*"), + join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"), join(toolchain_dir, "lib", "gcc", "*", "*", "include*") ] for g in toolchain_incglobs: @@ -53,7 +56,29 @@ def dump_includes(env): return includes -def dump_defines(env): +def _get_gcc_defines(env): + items = [] + try: + sysenv = environ.copy() + sysenv['PATH'] = str(env['ENV']['PATH']) + result = util.exec_command( + "echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True) + except OSError: + return items + if result['returncode'] != 0: + return items + for line in result['out'].split("\n"): + tokens = line.strip().split(" ", 2) + if not tokens or tokens[0] != "#define": + continue + if len(tokens) > 2: + items.append("%s=%s" % (tokens[1], tokens[2])) + else: + items.append(tokens[1]) + return items + + +def _dump_defines(env): defines = [] # global symbols for item in processDefines(env.get("CPPDEFINES", [])): @@ -61,9 +86,18 @@ def dump_defines(env): # special symbol for Atmel AVR MCU if env['PIOPLATFORM'] == "atmelavr": - defines.append( - "__AVR_%s__" % env.BoardConfig().get("build.mcu").upper() - .replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny")) + board_mcu = env.get("BOARD_MCU") + if not board_mcu and "BOARD" in env: + board_mcu = env.BoardConfig().get("build.mcu") + if board_mcu: + defines.append( + str("__AVR_%s__" % board_mcu.upper() + .replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny"))) + + # built-in GCC marcos + if env.GetCompilerType() == "gcc": + defines.extend(_get_gcc_defines(env)) + return defines @@ -75,9 +109,9 @@ 
def DumpIDEData(env): "libsource_dirs": [env.subst(l) for l in env.get("LIBSOURCE_DIRS", [])], "defines": - dump_defines(env), + _dump_defines(env), "includes": - dump_includes(env), + _dump_includes(env), "cc_flags": env.subst(LINTCCOM), "cxx_flags": @@ -89,7 +123,9 @@ def DumpIDEData(env): "gdb_path": util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")), "prog_path": - env.subst("$PROG_PATH") + env.subst("$PROG_PATH"), + "compiler_type": + env.GetCompilerType() } env_ = env.Clone() diff --git a/platformio/builder/tools/piolib.py b/platformio/builder/tools/piolib.py index ddc8229e..8973bdfb 100644 --- a/platformio/builder/tools/piolib.py +++ b/platformio/builder/tools/piolib.py @@ -17,13 +17,15 @@ from __future__ import absolute_import +import hashlib import os import sys -from os.path import basename, commonprefix, isdir, isfile, join, realpath, sep +from os.path import (basename, commonprefix, dirname, isdir, isfile, join, + realpath, sep) from platform import system import SCons.Scanner -from SCons.Script import ARGUMENTS, DefaultEnvironment +from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment from platformio import util from platformio.builder.tools import platformio as piotool @@ -82,9 +84,14 @@ class LibBuilderBase(object): LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"] LDF_MODE_DEFAULT = "chain" + COMPAT_MODES = [0, 1, 2] + COMPAT_MODE_DEFAULT = 1 + CLASSIC_SCANNER = SCons.Scanner.C.CScanner() ADVANCED_SCANNER = SCons.Scanner.C.CScanner(advanced=True) - INC_DIRS_CACHE = None + PARSE_SRC_BY_H_NAME = True + + _INCLUDE_DIRS_CACHE = None def __init__(self, env, path, manifest=None, verbose=False): self.env = env.Clone() @@ -93,13 +100,11 @@ class LibBuilderBase(object): self.verbose = verbose self._manifest = manifest if manifest else self.load_manifest() - self._ldf_mode = self.validate_ldf_mode( - self.env.get("LIB_LDF_MODE", self.LDF_MODE_DEFAULT)) self._is_dependent = False self._is_built = False self._depbuilders = list() self._circular_deps = list() - self._scanned_paths = list() + self._processed_files = list() # reset source filter, could be overridden with extra script self.env['SRC_FILTER'] = "" @@ -140,20 +145,29 @@ class LibBuilderBase(object): "-" % os.sep ] + @property + def include_dir(self): + if not all([isdir(join(self.path, d)) for d in ("include", "src")]): + return None + return join(self.path, "include") + @property def src_dir(self): return (join(self.path, "src") if isdir(join(self.path, "src")) else self.path) + def get_include_dirs(self): + items = [self.src_dir] + include_dir = self.include_dir + if include_dir and include_dir not in items: + items.append(include_dir) + return items + @property def build_dir(self): - return join("$BUILD_DIR", "lib", basename(self.path)) - - def get_inc_dirs(self): - items = [self.src_dir] - if all([isdir(join(self.path, d)) for d in ("inc", "src")]): - items.append(join(self.path, "inc")) - return items + return join("$BUILD_DIR", + "lib%s" % hashlib.sha1(self.path).hexdigest()[:3], + basename(self.path)) @property def build_flags(self): @@ -171,21 +185,15 @@ class LibBuilderBase(object): def lib_archive(self): return self.env.get("LIB_ARCHIVE", "") != "false" - @staticmethod - def validate_ldf_mode(mode): - if isinstance(mode, basestring): - mode = mode.strip().lower() - if mode in LibBuilderBase.LDF_MODES: - return mode - try: - return LibBuilderBase.LDF_MODES[int(mode)] - except (IndexError, ValueError): - pass - return LibBuilderBase.LDF_MODE_DEFAULT - @property def 
lib_ldf_mode(self): - return self._ldf_mode + return self.validate_ldf_mode( + self.env.get("LIB_LDF_MODE", self.LDF_MODE_DEFAULT)) + + @property + def lib_compat_mode(self): + return self.validate_compat_mode( + self.env.get("LIB_COMPAT_MODE", self.COMPAT_MODE_DEFAULT)) @property def depbuilders(self): @@ -200,15 +208,35 @@ class LibBuilderBase(object): return self._is_built @staticmethod - def items_in_list(items, ilist): + def validate_ldf_mode(mode): + if isinstance(mode, basestring): + mode = mode.strip().lower() + if mode in LibBuilderBase.LDF_MODES: + return mode + try: + return LibBuilderBase.LDF_MODES[int(mode)] + except (IndexError, ValueError): + pass + return LibBuilderBase.LDF_MODE_DEFAULT - def _items_to_list(items_): - if not isinstance(items_, list): - items_ = [i.strip() for i in items_.split(",")] - return [i.lower() for i in items_ if i] + @staticmethod + def validate_compat_mode(mode): + try: + mode = int(mode) + assert mode in LibBuilderBase.COMPAT_MODES + return mode + except (AssertionError, ValueError): + return LibBuilderBase.COMPAT_MODE_DEFAULT - items = _items_to_list(items) - ilist = _items_to_list(ilist) + @staticmethod + def items_to_list(items): + if not isinstance(items, list): + items = [i.strip() for i in items.split(",")] + return [i.lower() for i in items if i] + + def items_in_list(self, items, ilist): + items = self.items_to_list(items) + ilist = self.items_to_list(ilist) if "*" in items or "*" in ilist: return True return set(items) & set(ilist) @@ -222,13 +250,6 @@ class LibBuilderBase(object): def load_manifest(self): return {} - def get_src_files(self): - return [ - join(self.src_dir, item) - for item in self.env.MatchSourceFiles(self.src_dir, - self.src_filter) - ] - def process_extra_options(self): with util.cd(self.path): self.env.ProcessUnFlags(self.build_unflags) @@ -237,10 +258,12 @@ class LibBuilderBase(object): self.env.SConscriptChdir(1) self.env.SConscript( realpath(self.extra_script), - exports={"env": self.env, - "pio_lib_builder": self}) + exports={ + "env": self.env, + "pio_lib_builder": self + }) - def _process_dependencies(self): + def process_dependencies(self): if not self.dependencies: return for item in self.dependencies: @@ -260,7 +283,7 @@ class LibBuilderBase(object): continue found = False - for lb in self.envorigin.GetLibBuilders(): + for lb in self.env.GetLibBuilders(): if item['name'] != lb.name: continue elif "frameworks" in item and \ @@ -279,56 +302,81 @@ class LibBuilderBase(object): "library\n" % (item['name'], self.name)) self.env.Exit(1) - def _validate_search_paths(self, search_paths=None): - if not search_paths: - search_paths = [] - assert isinstance(search_paths, list) + def get_search_files(self): + items = [ + join(self.src_dir, item) + for item in self.env.MatchSourceFiles(self.src_dir, + self.src_filter) + ] + include_dir = self.include_dir + if include_dir: + items.extend([ + join(include_dir, item) + for item in self.env.MatchSourceFiles(include_dir) + ]) + return items - _search_paths = [] - for path in search_paths: - if path not in self._scanned_paths: - _search_paths.append(path) - self._scanned_paths.append(path) + def _validate_search_files(self, search_files=None): + if not search_files: + search_files = [] + assert isinstance(search_files, list) - return _search_paths + _search_files = [] + for path in search_files: + if path not in self._processed_files: + _search_files.append(path) + self._processed_files.append(path) - def _get_found_includes(self, search_paths=None): + return _search_files + + 
def _get_found_includes(self, search_files=None): # all include directories - if not LibBuilderBase.INC_DIRS_CACHE: - inc_dirs = [] - used_inc_dirs = [] - for lb in self.envorigin.GetLibBuilders(): - items = [self.env.Dir(d) for d in lb.get_inc_dirs()] - if lb.dependent: - used_inc_dirs.extend(items) - else: - inc_dirs.extend(items) - LibBuilderBase.INC_DIRS_CACHE = used_inc_dirs + inc_dirs + if not LibBuilderBase._INCLUDE_DIRS_CACHE: + LibBuilderBase._INCLUDE_DIRS_CACHE = [] + for lb in self.env.GetLibBuilders(): + LibBuilderBase._INCLUDE_DIRS_CACHE.extend( + [self.env.Dir(d) for d in lb.get_include_dirs()]) # append self include directories - inc_dirs = [self.env.Dir(d) for d in self.get_inc_dirs()] - inc_dirs.extend(LibBuilderBase.INC_DIRS_CACHE) + include_dirs = [self.env.Dir(d) for d in self.get_include_dirs()] + include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE) result = [] - for path in self._validate_search_paths(search_paths): + for path in self._validate_search_files(search_files): try: assert "+" in self.lib_ldf_mode incs = self.env.File(path).get_found_includes( - self.env, LibBuilderBase.ADVANCED_SCANNER, tuple(inc_dirs)) + self.env, LibBuilderBase.ADVANCED_SCANNER, + tuple(include_dirs)) except Exception as e: # pylint: disable=broad-except if self.verbose and "+" in self.lib_ldf_mode: sys.stderr.write( "Warning! Classic Pre Processor is used for `%s`, " "advanced has failed with `%s`\n" % (path, e)) - incs = self.env.File(path).get_found_includes( - self.env, LibBuilderBase.CLASSIC_SCANNER, tuple(inc_dirs)) + _incs = self.env.File(path).get_found_includes( + self.env, LibBuilderBase.CLASSIC_SCANNER, + tuple(include_dirs)) + incs = [] + for inc in _incs: + incs.append(inc) + if not self.PARSE_SRC_BY_H_NAME: + continue + _h_path = inc.get_abspath() + if not self.env.IsFileWithExt(_h_path, + piotool.SRC_HEADER_EXT): + continue + _f_part = _h_path[:_h_path.rindex(".")] + for ext in piotool.SRC_C_EXT: + if isfile("%s.%s" % (_f_part, ext)): + incs.append( + self.env.File("%s.%s" % (_f_part, ext))) # print path, map(lambda n: n.get_abspath(), incs) for inc in incs: if inc not in result: result.append(inc) return result - def depend_recursive(self, lb, search_paths=None): + def depend_recursive(self, lb, search_files=None): def _already_depends(_lb): if self in _lb.depbuilders: @@ -348,32 +396,32 @@ class LibBuilderBase(object): self._circular_deps.append(lb) elif lb not in self._depbuilders: self._depbuilders.append(lb) - LibBuilderBase.INC_DIRS_CACHE = None - lb.search_deps_recursive(search_paths) + LibBuilderBase._INCLUDE_DIRS_CACHE = None + lb.search_deps_recursive(search_files) - def search_deps_recursive(self, search_paths=None): + def search_deps_recursive(self, search_files=None): if not self._is_dependent: self._is_dependent = True - self._process_dependencies() + self.process_dependencies() if self.lib_ldf_mode.startswith("deep"): - search_paths = self.get_src_files() + search_files = self.get_search_files() # when LDF is disabled if self.lib_ldf_mode == "off": return lib_inc_map = {} - for inc in self._get_found_includes(search_paths): - for lb in self.envorigin.GetLibBuilders(): + for inc in self._get_found_includes(search_files): + for lb in self.env.GetLibBuilders(): if inc.get_abspath() in lb: if lb not in lib_inc_map: lib_inc_map[lb] = [] lib_inc_map[lb].append(inc.get_abspath()) break - for lb, lb_search_paths in lib_inc_map.items(): - self.depend_recursive(lb, lb_search_paths) + for lb, lb_search_files in lib_inc_map.items(): + self.depend_recursive(lb, 
lb_search_files) def build(self): libs = [] @@ -384,16 +432,16 @@ class LibBuilderBase(object): self.env.AppendUnique(**{key: lb.env.get(key)}) for lb in self._circular_deps: - self.env.AppendUnique(CPPPATH=lb.get_inc_dirs()) + self.env.AppendUnique(CPPPATH=lb.get_include_dirs()) if self._is_built: return libs self._is_built = True - self.env.AppendUnique(CPPPATH=self.get_inc_dirs()) + self.env.AppendUnique(CPPPATH=self.get_include_dirs()) if self.lib_ldf_mode == "off": - for lb in self.envorigin.GetLibBuilders(): + for lb in self.env.GetLibBuilders(): if self == lb or not lb.is_built: continue for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"): @@ -427,13 +475,13 @@ class ArduinoLibBuilder(LibBuilderBase): manifest[key.strip()] = value.strip() return manifest - def get_inc_dirs(self): - inc_dirs = LibBuilderBase.get_inc_dirs(self) + def get_include_dirs(self): + include_dirs = LibBuilderBase.get_include_dirs(self) if isdir(join(self.path, "src")): - return inc_dirs + return include_dirs if isdir(join(self.path, "utility")): - inc_dirs.append(join(self.path, "utility")) - return inc_dirs + include_dirs.append(join(self.path, "utility")) + return include_dirs @property def src_filter(self): @@ -458,19 +506,25 @@ class MbedLibBuilder(LibBuilderBase): return {} return util.load_json(join(self.path, "module.json")) + @property + def include_dir(self): + if isdir(join(self.path, "include")): + return join(self.path, "include") + return None + @property def src_dir(self): if isdir(join(self.path, "source")): return join(self.path, "source") return LibBuilderBase.src_dir.fget(self) - def get_inc_dirs(self): - inc_dirs = LibBuilderBase.get_inc_dirs(self) - if self.path not in inc_dirs: - inc_dirs.append(self.path) + def get_include_dirs(self): + include_dirs = LibBuilderBase.get_include_dirs(self) + if self.path not in include_dirs: + include_dirs.append(self.path) for p in self._manifest.get("extraIncludes", []): - inc_dirs.append(join(self.path, p)) - return inc_dirs + include_dirs.append(join(self.path, p)) + return include_dirs def is_frameworks_compatible(self, frameworks): return self.items_in_list(frameworks, ["mbed"]) @@ -482,6 +536,14 @@ class PlatformIOLibBuilder(LibBuilderBase): assert isfile(join(self.path, "library.json")) manifest = util.load_json(join(self.path, "library.json")) assert "name" in manifest + + # replace "espressif" old name dev/platform with ESP8266 + if "platforms" in manifest: + manifest['platforms'] = [ + "espressif8266" if p == "espressif" else p + for p in self.items_to_list(manifest['platforms']) + ] + return manifest def _is_arduino_manifest(self): @@ -537,6 +599,13 @@ class PlatformIOLibBuilder(LibBuilderBase): self._manifest.get("build").get("libLDFMode")) return LibBuilderBase.lib_ldf_mode.fget(self) + @property + def lib_compat_mode(self): + if "libCompatMode" in self._manifest.get("build", {}): + return self.validate_compat_mode( + self._manifest.get("build").get("libCompatMode")) + return LibBuilderBase.lib_compat_mode.fget(self) + def is_platforms_compatible(self, platforms): items = self._manifest.get("platforms") if not items: @@ -549,27 +618,49 @@ class PlatformIOLibBuilder(LibBuilderBase): return LibBuilderBase.is_frameworks_compatible(self, frameworks) return self.items_in_list(frameworks, items) - def get_inc_dirs(self): - inc_dirs = LibBuilderBase.get_inc_dirs(self) + def get_include_dirs(self): + include_dirs = LibBuilderBase.get_include_dirs(self) - # backwards compatibility with PlatformIO 2.0 + # backwards compatibility with PlatformIO 2.0 
if ("build" not in self._manifest and self._is_arduino_manifest() and not isdir(join(self.path, "src")) and isdir(join(self.path, "utility"))): - inc_dirs.append(join(self.path, "utility")) + include_dirs.append(join(self.path, "utility")) for path in self.env.get("CPPPATH", []): if path not in self.envorigin.get("CPPPATH", []): - inc_dirs.append(self.env.subst(path)) - return inc_dirs + include_dirs.append(self.env.subst(path)) + return include_dirs class ProjectAsLibBuilder(LibBuilderBase): + @property + def include_dir(self): + include_dir = self.env.subst("$PROJECTINCLUDE_DIR") + return include_dir if isdir(include_dir) else None + @property def src_dir(self): return self.env.subst("$PROJECTSRC_DIR") + def get_include_dirs(self): + include_dirs = LibBuilderBase.get_include_dirs(self) + include_dirs.append(self.env.subst("$PROJECTINCLUDE_DIR")) + return include_dirs + + def get_search_files(self): + # project files + items = LibBuilderBase.get_search_files(self) + # test files + if "__test" in COMMAND_LINE_TARGETS: + items.extend([ + join("$PROJECTTEST_DIR", item) + for item in self.env.MatchSourceFiles("$PROJECTTEST_DIR", + "$PIOTEST_SRC_FILTER") + ]) + return items + @property def lib_ldf_mode(self): mode = LibBuilderBase.lib_ldf_mode.fget(self) @@ -586,39 +677,63 @@ class ProjectAsLibBuilder(LibBuilderBase): # skip for project, options are already processed pass - def search_deps_recursive(self, search_paths=None): - for dep in self.env.get("LIB_DEPS", []): - for token in ("@", "="): - if token in dep: - dep, _ = dep.split(token, 1) - for lb in self.envorigin.GetLibBuilders(): - if lb.name == dep: + def process_dependencies(self): # pylint: disable=too-many-branches + uris = self.env.get("LIB_DEPS", []) + if not uris: + return + storage_dirs = [] + for lb in self.env.GetLibBuilders(): + if dirname(lb.path) not in storage_dirs: + storage_dirs.append(dirname(lb.path)) + + for uri in uris: + found = False + for storage_dir in storage_dirs: + if found: + break + lm = LibraryManager(storage_dir) + pkg_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri)) + if not pkg_dir: + continue + for lb in self.env.GetLibBuilders(): + if lb.path != pkg_dir: + continue + if lb not in self.depbuilders: + self.depend_recursive(lb) + found = True + break + + if not found: + for lb in self.env.GetLibBuilders(): + if lb.name != uri: + continue if lb not in self.depbuilders: self.depend_recursive(lb) break - return LibBuilderBase.search_deps_recursive(self, search_paths) def build(self): self._is_built = True # do not build Project now - self.env.AppendUnique(CPPPATH=self.get_inc_dirs()) + self.env.AppendUnique(CPPPATH=self.get_include_dirs()) return LibBuilderBase.build(self) def GetLibBuilders(env): # pylint: disable=too-many-branches if "__PIO_LIB_BUILDERS" in DefaultEnvironment(): - return DefaultEnvironment()['__PIO_LIB_BUILDERS'] + return sorted( + DefaultEnvironment()['__PIO_LIB_BUILDERS'], + key=lambda lb: 0 if lb.dependent else 1) items = [] - compat_mode = int(env.get("LIB_COMPAT_MODE", 1)) - verbose = (int(ARGUMENTS.get("PIOVERBOSE", 0)) - and not env.GetOption('clean')) + verbose = int(ARGUMENTS.get("PIOVERBOSE", + 0)) and not env.GetOption('clean') def _check_lib_builder(lb): + compat_mode = lb.lib_compat_mode if lb.name in env.get("LIB_IGNORE", []): if verbose: sys.stderr.write("Ignored library %s\n" % lb.path) - return + return None if compat_mode > 1 and not lb.is_platforms_compatible( env['PIOPLATFORM']): if verbose: @@ -678,7 +793,7 @@ def BuildProjectLibraries(env): found_lbs = [lb for lb in 
lib_builders if lb.dependent] for lb in lib_builders: if lb in found_lbs: - lb.search_deps_recursive(lb.get_src_files()) + lb.search_deps_recursive(lb.get_search_files()) for lb in lib_builders: for deplb in lb.depbuilders[:]: if deplb not in found_lbs: @@ -690,9 +805,12 @@ def BuildProjectLibraries(env): title = "<%s>" % lb.name if lb.version: title += " v%s" % lb.version + sys.stdout.write("%s|-- %s" % (margin, title)) if int(ARGUMENTS.get("PIOVERBOSE", 0)): - title += " (%s)" % lb.path - print "%s|-- %s" % (margin, title) + sys.stdout.write(" (") + sys.stdout.write(lb.path) + sys.stdout.write(")") + sys.stdout.write("\n") if lb.depbuilders: print_deps_tree(lb, level + 1) @@ -709,10 +827,10 @@ def BuildProjectLibraries(env): correct_found_libs() if project.depbuilders: - print "Library Dependency Graph" + print "Library Dependency Graph ( http://bit.ly/configure-pio-ldf )" print_deps_tree(project) else: - print "Project does not have dependencies" + print "No dependencies" return project.build() diff --git a/platformio/builder/tools/piomisc.py b/platformio/builder/tools/piomisc.py index fc08ed93..7c08237f 100644 --- a/platformio/builder/tools/piomisc.py +++ b/platformio/builder/tools/piomisc.py @@ -50,7 +50,7 @@ class InoToCPPConverter(object): def convert(self, nodes): contents = self.merge(nodes) if not contents: - return + return None return self.process(contents) def merge(self, nodes): @@ -199,7 +199,8 @@ def _delete_file(path): pass -def GetCompilerType(env): +@util.memoized +def _get_compiler_type(env): try: sysenv = environ.copy() sysenv['PATH'] = str(env['ENV']['PATH']) @@ -216,6 +217,10 @@ def GetCompilerType(env): return None +def GetCompilerType(env): + return _get_compiler_type(env) + + def GetActualLDScript(env): def _lookup_in_ldpath(script): @@ -271,7 +276,7 @@ def PioClean(env, clean_dir): def ProcessDebug(env): if not env.subst("$PIODEBUGFLAGS"): - env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb"]) + env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"]) env.Append( BUILD_FLAGS=env.get("PIODEBUGFLAGS", []), BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"]) @@ -288,11 +293,12 @@ def ProcessTest(env): src_filter = ["+<*.cpp>", "+<*.c>"] if "PIOTEST" in env: src_filter.append("+<%s%s>" % (env['PIOTEST'], sep)) + env.Replace(PIOTEST_SRC_FILTER=src_filter) return env.CollectBuildFiles( "$BUILDTEST_DIR", "$PROJECTTEST_DIR", - src_filter=src_filter, + "$PIOTEST_SRC_FILTER", duplicate=False) diff --git a/platformio/builder/tools/pioplatform.py b/platformio/builder/tools/pioplatform.py index ab679a68..02519dbe 100644 --- a/platformio/builder/tools/pioplatform.py +++ b/platformio/builder/tools/pioplatform.py @@ -41,8 +41,9 @@ def PioPlatform(env): def BoardConfig(env, board=None): p = initPioPlatform(env['PLATFORM_MANIFEST']) try: - config = p.board_config(board if board else env['BOARD']) - except exception.UnknownBoard as e: + assert env.get("BOARD", board), "BoardConfig: Board is not defined" + config = p.board_config(board if board else env.get("BOARD")) + except (AssertionError, exception.UnknownBoard) as e: sys.stderr.write("Error: %s\n" % str(e)) env.Exit(1) return config @@ -61,6 +62,9 @@ def LoadPioPlatform(env, variables): p = env.PioPlatform() installed_packages = p.get_installed_packages() + # Ensure real platform name + env['PIOPLATFORM'] = p.name + # Add toolchains and uploaders to $PATH for name in installed_packages: type_ = p.get_package_type(name) @@ -80,9 +84,8 @@ def LoadPioPlatform(env, variables): board_config = env.BoardConfig() for k in variables.keys(): - 
if (k in env - or not any([k.startswith("BOARD_"), - k.startswith("UPLOAD_")])): + if k in env or \ + not any([k.startswith("BOARD_"), k.startswith("UPLOAD_")]): continue _opt, _val = k.lower().split("_", 1) if _opt == "board": diff --git a/platformio/builder/tools/pioupload.py b/platformio/builder/tools/pioupload.py index 0ff3cb1f..ab7b27ca 100644 --- a/platformio/builder/tools/pioupload.py +++ b/platformio/builder/tools/pioupload.py @@ -58,7 +58,7 @@ def WaitForNewSerialPort(env, before): elapsed = 0 before = [p['port'] for p in before] while elapsed < 5 and new_port is None: - now = [p['port'] for p in util.get_serialports()] + now = [p['port'] for p in util.get_serial_ports()] for p in now: if p not in before: new_port = p @@ -107,29 +107,33 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument def _look_for_mbed_disk(): msdlabels = ("mbed", "nucleo", "frdm", "microbit") - for item in util.get_logicaldisks(): - if item['disk'].startswith( - "/net") or not _is_match_pattern(item['disk']): + for item in util.get_logical_devices(): + if item['path'].startswith("/net") or not _is_match_pattern( + item['path']): continue mbed_pages = [ - join(item['disk'], n) for n in ("mbed.htm", "mbed.html") + join(item['path'], n) for n in ("mbed.htm", "mbed.html") ] if any([isfile(p) for p in mbed_pages]): - return item['disk'] - if (item['name'] - and any([l in item['name'].lower() for l in msdlabels])): - return item['disk'] + return item['path'] + if item['name'] \ + and any([l in item['name'].lower() for l in msdlabels]): + return item['path'] return None def _look_for_serial_port(): port = None board_hwids = [] + upload_protocol = env.subst("$UPLOAD_PROTOCOL") if "BOARD" in env and "build.hwids" in env.BoardConfig(): board_hwids = env.BoardConfig().get("build.hwids") - for item in util.get_serialports(filter_hwid=True): + for item in util.get_serial_ports(filter_hwid=True): if not _is_match_pattern(item['port']): continue port = item['port'] + if upload_protocol.startswith("blackmagic") \ + and "GDB" in item['description']: + return port for hwid in board_hwids: hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "") if hwid_str in item['hwid']: @@ -140,7 +144,8 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument print env.subst("Use manually specified: $UPLOAD_PORT") return - if "mbed" in env.subst("$PIOFRAMEWORK"): + if "mbed" in env.subst("$PIOFRAMEWORK") \ + and not env.subst("$UPLOAD_PROTOCOL"): env.Replace(UPLOAD_PORT=_look_for_mbed_disk()) else: if (system() == "Linux" and not any([ diff --git a/platformio/builder/tools/piowinhooks.py b/platformio/builder/tools/piowinhooks.py index 5ac19f5d..7db4f943 100644 --- a/platformio/builder/tools/piowinhooks.py +++ b/platformio/builder/tools/piowinhooks.py @@ -72,7 +72,7 @@ def exists(_): def generate(env): if system() != "Windows": - return + return None env.Replace(_long_sources_hook=long_sources_hook) env.Replace(_long_incflags_hook=long_incflags_hook) diff --git a/platformio/builder/tools/platformio.py b/platformio/builder/tools/platformio.py index cbe4c3e5..dd6be6a4 100644 --- a/platformio/builder/tools/platformio.py +++ b/platformio/builder/tools/platformio.py @@ -27,18 +27,18 @@ from SCons.Util import case_sensitive_suffixes, is_Sequence from platformio.util import glob_escape, pioversion_to_intstr -SRC_BUILD_EXT = ["c", "cc", "cpp", "S", "spp", "SPP", "sx", "s", "asm", "ASM"] SRC_HEADER_EXT = ["h", "hpp"] +SRC_C_EXT = ["c", "cc", "cpp"] +SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", 
"sx", "s", "asm", "ASM"] SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-" % sep] def BuildProgram(env): def _append_pio_macros(): - env.AppendUnique(CPPDEFINES=[ - ("PLATFORMIO", - int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr()))) - ]) + env.AppendUnique(CPPDEFINES=[( + "PLATFORMIO", + int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())))]) _append_pio_macros() @@ -46,9 +46,6 @@ def BuildProgram(env): if not case_sensitive_suffixes(".s", ".S"): env.Replace(AS="$CC", ASCOM="$ASPPCOM") - if "__debug" in COMMAND_LINE_TARGETS: - env.ProcessDebug() - # process extra flags from board if "BOARD" in env and "build.extra_flags" in env.BoardConfig(): env.ProcessFlags(env.BoardConfig().get("build.extra_flags")) @@ -57,13 +54,26 @@ def BuildProgram(env): # apply user flags env.ProcessFlags(env.get("BUILD_FLAGS")) + # process framework scripts env.BuildFrameworks(env.get("PIOFRAMEWORK")) # restore PIO macros if it was deleted by framework _append_pio_macros() + # Search for project source files + env.Append( + LIBPATH=["$BUILD_DIR"], + PIOBUILDFILES=env.CollectBuildFiles( + "$BUILDSRC_DIR", "$PROJECTSRC_DIR", "$SRC_FILTER", + duplicate=False)) + + if "__debug" in COMMAND_LINE_TARGETS: + env.ProcessDebug() + if "__test" in COMMAND_LINE_TARGETS: + env.Append(PIOBUILDFILES=env.ProcessTest()) + # build dependent libs - deplibs = env.BuildProjectLibraries() + env.Append(LIBS=env.BuildProjectLibraries()) # append specified LD_SCRIPT if ("LDSCRIPT_PATH" in env @@ -71,26 +81,14 @@ def BuildProgram(env): env.Append(LINKFLAGS=['-Wl,-T"$LDSCRIPT_PATH"']) # enable "cyclic reference" for linker - if env.get("LIBS", deplibs) and env.GetCompilerType() == "gcc": + if env.get("LIBS") and env.GetCompilerType() == "gcc": env.Prepend(_LIBFLAGS="-Wl,--start-group ") env.Append(_LIBFLAGS=" -Wl,--end-group") # Handle SRC_BUILD_FLAGS env.ProcessFlags(env.get("SRC_BUILD_FLAGS")) - env.Append( - LIBS=deplibs, - LIBPATH=["$BUILD_DIR"], - PIOBUILDFILES=env.CollectBuildFiles( - "$BUILDSRC_DIR", - "$PROJECTSRC_DIR", - src_filter=env.get("SRC_FILTER"), - duplicate=False)) - - if "__test" in COMMAND_LINE_TARGETS: - env.Append(PIOBUILDFILES=env.ProcessTest()) - - if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS: + if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS: sys.stderr.write( "Error: Nothing to build. 
Please put your source code files " "to '%s' folder\n" % env.subst("$PROJECTSRC_DIR")) @@ -185,6 +183,7 @@ def MatchSourceFiles(env, src_dir, src_filter=None): items.add(item.replace(src_dir + sep, "")) src_dir = env.subst(src_dir) + src_filter = env.subst(src_filter) if src_filter else None src_filter = src_filter or SRC_FILTER_DEFAULT if isinstance(src_filter, (list, tuple)): src_filter = " ".join(src_filter) @@ -269,12 +268,12 @@ def BuildLibrary(env, variant_dir, src_dir, src_filter=None): lib = env.Clone() return lib.StaticLibrary( lib.subst(variant_dir), - lib.CollectBuildFiles(variant_dir, src_dir, src_filter=src_filter)) + lib.CollectBuildFiles(variant_dir, src_dir, src_filter)) def BuildSources(env, variant_dir, src_dir, src_filter=None): DefaultEnvironment().Append(PIOBUILDFILES=env.Clone().CollectBuildFiles( - variant_dir, src_dir, src_filter=src_filter)) + variant_dir, src_dir, src_filter)) def exists(_): diff --git a/platformio/commands/boards.py b/platformio/commands/boards.py index 0d9d6015..e4921354 100644 --- a/platformio/commands/boards.py +++ b/platformio/commands/boards.py @@ -16,7 +16,6 @@ import json import click -from platformio.exception import APIRequestError, InternetIsOffline from platformio.managers.platform import PlatformManager @@ -43,6 +42,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912 click.secho(platform, bold=True) click.echo("-" * terminal_width) print_boards(boards) + return True def print_boards(boards): @@ -80,27 +80,13 @@ def print_boards(boards): def _get_boards(installed=False): - boards = PlatformManager().get_installed_boards() - if not installed: - know_boards = ["%s:%s" % (b['platform'], b['id']) for b in boards] - try: - for board in PlatformManager().get_registered_boards(): - key = "%s:%s" % (board['platform'], board['id']) - if key not in know_boards: - boards.append(board) - except InternetIsOffline: - pass - return sorted(boards, key=lambda b: b['name']) + pm = PlatformManager() + return pm.get_installed_boards() if installed else pm.get_all_boards() def _print_boards_json(query, installed=False): result = [] - try: - boards = _get_boards(installed) - except APIRequestError: - if not installed: - boards = _get_boards(True) - for board in boards: + for board in _get_boards(installed): if query: search_data = "%s %s" % (board['id'], json.dumps(board).lower()) if query.lower() not in search_data.lower(): diff --git a/platformio/commands/device.py b/platformio/commands/device.py index 201ae474..877b62e6 100644 --- a/platformio/commands/device.py +++ b/platformio/commands/device.py @@ -28,19 +28,71 @@ def cli(): @cli.command("list", short_help="List devices") +@click.option("--serial", is_flag=True, help="List serial ports, default") +@click.option("--logical", is_flag=True, help="List logical devices") +@click.option("--mdns", is_flag=True, help="List multicast DNS services") @click.option("--json-output", is_flag=True) -def device_list(json_output): +def device_list( # pylint: disable=too-many-branches + serial, logical, mdns, json_output): + if not logical and not mdns: + serial = True + data = {} + if serial: + data['serial'] = util.get_serial_ports() + if logical: + data['logical'] = util.get_logical_devices() + if mdns: + data['mdns'] = util.get_mdns_services() + + single_key = data.keys()[0] if len(data.keys()) == 1 else None if json_output: - click.echo(json.dumps(util.get_serialports())) - return + return click.echo(json.dumps(data[single_key] if single_key else data)) - for item in util.get_serialports(): - 
click.secho(item['port'], fg="cyan") - click.echo("-" * len(item['port'])) - click.echo("Hardware ID: %s" % item['hwid']) - click.echo("Description: %s" % item['description']) - click.echo("") + titles = { + "serial": "Serial Ports", + "logical": "Logical Devices", + "mdns": "Multicast DNS Services" + } + + for key, value in data.iteritems(): + if not single_key: + click.secho(titles[key], bold=True) + click.echo("=" * len(titles[key])) + + if key == "serial": + for item in value: + click.secho(item['port'], fg="cyan") + click.echo("-" * len(item['port'])) + click.echo("Hardware ID: %s" % item['hwid']) + click.echo("Description: %s" % item['description']) + click.echo("") + + if key == "logical": + for item in value: + click.secho(item['path'], fg="cyan") + click.echo("-" * len(item['path'])) + click.echo("Name: %s" % item['name']) + click.echo("") + + if key == "mdns": + for item in value: + click.secho(item['name'], fg="cyan") + click.echo("-" * len(item['name'])) + click.echo("Type: %s" % item['type']) + click.echo("IP: %s" % item['ip']) + click.echo("Port: %s" % item['port']) + if item['properties']: + click.echo("Properties: %s" % ("; ".join([ + "%s=%s" % (k, v) + for k, v in item['properties'].iteritems() + ]))) + click.echo("") + + if single_key: + click.echo("") + + return True @cli.command("monitor", short_help="Monitor device (Serial)") @@ -123,7 +175,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches pass if not kwargs['port']: - ports = util.get_serialports(filter_hwid=True) + ports = util.get_serial_ports(filter_hwid=True) if len(ports) == 1: kwargs['port'] = ports[0]['port'] @@ -154,7 +206,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches def get_project_options(project_dir, environment): config = util.load_project_config(project_dir) if not config.sections(): - return + return None known_envs = [s[4:] for s in config.sections() if s.startswith("env:")] if environment: @@ -163,7 +215,7 @@ def get_project_options(project_dir, environment): raise exception.UnknownEnvNames(environment, ", ".join(known_envs)) if not known_envs: - return + return None if config.has_option("platformio", "env_default"): env_default = config.get("platformio", diff --git a/platformio/commands/home.py b/platformio/commands/home.py new file mode 100644 index 00000000..ae797781 --- /dev/null +++ b/platformio/commands/home.py @@ -0,0 +1,42 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +import click +import requests + +from platformio.managers.core import pioplus_call + + +@click.command("home", short_help="PIO Home") +@click.option("--port", type=int, default=8008, help="HTTP port, default=8008") +@click.option( + "--host", + default="127.0.0.1", + help="HTTP host, default=127.0.0.1. 
" + "You can open PIO Home for inbound connections with --host=0.0.0.0") +@click.option("--no-open", is_flag=True) +def cli(*args, **kwargs): # pylint: disable=unused-argument + pioplus_call(sys.argv[1:]) + + +def shutdown_servers(): + port = 8010 + while port < 9000: + try: + requests.get("http://127.0.0.1:%d?__shutdown__=1" % port) + port += 1 + except: # pylint: disable=bare-except + return diff --git a/platformio/commands/init.py b/platformio/commands/init.py index ac16bb7e..7cfc0672 100644 --- a/platformio/commands/init.py +++ b/platformio/commands/init.py @@ -231,11 +231,6 @@ def init_ci_conf(project_dir): # python: # - "2.7" # -# sudo: false -# cache: -# directories: -# - "~/.platformio" -# # install: # - pip install -U platformio # @@ -251,11 +246,6 @@ def init_ci_conf(project_dir): # python: # - "2.7" # -# sudo: false -# cache: -# directories: -# - "~/.platformio" -# # env: # - PLATFORMIO_CI_SRC=path/to/test/file.c # - PLATFORMIO_CI_SRC=examples/file.ino diff --git a/platformio/commands/lib.py b/platformio/commands/lib.py index f801e23a..051720c6 100644 --- a/platformio/commands/lib.py +++ b/platformio/commands/lib.py @@ -33,8 +33,7 @@ from platformio.util import get_api_result "-g", "--global", is_flag=True, - help="Manage global PlatformIO" - " library storage `%s`" % join(util.get_home_dir(), "lib")) + help="Manage global PlatformIO library storage") @click.option( "-d", "--storage-dir", @@ -93,11 +92,17 @@ def cli(ctx, **options): "--interactive", is_flag=True, help="Allow to make a choice for all prompts") +@click.option( + "-f", + "--force", + is_flag=True, + help="Reinstall/redownload library if exists") @click.pass_obj -def lib_install(lm, libraries, silent, interactive): +def lib_install(lm, libraries, silent, interactive, force): # @TODO "save" option for library in libraries: - lm.install(library, silent=silent, interactive=interactive) + lm.install( + library, silent=silent, interactive=interactive, force=force) @cli.command("uninstall", short_help="Uninstall libraries") @@ -128,7 +133,7 @@ def lib_update(lm, libraries, only_check, json_output): requirements = None url = None if not pkg_dir: - name, requirements, url = lm.parse_pkg_input(library) + name, requirements, url = lm.parse_pkg_uri(library) pkg_dir = lm.get_package_dir(name, requirements, url) if not pkg_dir: continue @@ -143,6 +148,8 @@ def lib_update(lm, libraries, only_check, json_output): for library in libraries: lm.update(library, only_check=only_check) + return True + def print_lib_item(item): click.secho(item['name'], fg="cyan") @@ -204,7 +211,7 @@ def lib_search(query, json_output, page, noninteractive, **filters): result = get_api_result( "/v2/lib/search", dict(query=" ".join(query), page=page), - cache_valid="3d") + cache_valid="1d") if json_output: click.echo(json.dumps(result)) @@ -234,8 +241,8 @@ def lib_search(query, json_output, page, noninteractive, **filters): for item in result['items']: print_lib_item(item) - if (int(result['page']) * int(result['perpage']) >= - int(result['total'])): + if (int(result['page']) * int(result['perpage']) >= int( + result['total'])): break if noninteractive: @@ -252,7 +259,7 @@ def lib_search(query, json_output, page, noninteractive, **filters): "/v2/lib/search", {"query": " ".join(query), "page": int(result['page']) + 1}, - cache_valid="3d") + cache_valid="1d") @cli.command("list", short_help="List installed libraries") @@ -265,11 +272,13 @@ def lib_list(lm, json_output): return click.echo(json.dumps(items)) if not items: - return + return None for item in 
sorted(items, key=lambda i: i['name']): print_lib_item(item) + return True + @util.memoized def get_builtin_libs(storage_names=None): @@ -308,13 +317,15 @@ def lib_builtin(storage, json_output): for item in sorted(storage_['items'], key=lambda i: i['name']): print_lib_item(item) + return True + @cli.command("show", short_help="Show detailed info about a library") @click.argument("library", metavar="[LIBRARY]") @click.option("--json-output", is_flag=True) def lib_show(library, json_output): lm = LibraryManager() - name, requirements, _ = lm.parse_pkg_input(library) + name, requirements, _ = lm.parse_pkg_uri(library) lib_id = lm.get_pkg_id_by_name( name, requirements, silent=json_output, interactive=not json_output) lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d") @@ -381,6 +392,8 @@ def lib_show(library, json_output): for row in rows: click.echo(row) + return True + @cli.command("register", short_help="Register a new library") @click.argument("config_url") @@ -438,8 +451,8 @@ def lib_stats(json_output): printitem_tpl.format( name=click.style(name, fg="cyan"), url=click.style( - "http://platformio.org/lib/search?query=" + quote( - "keyword:%s" % name), + "http://platformio.org/lib/search?query=" + + quote("keyword:%s" % name), fg="blue"))) for key in ("updated", "added"): @@ -468,3 +481,5 @@ def lib_stats(json_output): for item in result.get(key, []): _print_lib_item(item) click.echo() + + return True diff --git a/platformio/commands/platform.py b/platformio/commands/platform.py index 9699a4d7..8fcc70b0 100644 --- a/platformio/commands/platform.py +++ b/platformio/commands/platform.py @@ -47,7 +47,7 @@ def _print_platforms(platforms): def _get_registry_platforms(): - platforms = util.get_api_result("/platforms", cache_valid="30d") + platforms = util.get_api_result("/platforms", cache_valid="7d") pm = PlatformManager() for platform in platforms or []: platform['versions'] = pm.get_all_repo_versions(platform['name']) @@ -188,7 +188,7 @@ def platform_search(query, json_output): @click.option("--json-output", is_flag=True) def platform_frameworks(query, json_output): frameworks = [] - for framework in util.get_api_result("/frameworks", cache_valid="30d"): + for framework in util.get_api_result("/frameworks", cache_valid="7d"): if query == "all": query = "" search_data = json.dumps(framework) @@ -257,7 +257,7 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches click.echo("Frameworks: %s" % ", ".join(data['frameworks'])) if not data['packages']: - return + return None if not isinstance(data['packages'][0], dict): click.echo("Packages: %s" % ", ".join(data['packages'])) @@ -287,21 +287,29 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches click.echo("------") print_boards(data['boards']) + return True + @cli.command("install", short_help="Install new development platform") @click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]") @click.option("--with-package", multiple=True) @click.option("--without-package", multiple=True) @click.option("--skip-default-package", is_flag=True) +@click.option( + "-f", + "--force", + is_flag=True, + help="Reinstall/redownload dev/platform and its packages if exist") def platform_install(platforms, with_package, without_package, - skip_default_package): + skip_default_package, force): pm = PlatformManager() for platform in platforms: if pm.install( name=platform, with_packages=with_package, without_packages=without_package, - skip_default_package=skip_default_package): + 
skip_default_package=skip_default_package, + force=force): click.secho( "The platform '%s' has been successfully installed!\n" "The rest of packages will be installed automatically " @@ -351,7 +359,7 @@ def platform_update(platforms, only_packages, only_check, json_output): requirements = None url = None if not pkg_dir: - name, requirements, url = pm.parse_pkg_input(platform) + name, requirements, url = pm.parse_pkg_uri(platform) pkg_dir = pm.get_package_dir(name, requirements, url) if not pkg_dir: continue @@ -375,3 +383,5 @@ def platform_update(platforms, only_packages, only_check, json_output): pm.update( platform, only_packages=only_packages, only_check=only_check) click.echo() + + return True diff --git a/platformio/commands/run.py b/platformio/commands/run.py index fd7a29a0..59654de5 100644 --- a/platformio/commands/run.py +++ b/platformio/commands/run.py @@ -132,8 +132,8 @@ class EnvironmentProcessor(object): "upload_protocol", "upload_speed", "upload_flags", "upload_resetmethod", "lib_deps", "lib_ignore", "lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode", - "lib_archive", "piotest", "test_transport", "test_ignore", - "test_port", "debug_tool", "debug_port", + "lib_archive", "piotest", "test_transport", "test_filter", + "test_ignore", "test_port", "debug_tool", "debug_port", "debug_init_cmds", "debug_extra_cmds", "debug_server", "debug_init_break", "debug_load_cmd", "monitor_port", "monitor_baud", "monitor_rts", "monitor_dtr") @@ -180,13 +180,14 @@ class EnvironmentProcessor(object): self.options[k] = self.options[k].strip() if not self.silent: - click.echo( - "[%s] Processing %s (%s)" % - (datetime.now().strftime("%c"), - click.style(self.name, fg="cyan", bold=True), "; ".join([ - "%s: %s" % (k, ", ".join(util.parse_conf_multi_values(v))) - for k, v in self.options.items() - ]))) + click.echo("[%s] Processing %s (%s)" % + (datetime.now().strftime("%c"), + click.style(self.name, fg="cyan", bold=True), + "; ".join([ + "%s: %s" % + (k, ", ".join(util.parse_conf_multi_values(v))) + for k, v in self.options.items() + ]))) click.secho("-" * terminal_width, bold=True) self.options = self._validate_options(self.options) @@ -227,7 +228,7 @@ class EnvironmentProcessor(object): v = self.RENAMED_PLATFORMS[v] # warn about unknown options - if k not in self.KNOWN_OPTIONS: + if k not in self.KNOWN_OPTIONS and not k.startswith("custom_"): click.secho( "Detected non-PlatformIO `%s` option in `[env:%s]` section" % (k, self.name), @@ -278,10 +279,10 @@ class EnvironmentProcessor(object): if d.strip() ], self.verbose) if "lib_deps" in self.options: - _autoinstall_libdeps( - self.cmd_ctx, - util.parse_conf_multi_values(self.options['lib_deps']), - self.verbose) + _autoinstall_libdeps(self.cmd_ctx, + util.parse_conf_multi_values( + self.options['lib_deps']), + self.verbose) try: p = PlatformFactory.newPlatform(self.options['platform']) @@ -323,8 +324,8 @@ def _clean_pioenvs_dir(pioenvs_dir): # if project's config is modified if (isdir(pioenvs_dir) - and getmtime(join(util.get_project_dir(), "platformio.ini")) > - getmtime(pioenvs_dir)): + and getmtime(join(util.get_project_dir(), + "platformio.ini")) > getmtime(pioenvs_dir)): util.rmtree_(pioenvs_dir) # check project structure diff --git a/platformio/commands/settings.py b/platformio/commands/settings.py index a29d3997..f163ff6d 100644 --- a/platformio/commands/settings.py +++ b/platformio/commands/settings.py @@ -32,8 +32,8 @@ def settings_get(name): click.echo( list_tpl.format( name=click.style("Name", fg="cyan"), - value=(click.style("Value", 
fg="green") + click.style( - " [Default]", fg="yellow")), + value=(click.style("Value", fg="green") + + click.style(" [Default]", fg="yellow")), description="Description")) click.echo("-" * terminal_width) diff --git a/platformio/commands/upgrade.py b/platformio/commands/upgrade.py index df9fc0ee..97014d2b 100644 --- a/platformio/commands/upgrade.py +++ b/platformio/commands/upgrade.py @@ -18,62 +18,61 @@ import click import requests from platformio import VERSION, __version__, exception, util -from platformio.managers.core import update_core_packages +from platformio.commands.home import shutdown_servers @click.command( "upgrade", short_help="Upgrade PlatformIO to the latest version") -def cli(): - # Update PlatformIO's Core packages - update_core_packages(silent=True) - - latest = get_latest_version() - if __version__ == latest: +@click.option("--dev", is_flag=True, help="Use development branch") +def cli(dev): + if not dev and __version__ == get_latest_version(): return click.secho( "You're up-to-date!\nPlatformIO %s is currently the " "newest version available." % __version__, fg="green") - else: - click.secho("Please wait while upgrading PlatformIO ...", fg="yellow") - to_develop = not all([c.isdigit() for c in latest if c != "."]) - cmds = ([ - "pip", "install", "--upgrade", - "https://github.com/platformio/platformio-core/archive/develop.zip" - if to_develop else "platformio" - ], ["platformio", "--version"]) + click.secho("Please wait while upgrading PlatformIO ...", fg="yellow") - cmd = None - r = None - try: - for cmd in cmds: - cmd = [util.get_pythonexe_path(), "-m"] + cmd - r = None + # kill all PIO Home servers, they block `pioplus` binary + shutdown_servers() + + to_develop = dev or not all([c.isdigit() for c in __version__ if c != "."]) + cmds = ([ + "pip", "install", "--upgrade", + "https://github.com/platformio/platformio-core/archive/develop.zip" + if to_develop else "platformio" + ], ["platformio", "--version"]) + + cmd = None + r = None + try: + for cmd in cmds: + cmd = [util.get_pythonexe_path(), "-m"] + cmd + r = None + r = util.exec_command(cmd) + + # try pip with disabled cache + if r['returncode'] != 0 and cmd[2] == "pip": + cmd.insert(3, "--no-cache-dir") r = util.exec_command(cmd) - # try pip with disabled cache - if r['returncode'] != 0 and cmd[2] == "pip": - cmd.insert(3, "--no-cache-dir") - r = util.exec_command(cmd) - - assert r['returncode'] == 0 - assert "version" in r['out'] - actual_version = r['out'].strip().split("version", 1)[1].strip() + assert r['returncode'] == 0 + assert "version" in r['out'] + actual_version = r['out'].strip().split("version", 1)[1].strip() + click.secho( + "PlatformIO has been successfully upgraded to %s" % actual_version, + fg="green") + click.echo("Release notes: ", nl=False) + click.secho( + "http://docs.platformio.org/en/latest/history.html", fg="cyan") + except Exception as e: # pylint: disable=broad-except + if not r: + raise exception.UpgradeError("\n".join([str(cmd), str(e)])) + permission_errors = ("permission denied", "not permitted") + if (any([m in r['err'].lower() for m in permission_errors]) + and "windows" not in util.get_systype()): click.secho( - "PlatformIO has been successfully upgraded to %s" % - actual_version, - fg="green") - click.echo("Release notes: ", nl=False) - click.secho( - "http://docs.platformio.org/en/latest/history.html", fg="cyan") - except Exception as e: # pylint: disable=W0703 - if not r: - raise exception.UpgradeError("\n".join([str(cmd), str(e)])) - permission_errors = ("permission denied", 
"not permitted") - if (any([m in r['err'].lower() for m in permission_errors]) - and "windows" not in util.get_systype()): - click.secho( - """ + """ ----------------- Permission denied ----------------- @@ -83,12 +82,14 @@ You need the `sudo` permission to install Python packages. Try WARNING! Don't use `sudo` for the rest PlatformIO commands. """, - fg="yellow", - err=True) - raise exception.ReturnErrorCode(1) - else: - raise exception.UpgradeError( - "\n".join([str(cmd), r['out'], r['err']])) + fg="yellow", + err=True) + raise exception.ReturnErrorCode(1) + else: + raise exception.UpgradeError("\n".join( + [str(cmd), r['out'], r['err']])) + + return True def get_latest_version(): diff --git a/platformio/downloader.py b/platformio/downloader.py index 986d3283..4dc84a83 100644 --- a/platformio/downloader.py +++ b/platformio/downloader.py @@ -15,6 +15,7 @@ from email.utils import parsedate_tz from math import ceil from os.path import getsize, join +from sys import getfilesystemencoding, version_info from time import mktime import click @@ -30,9 +31,13 @@ class FileDownloader(object): CHUNK_SIZE = 1024 def __init__(self, url, dest_dir=None): + self._request = None # make connection self._request = requests.get( - url, stream=True, headers=util.get_request_defheaders()) + url, + stream=True, + headers=util.get_request_defheaders(), + verify=version_info >= (2, 7, 9)) if self._request.status_code != 200: raise FDUnrecognizedStatusCode(self._request.status_code, url) @@ -48,7 +53,8 @@ class FileDownloader(object): self._progressbar = None self._destination = self._fname if dest_dir: - self.set_destination(join(dest_dir, self._fname)) + self.set_destination( + join(dest_dir.decode(getfilesystemencoding()), self._fname)) def set_destination(self, destination): self._destination = destination @@ -65,21 +71,29 @@ class FileDownloader(object): return int(self._request.headers['content-length']) def start(self): + label = "Downloading" itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE) f = open(self._destination, "wb") - - if app.is_disabled_progressbar() or self.get_size() == -1: - click.echo("Downloading...") - for chunk in itercontent: - if chunk: - f.write(chunk) - else: - chunks = int(ceil(self.get_size() / float(self.CHUNK_SIZE))) - with click.progressbar(length=chunks, label="Downloading") as pb: - for _ in pb: - f.write(next(itercontent)) - f.close() - self._request.close() + try: + if app.is_disabled_progressbar() or self.get_size() == -1: + click.echo("%s..." % label) + for chunk in itercontent: + if chunk: + f.write(chunk) + else: + chunks = int(ceil(self.get_size() / float(self.CHUNK_SIZE))) + with click.progressbar(length=chunks, label=label) as pb: + for _ in pb: + f.write(next(itercontent)) + except IOError as e: + click.secho( + "Error: Please read http://bit.ly/package-manager-ioerror", + fg="red", + err=True) + raise e + finally: + f.close() + self._request.close() if self.get_lmtime(): self._preserve_filemtime(self.get_lmtime()) diff --git a/platformio/exception.py b/platformio/exception.py index e190d7a4..e477bd30 100644 --- a/platformio/exception.py +++ b/platformio/exception.py @@ -53,15 +53,15 @@ class IncompatiblePlatform(PlatformioException): class PlatformNotInstalledYet(PlatformioException): - MESSAGE = "The platform '{0}' has not been installed yet. "\ - "Use `platformio platform install {0}` command" + MESSAGE = ("The platform '{0}' has not been installed yet. 
" + "Use `platformio platform install {0}` command") class BoardNotDefined(PlatformioException): - MESSAGE = "You need to specify board ID using `-b` or `--board` "\ - "option. Supported boards list is available via "\ - "`platformio boards` command" + MESSAGE = ( + "You need to specify board ID using `-b` or `--board` option. " + "Supported boards list is available via `platformio boards` command") class UnknownBoard(PlatformioException): @@ -91,16 +91,16 @@ class MissingPackageManifest(PlatformioException): class UndefinedPackageVersion(PlatformioException): - MESSAGE = "Could not find a version that satisfies the requirement '{0}'"\ - " for your system '{1}'" + MESSAGE = ("Could not find a version that satisfies the requirement '{0}'" + " for your system '{1}'") class PackageInstallError(PlatformioException): - MESSAGE = "Could not install '{0}' with version requirements '{1}' "\ - "for your system '{2}'.\n"\ - "If you use Antivirus, it can block PlatformIO Package "\ - "Manager. Try to disable it for a while." + MESSAGE = ( + "Could not install '{0}' with version requirements '{1}' for your " + "system '{2}'.\n If you use Antivirus, it can block PlatformIO " + "Package Manager. Try to disable it for a while.") class FDUnrecognizedStatusCode(PlatformioException): @@ -110,21 +110,22 @@ class FDUnrecognizedStatusCode(PlatformioException): class FDSizeMismatch(PlatformioException): - MESSAGE = "The size ({0:d} bytes) of downloaded file '{1}' "\ - "is not equal to remote size ({2:d} bytes)" + MESSAGE = ("The size ({0:d} bytes) of downloaded file '{1}' " + "is not equal to remote size ({2:d} bytes)") class FDSHASumMismatch(PlatformioException): - MESSAGE = "The 'sha1' sum '{0}' of downloaded file '{1}' "\ - "is not equal to remote '{2}'" + MESSAGE = ("The 'sha1' sum '{0}' of downloaded file '{1}' " + "is not equal to remote '{2}'") class NotPlatformIOProject(PlatformioException): - MESSAGE = "Not a PlatformIO project. `platformio.ini` file has not been "\ - "found in current working directory ({0}). To initialize new project "\ - "please use `platformio init` command" + MESSAGE = ( + "Not a PlatformIO project. `platformio.ini` file has not been " + "found in current working directory ({0}). To initialize new project " + "please use `platformio init` command") class UndefinedEnvPlatform(PlatformioException): @@ -164,24 +165,27 @@ class APIRequestError(PlatformioException): class InternetIsOffline(PlatformioException): - MESSAGE = "You are not connected to the Internet" + MESSAGE = ( + "You are not connected to the Internet.\n" + "If you build a project first time, we need Internet connection " + "to install all dependencies and toolchain.") class LibNotFound(PlatformioException): - MESSAGE = "Library `{0}` has not been found in PlatformIO Registry.\n"\ - "You can ignore this message, if `{0}` is a built-in library "\ - "(included in framework, SDK). E.g., SPI, Wire, etc." + MESSAGE = ("Library `{0}` has not been found in PlatformIO Registry.\n" + "You can ignore this message, if `{0}` is a built-in library " + "(included in framework, SDK). E.g., SPI, Wire, etc.") class NotGlobalLibDir(PlatformioException): - MESSAGE = "The `{0}` is not a PlatformIO project.\n\n"\ - "To manage libraries "\ - "in global storage `{1}`,\n"\ - "please use `platformio lib --global {2}` or specify custom "\ - "storage `platformio lib --storage-dir /path/to/storage/ {2}`."\ - "\nCheck `platformio lib --help` for details." 
+ MESSAGE = ( + "The `{0}` is not a PlatformIO project.\n\n" + "To manage libraries in global storage `{1}`,\n" + "please use `platformio lib --global {2}` or specify custom storage " + "`platformio lib --storage-dir /path/to/storage/ {2}`.\n" + "Check `platformio lib --help` for details.") class InvalidLibConfURL(PlatformioException): @@ -206,9 +210,9 @@ class InvalidSettingValue(PlatformioException): class CIBuildEnvsEmpty(PlatformioException): - MESSAGE = "Can't find PlatformIO build environments.\n"\ - "Please specify `--board` or path to `platformio.ini` with "\ - "predefined environments using `--project-conf` option" + MESSAGE = ("Can't find PlatformIO build environments.\n" + "Please specify `--board` or path to `platformio.ini` with " + "predefined environments using `--project-conf` option") class UpgradeError(PlatformioException): @@ -221,7 +225,17 @@ class UpgradeError(PlatformioException): """ +class HomeDirPermissionsError(PlatformioException): + + MESSAGE = ( + "The directory `{0}` or its parent directory is not owned by the " + "current user and PlatformIO can not store configuration data.\n" + "Please check the permissions and owner of that directory.\n" + "Otherwise, please remove manually `{0}` directory and PlatformIO " + "will create new from the current user.") + + class CygwinEnvDetected(PlatformioException): - MESSAGE = "PlatformIO does not work within Cygwin environment. "\ - "Use native Terminal instead." + MESSAGE = ("PlatformIO does not work within Cygwin environment. " + "Use native Terminal instead.") diff --git a/platformio/ide/projectgenerator.py b/platformio/ide/projectgenerator.py index 1561bc6e..e1d5b12a 100644 --- a/platformio/ide/projectgenerator.py +++ b/platformio/ide/projectgenerator.py @@ -15,11 +15,10 @@ import json import os import re -from cStringIO import StringIO from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath import bottle -import click +from click.testing import CliRunner from platformio import exception, util from platformio.commands.run import cli as cmd_run @@ -60,24 +59,28 @@ class ProjectGenerator(object): @util.memoized def get_project_build_data(self): - data = {"defines": [], "includes": [], "cxx_path": None} + data = { + "defines": [], + "includes": [], + "cxx_path": None, + "prog_path": None + } envdata = self.get_project_env() if not envdata: return data - out = StringIO() - with util.capture_stdout(out): - click.get_current_context().invoke( - cmd_run, - project_dir=self.project_dir, - environment=[envdata['env_name']], - target=["idedata"]) - result = out.getvalue() + result = CliRunner().invoke(cmd_run, [ + "--project-dir", self.project_dir, "--environment", + envdata['env_name'], "--target", "idedata" + ]) - if '"includes":' not in result: - raise exception.PlatformioException(result) + if result.exit_code != 0 and not isinstance(result.exception, + exception.ReturnErrorCode): + raise result.exception + if '"includes":' not in result.output: + raise exception.PlatformioException(result.output) - for line in result.split("\n"): + for line in result.output.split("\n"): line = line.strip() if line.startswith('{"') and line.endswith("}"): data = json.loads(line) diff --git a/platformio/ide/tpls/vscode/.gitignore.tpl b/platformio/ide/tpls/vscode/.gitignore.tpl index 08e91a14..f39f7c9b 100644 --- a/platformio/ide/tpls/vscode/.gitignore.tpl +++ b/platformio/ide/tpls/vscode/.gitignore.tpl @@ -1,3 +1,3 @@ .pioenvs .piolibdeps -.vscode +.vscode/c_cpp_properties.json diff --git 
a/platformio/ide/tpls/vscode/.vscode/c_cpp_properties.json.tpl b/platformio/ide/tpls/vscode/.vscode/c_cpp_properties.json.tpl index 5fea5091..ced616b7 100644 --- a/platformio/ide/tpls/vscode/.vscode/c_cpp_properties.json.tpl +++ b/platformio/ide/tpls/vscode/.vscode/c_cpp_properties.json.tpl @@ -31,7 +31,8 @@ "{{!define.replace('"', '\\"')}}", % end "" - ] + ], + "intelliSenseMode": "clang-x64" } ] } \ No newline at end of file diff --git a/platformio/maintenance.py b/platformio/maintenance.py index 3e198f08..abbd7ca2 100644 --- a/platformio/maintenance.py +++ b/platformio/maintenance.py @@ -34,33 +34,10 @@ from platformio.managers.lib import LibraryManager from platformio.managers.platform import PlatformFactory, PlatformManager -def in_silence(ctx=None): - ctx = ctx or app.get_session_var("command_ctx") - assert ctx - ctx_args = ctx.args or [] - return ctx_args and any([ - ctx.args[0] == "upgrade", "--json-output" in ctx_args, - "--version" in ctx_args - ]) - - def on_platformio_start(ctx, force, caller): - if not caller: - if getenv("PLATFORMIO_CALLER"): - caller = getenv("PLATFORMIO_CALLER") - elif getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"): - caller = "vscode" - elif util.is_container(): - if getenv("C9_UID"): - caller = "C9" - elif getenv("USER") == "cabox": - caller = "CA" - elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")): - caller = "Che" - app.set_session_var("command_ctx", ctx) app.set_session_var("force_option", force) - app.set_session_var("caller_id", caller) + set_caller(caller) telemetry.on_command() if not in_silence(ctx): @@ -75,7 +52,8 @@ def on_platformio_end(ctx, result): # pylint: disable=W0613 check_platformio_upgrade() check_internal_updates(ctx, "platforms") check_internal_updates(ctx, "libraries") - except (exception.GetLatestVersionError, exception.APIRequestError): + except (exception.InternetIsOffline, exception.GetLatestVersionError, + exception.APIRequestError): click.secho( "Failed to check for PlatformIO upgrades. 
" "Please check your Internet connection.", @@ -86,6 +64,32 @@ def on_platformio_exception(e): telemetry.on_exception(e) +def in_silence(ctx=None): + ctx = ctx or app.get_session_var("command_ctx") + assert ctx + ctx_args = ctx.args or [] + return ctx_args and any([ + ctx.args[0] == "upgrade", "--json-output" in ctx_args, + "--version" in ctx_args + ]) + + +def set_caller(caller=None): + if not caller: + if getenv("PLATFORMIO_CALLER"): + caller = getenv("PLATFORMIO_CALLER") + elif getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"): + caller = "vscode" + elif util.is_container(): + if getenv("C9_UID"): + caller = "C9" + elif getenv("USER") == "cabox": + caller = "CA" + elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")): + caller = "Che" + app.set_session_var("caller_id", caller) + + class Upgrader(object): def __init__(self, from_version, to_version): @@ -98,7 +102,7 @@ class Upgrader(object): self._upgrade_to_3_0_0), (semantic_version.Version("3.0.0-b.11"), self._upgrade_to_3_0_0b11), - (semantic_version.Version("3.4.0-a.9"), + (semantic_version.Version("3.5.0-a.2"), self._update_dev_platforms)] def run(self, ctx): @@ -234,12 +238,14 @@ def check_platformio_upgrade(): if (time() - interval) < last_check.get("platformio_upgrade", 0): return - # Update PlatformIO's Core packages - update_core_packages(silent=True) - last_check['platformio_upgrade'] = int(time()) app.set_state_item("last_check", last_check) + util.internet_on(raise_exception=True) + + # Update PlatformIO's Core packages + update_core_packages(silent=True) + latest_version = get_latest_version() if semantic_version.Version.coerce(util.pepver_to_semver( latest_version)) <= semantic_version.Version.coerce( @@ -282,6 +288,8 @@ def check_internal_updates(ctx, what): last_check[what + '_update'] = int(time()) app.set_state_item("last_check", last_check) + util.internet_on(raise_exception=True) + pm = PlatformManager() if what == "platforms" else LibraryManager() outdated_items = [] for manifest in pm.get_installed(): diff --git a/platformio/managers/core.py b/platformio/managers/core.py index b052c23e..42a6c775 100644 --- a/platformio/managers/core.py +++ b/platformio/managers/core.py @@ -15,14 +15,15 @@ import os import subprocess import sys -from os.path import join +from os.path import dirname, join from platformio import __version__, exception, util from platformio.managers.package import PackageManager CORE_PACKAGES = { - "pysite-pioplus": ">=0.3.0,<2", - "tool-pioplus": ">=0.9.1,<2", + "contrib-piohome": ">=0.6.0,<2", + "contrib-pysite": ">=0.1.2,<2", + "tool-pioplus": ">=0.12.1,<2", "tool-unity": "~1.20302.1", "tool-scons": "~3.20501.2" } @@ -35,15 +36,18 @@ PIOPLUS_AUTO_UPDATES_MAX = 100 class CorePackageManager(PackageManager): def __init__(self): - PackageManager.__init__( - self, - join(util.get_home_dir(), "packages"), [ - "https://dl.bintray.com/platformio/dl-packages/manifest.json", - "http%s://dl.platformio.org/packages/manifest.json" % - ("" if sys.version_info < (2, 7, 9) else "s") - ]) + PackageManager.__init__(self, join(util.get_home_dir(), "packages"), [ + "https://dl.bintray.com/platformio/dl-packages/manifest.json", + "http%s://dl.platformio.org/packages/manifest.json" % + ("" if sys.version_info < (2, 7, 9) else "s") + ]) - def install(self, name, requirements=None, *args, **kwargs): + def install( # pylint: disable=keyword-arg-before-vararg + self, + name, + requirements=None, + *args, + **kwargs): PackageManager.install(self, name, requirements, *args, **kwargs) self.cleanup_packages() return 
self.get_package_dir(name, requirements) @@ -71,7 +75,8 @@ class CorePackageManager(PackageManager): def get_core_package_dir(name): - assert name in CORE_PACKAGES + if name not in CORE_PACKAGES: + raise exception.PlatformioException("Please upgrade PIO Core") requirements = CORE_PACKAGES[name] pm = CorePackageManager() pkg_dir = pm.get_package_dir(name, requirements) @@ -88,6 +93,7 @@ def update_core_packages(only_check=False, silent=False): continue if not silent or pm.outdated(pkg_dir, requirements): pm.update(name, requirements, only_check=only_check) + return True def pioplus_call(args, **kwargs): @@ -99,8 +105,11 @@ def pioplus_call(args, **kwargs): sys.version.split()[0])) pioplus_path = join(get_core_package_dir("tool-pioplus"), "pioplus") - os.environ['PYTHONEXEPATH'] = util.get_pythonexe_path() - os.environ['PYTHONPYSITEDIR'] = get_core_package_dir("pysite-pioplus") + pythonexe_path = util.get_pythonexe_path() + os.environ['PYTHONEXEPATH'] = pythonexe_path + os.environ['PYTHONPYSITEDIR'] = get_core_package_dir("contrib-pysite") + os.environ['PATH'] = (os.pathsep).join( + [dirname(pythonexe_path), os.environ['PATH']]) util.copy_pythonpath_to_osenv() code = subprocess.call([pioplus_path] + args, **kwargs) @@ -124,3 +133,5 @@ def pioplus_call(args, **kwargs): if code != 0: raise exception.ReturnErrorCode(1) + + return True diff --git a/platformio/managers/lib.py b/platformio/managers/lib.py index 5ce9fe99..ef00787b 100644 --- a/platformio/managers/lib.py +++ b/platformio/managers/lib.py @@ -21,7 +21,6 @@ from os.path import isdir, join import arrow import click -import semantic_version from platformio import app, commands, exception, util from platformio.managers.package import BasePkgManager @@ -71,7 +70,10 @@ class LibraryManager(BasePkgManager): del manifest['sentence'] if "author" in manifest: - manifest['authors'] = [{"name": manifest['author']}] + if isinstance(manifest['author'], dict): + manifest['authors'] = [manifest['author']] + else: + manifest['authors'] = [{"name": manifest['author']}] del manifest['author'] if "authors" in manifest and not isinstance(manifest['authors'], list): @@ -101,6 +103,7 @@ class LibraryManager(BasePkgManager): "sam": "atmelsam", "samd": "atmelsam", "esp8266": "espressif8266", + "esp32": "espressif32", "arc32": "intel_arc32" } for arch in manifest['architectures'].split(","): @@ -149,8 +152,7 @@ class LibraryManager(BasePkgManager): ] return items - @staticmethod - def max_satisfying_repo_version(versions, requirements=None): + def max_satisfying_repo_version(self, versions, requirements=None): def _cmp_dates(datestr1, datestr2): date1 = arrow.get(datestr1) @@ -159,29 +161,22 @@ class LibraryManager(BasePkgManager): return 0 return -1 if date1 < date2 else 1 + semver_spec = self.parse_semver_spec( + requirements) if requirements else None item = None - reqspec = None - if requirements: - try: - reqspec = semantic_version.Spec(requirements) - except ValueError: - pass - for v in versions: - specver = None - try: - specver = semantic_version.Version(v['name'], partial=True) - except ValueError: - pass - if reqspec: - if not specver or specver not in reqspec: + for v in versions: + semver_new = self.parse_semver_version(v['name']) + if semver_spec: + if not semver_new or semver_new not in semver_spec: continue - if not item or semantic_version.Version( - item['name'], partial=True) < specver: + if not item or self.parse_semver_version( + item['name']) < semver_new: item = v elif requirements: if requirements == v['name']: return v + else: if not 
item or _cmp_dates(item['released'], v['released']) == -1: @@ -193,7 +188,7 @@ class LibraryManager(BasePkgManager): util.get_api_result( "/lib/info/%d" % self.get_pkg_id_by_name( name, requirements, silent=silent), - cache_valid="1d")['versions'], requirements) + cache_valid="1h")['versions'], requirements) return item['name'] if item else None def get_pkg_id_by_name(self, @@ -236,11 +231,11 @@ class LibraryManager(BasePkgManager): requirements=None, silent=False, trigger_event=True, - interactive=False): + interactive=False, + force=False): pkg_dir = None try: - _name, _requirements, _url = self.parse_pkg_input( - name, requirements) + _name, _requirements, _url = self.parse_pkg_uri(name, requirements) if not _url: name = "id=%d" % self.get_pkg_id_by_name( _name, @@ -248,15 +243,20 @@ class LibraryManager(BasePkgManager): silent=silent, interactive=interactive) requirements = _requirements - pkg_dir = BasePkgManager.install(self, name, requirements, silent, - trigger_event) + pkg_dir = BasePkgManager.install( + self, + name, + requirements, + silent=silent, + trigger_event=trigger_event, + force=force) except exception.InternetIsOffline as e: if not silent: click.secho(str(e), fg="yellow") - return + return None if not pkg_dir: - return + return None manifest = self.load_manifest(pkg_dir) if "dependencies" not in manifest: @@ -268,7 +268,12 @@ class LibraryManager(BasePkgManager): for filters in self.normalize_dependencies(manifest['dependencies']): assert "name" in filters if any([s in filters.get("version", "") for s in ("\\", "/")]): - self.install("{name}={version}".format(**filters)) + self.install( + "{name}={version}".format(**filters), + silent=silent, + trigger_event=trigger_event, + interactive=interactive, + force=force) else: try: lib_info = self.search_for_library(filters, silent, @@ -281,14 +286,18 @@ class LibraryManager(BasePkgManager): if filters.get("version"): self.install( lib_info['id'], - requirements=filters.get("version"), + filters.get("version"), silent=silent, - trigger_event=trigger_event) + trigger_event=trigger_event, + interactive=interactive, + force=force) else: self.install( lib_info['id'], silent=silent, - trigger_event=trigger_event) + trigger_event=trigger_event, + interactive=interactive, + force=force) return pkg_dir @staticmethod @@ -315,7 +324,7 @@ class LibraryManager(BasePkgManager): lib_info = None result = util.get_api_result( - "/v2/lib/search", dict(query=" ".join(query)), cache_valid="3d") + "/v2/lib/search", dict(query=" ".join(query)), cache_valid="1h") if result['total'] == 1: lib_info = result['items'][0] elif result['total'] > 1: diff --git a/platformio/managers/package.py b/platformio/managers/package.py index 54fcaa61..cc3e444c 100644 --- a/platformio/managers/package.py +++ b/platformio/managers/package.py @@ -30,7 +30,7 @@ from platformio.downloader import FileDownloader from platformio.unpacker import FileUnpacker from platformio.vcsclient import VCSClientFactory -# pylint: disable=too-many-arguments +# pylint: disable=too-many-arguments, too-many-return-statements class PackageRepoIterator(object): @@ -78,9 +78,15 @@ class PkgRepoMixin(object): PIO_VERSION = semantic_version.Version(util.pepver_to_semver(__version__)) @staticmethod - def max_satisfying_repo_version(versions, requirements=None): + def is_system_compatible(valid_systems): + if valid_systems in (None, "all", "*"): + return True + if not isinstance(valid_systems, list): + valid_systems = list([valid_systems]) + return util.get_systype() in valid_systems + + def 
max_satisfying_repo_version(self, versions, requirements=None): item = None - systype = util.get_systype() reqspec = None if requirements: try: @@ -89,8 +95,7 @@ class PkgRepoMixin(object): pass for v in versions: - if "system" in v and v['system'] not in ("all", "*") and \ - systype not in v['system']: + if not self.is_system_compatible(v.get("system")): continue if "platformio" in v.get("engines", {}): if PkgRepoMixin.PIO_VERSION not in semantic_version.Spec( @@ -121,8 +126,9 @@ class PkgRepoMixin(object): def get_all_repo_versions(self, name): result = [] for versions in PackageRepoIterator(name, self.repositories): - result.extend([v['version'] for v in versions]) - return sorted(set(result)) + result.extend( + [semantic_version.Version(v['version']) for v in versions]) + return [str(v) for v in sorted(set(result))] class PkgInstallerMixin(object): @@ -187,15 +193,36 @@ class PkgInstallerMixin(object): @staticmethod def unpack(source_path, dest_dir): - fu = FileUnpacker(source_path, dest_dir) - return fu.start() + with FileUnpacker(source_path) as fu: + return fu.unpack(dest_dir) + + @staticmethod + def parse_semver_spec(value, raise_exception=False): + try: + return semantic_version.Spec(value) + except ValueError as e: + if raise_exception: + raise e + return None + + @staticmethod + def parse_semver_version(value, raise_exception=False): + try: + try: + return semantic_version.Version(value) + except ValueError: + return semantic_version.Version.coerce(value) + except ValueError as e: + if raise_exception: + raise e + return None @staticmethod def get_install_dirname(manifest): name = re.sub(r"[^\da-z\_\-\. ]", "_", manifest['name'], flags=re.I) if "id" in manifest: name += "_ID%d" % manifest['id'] - return name + return str(name) def get_src_manifest_path(self, pkg_dir): if not isdir(pkg_dir): @@ -258,7 +285,7 @@ class PkgInstallerMixin(object): if "version" not in manifest: manifest['version'] = "0.0.0" - manifest['__pkg_dir'] = pkg_dir + manifest['__pkg_dir'] = util.path_to_unicode(pkg_dir) self.cache_set(cache_key, manifest) return manifest @@ -283,21 +310,23 @@ class PkgInstallerMixin(object): continue elif not pkg_id and manifest['name'] != name: continue + elif not PkgRepoMixin.is_system_compatible(manifest.get("system")): + continue # strict version or VCS HASH if requirements and requirements == manifest['version']: return manifest try: - if requirements and not semantic_version.Spec( - requirements).match( - semantic_version.Version( - manifest['version'], partial=True)): + if requirements and not self.parse_semver_spec( + requirements, raise_exception=True).match( + self.parse_semver_version( + manifest['version'], raise_exception=True)): continue - elif not best or (semantic_version.Version( - manifest['version'], partial=True) > - semantic_version.Version( - best['version'], partial=True)): + elif not best or (self.parse_semver_version( + manifest['version'], raise_exception=True) > + self.parse_semver_version( + best['version'], raise_exception=True)): best = manifest except ValueError: pass @@ -383,7 +412,7 @@ class PkgInstallerMixin(object): finally: if isdir(tmp_dir): util.rmtree_(tmp_dir) - return + return None def _update_src_manifest(self, data, src_dir): if not isdir(src_dir): @@ -405,16 +434,10 @@ class PkgInstallerMixin(object): pkg_dir = join(self.package_dir, pkg_dirname) cur_manifest = self.load_manifest(pkg_dir) - tmp_semver = None + tmp_semver = self.parse_semver_version(tmp_manifest['version']) cur_semver = None - try: - tmp_semver = 
semantic_version.Version( - tmp_manifest['version'], partial=True) - if cur_manifest: - cur_semver = semantic_version.Version( - cur_manifest['version'], partial=True) - except ValueError: - pass + if cur_manifest: + cur_semver = self.parse_semver_version(cur_manifest['version']) # package should satisfy requirements if requirements: @@ -490,51 +513,57 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): click.echo("%s: %s" % (self.__class__.__name__, message), nl=nl) @staticmethod - def parse_pkg_input( # pylint: disable=too-many-branches + def parse_pkg_uri( # pylint: disable=too-many-branches text, requirements=None): text = str(text) - # git@github.com:user/package.git - url_marker = text[:4] - if url_marker not in ("git@", "git+") or ":" not in text: - url_marker = "://" + name, url = None, None + # Parse requirements req_conditions = [ - not requirements, - "@" in text, - not url_marker.startswith("git") - ] # yapf: disable + "@" in text, not requirements, ":" not in text + or text.rfind("/") < text.rfind("@") + ] if all(req_conditions): text, requirements = text.rsplit("@", 1) + + # Handle PIO Library Registry ID if text.isdigit(): text = "id=" + text + # Parse custom name + elif "=" in text and not text.startswith("id="): + name, text = text.split("=", 1) - name, url = (None, text) - if "=" in text and not text.startswith("id="): - name, url = text.split("=", 1) + # Parse URL + # if valid URL with scheme vcs+protocol:// + if "+" in text and text.find("+") < text.find("://"): + url = text + elif "/" in text or "\\" in text: + git_conditions = [ + # Handle GitHub URL (https://github.com/user/package) + text.startswith("https://github.com/") and not text.endswith( + (".zip", ".tar.gz")), + (text.split("#", 1)[0] + if "#" in text else text).endswith(".git") + ] + hg_conditions = [ + # Handle Developer Mbed URL + # (https://developer.mbed.org/users/user/code/package/) + text.startswith("https://developer.mbed.org") + ] + if any(git_conditions): + url = "git+" + text + elif any(hg_conditions): + url = "hg+" + text + elif "://" not in text and (isfile(text) or isdir(text)): + url = "file://" + text + elif "://" in text: + url = text + # Handle short version of GitHub URL + elif text.count("/") == 1: + url = "git+https://github.com/" + text - git_conditions = [ - # Handle GitHub URL (https://github.com/user/package) - url.startswith("https://github.com/") and not url.endswith( - (".zip", ".tar.gz")), - url.startswith("http") - and (url.split("#", 1)[0] if "#" in url else url).endswith(".git") - ] - if any(git_conditions): - url = "git+" + url - - # Handle Developer Mbed URL - # (https://developer.mbed.org/users/user/code/package/) - if url.startswith("https://developer.mbed.org"): - url = "hg+" + url - - if any([s in url for s in ("\\", "/")]) and url_marker not in url: - if isfile(url) or isdir(url): - url = "file://" + url - elif url.count("/") == 1 and "git" not in url_marker: - url = "git+https://github.com/" + url - - # determine name - if url_marker in url and not name: + # Parse name from URL + if url and not name: _url = url.split("#", 1)[0] if "#" in url else url if _url.endswith(("\\", "/")): _url = _url[:-1] @@ -542,8 +571,6 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): if "." 
in name and not name.startswith("."): name = name.rsplit(".", 1)[0] - if url_marker not in url: - url = None return (name or text, requirements, url) def outdated(self, pkg_dir, requirements=None): @@ -553,11 +580,12 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): `False` - package is up-to-date `String` - a found latest version """ - assert isdir(pkg_dir) + if not isdir(pkg_dir): + return None latest = None manifest = self.load_manifest(pkg_dir) - # skip a fixed package to a specific version - if "@" in pkg_dir and "__src_url" not in manifest: + # skip fixed package to a specific version + if "@" in pkg_dir and "__src_url" not in manifest and not requirements: return None if "__src_url" in manifest: @@ -585,8 +613,10 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): up_to_date = False try: assert "__src_url" not in manifest - up_to_date = (semantic_version.Version.coerce(manifest['version']) - >= semantic_version.Version.coerce(latest)) + up_to_date = (self.parse_semver_version( + manifest['version'], raise_exception=True) >= + self.parse_semver_version( + latest, raise_exception=True)) except (AssertionError, ValueError): up_to_date = latest == manifest['version'] @@ -596,18 +626,22 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): name, requirements=None, silent=False, - trigger_event=True): + trigger_event=True, + force=False): + name, requirements, url = self.parse_pkg_uri(name, requirements) + package_dir = self.get_package_dir(name, requirements, url) # avoid circle dependencies if not self.INSTALL_HISTORY: self.INSTALL_HISTORY = [] - history_key = "%s-%s" % (name, requirements) if requirements else name + history_key = "%s-%s-%s" % (name, requirements or "", url or "") if history_key in self.INSTALL_HISTORY: - return + return package_dir self.INSTALL_HISTORY.append(history_key) - name, requirements, url = self.parse_pkg_input(name, requirements) - package_dir = self.get_package_dir(name, requirements, url) + if package_dir and force: + self.uninstall(package_dir) + package_dir = None if not package_dir or not silent: msg = "Installing " + click.style(name, fg="cyan") @@ -652,8 +686,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): if isdir(package): pkg_dir = package else: - name, requirements, url = self.parse_pkg_input( - package, requirements) + name, requirements, url = self.parse_pkg_uri(package, requirements) pkg_dir = self.get_package_dir(name, requirements, url) if not pkg_dir: @@ -689,15 +722,11 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): label=manifest['name']) return True - def update( # pylint: disable=too-many-return-statements - self, - package, - requirements=None, - only_check=False): + def update(self, package, requirements=None, only_check=False): if isdir(package): pkg_dir = package else: - pkg_dir = self.get_package_dir(*self.parse_pkg_input(package)) + pkg_dir = self.get_package_dir(*self.parse_pkg_uri(package)) if not pkg_dir: raise exception.UnknownPackage("%s @ %s" % (package, @@ -713,7 +742,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): nl=False) if not util.internet_on(): click.echo("[%s]" % (click.style("Off-line", fg="yellow"))) - return + return None latest = self.outdated(pkg_dir, requirements) if latest: @@ -721,10 +750,10 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin): elif latest is False: click.echo("[%s]" % (click.style("Up-to-date", fg="green"))) else: - click.echo("[%s]" % (click.style("Skip", fg="yellow"))) + click.echo("[%s]" % (click.style("Fixed", fg="yellow"))) if 
only_check or not latest: - return + return True if "__src_url" in manifest: vcs = VCSClientFactory.newClient(pkg_dir, manifest['__src_url']) diff --git a/platformio/managers/platform.py b/platformio/managers/platform.py index dec4bd30..b3090705 100644 --- a/platformio/managers/platform.py +++ b/platformio/managers/platform.py @@ -18,6 +18,7 @@ import re from imp import load_source from multiprocessing import cpu_count from os.path import basename, dirname, isdir, isfile, join +from urllib import quote import click import semantic_version @@ -63,9 +64,10 @@ class PlatformManager(BasePkgManager): skip_default_package=False, trigger_event=True, silent=False, + force=False, **_): # pylint: disable=too-many-arguments, arguments-differ platform_dir = BasePkgManager.install( - self, name, requirements, silent=silent) + self, name, requirements, silent=silent, force=force) p = PlatformFactory.newPlatform(platform_dir) # @Hook: when 'update' operation (trigger_event is False), @@ -76,7 +78,8 @@ class PlatformManager(BasePkgManager): with_packages, without_packages, skip_default_package, - silent=silent) + silent=silent, + force=force) self.cleanup_packages(p.packages.keys()) return True @@ -84,10 +87,12 @@ class PlatformManager(BasePkgManager): if isdir(package): pkg_dir = package else: - name, requirements, url = self.parse_pkg_input( - package, requirements) + name, requirements, url = self.parse_pkg_uri(package, requirements) pkg_dir = self.get_package_dir(name, requirements, url) + if not pkg_dir: + raise exception.UnknownPlatform(package) + p = PlatformFactory.newPlatform(pkg_dir) BasePkgManager.uninstall(self, pkg_dir, requirements) @@ -108,25 +113,28 @@ class PlatformManager(BasePkgManager): if isdir(package): pkg_dir = package else: - name, requirements, url = self.parse_pkg_input( - package, requirements) + name, requirements, url = self.parse_pkg_uri(package, requirements) pkg_dir = self.get_package_dir(name, requirements, url) - p = PlatformFactory.newPlatform(pkg_dir) - pkgs_before = pkgs_after = p.get_installed_packages().keys() + if not pkg_dir: + raise exception.UnknownPlatform(package) + p = PlatformFactory.newPlatform(pkg_dir) + pkgs_before = p.get_installed_packages().keys() + + missed_pkgs = set() if not only_packages: BasePkgManager.update(self, pkg_dir, requirements, only_check) p = PlatformFactory.newPlatform(pkg_dir) - pkgs_after = p.get_installed_packages().keys() + missed_pkgs = set(pkgs_before) & set(p.packages.keys()) + missed_pkgs -= set(p.get_installed_packages().keys()) p.update_packages(only_check) self.cleanup_packages(p.packages.keys()) - pkgs_missed = set(pkgs_before) - set(pkgs_after) - if pkgs_missed: + if missed_pkgs: p.install_packages( - with_packages=pkgs_missed, skip_default_package=True) + with_packages=list(missed_pkgs), skip_default_package=True) return True @@ -164,7 +172,19 @@ class PlatformManager(BasePkgManager): @staticmethod @util.memoized def get_registered_boards(): - return util.get_api_result("/boards", cache_valid="30d") + return util.get_api_result("/boards", cache_valid="7d") + + def get_all_boards(self): + boards = self.get_installed_boards() + know_boards = ["%s:%s" % (b['platform'], b['id']) for b in boards] + try: + for board in self.get_registered_boards(): + key = "%s:%s" % (board['platform'], board['id']) + if key not in know_boards: + boards.append(board) + except (exception.APIRequestError, exception.InternetIsOffline): + pass + return sorted(boards, key=lambda b: b['name']) def board_config(self, id_, platform=None): for manifest 
in self.get_installed_boards(): @@ -197,18 +217,19 @@ class PlatformFactory(object): @classmethod def newPlatform(cls, name, requirements=None): + pm = PlatformManager() platform_dir = None if isdir(name): platform_dir = name - name = PlatformManager().load_manifest(platform_dir)['name'] + name = pm.load_manifest(platform_dir)['name'] elif name.endswith("platform.json") and isfile(name): platform_dir = dirname(name) name = util.load_json(name)['name'] else: - if not requirements and "@" in name: - name, requirements = name.rsplit("@", 1) - platform_dir = PlatformManager().get_package_dir( - name, requirements) + name, requirements, url = pm.parse_pkg_uri(name, requirements) + platform_dir = pm.get_package_dir(name, requirements, url) + if platform_dir: + name = pm.load_manifest(platform_dir)['name'] if not platform_dir: raise exception.UnknownPlatform(name if not requirements else @@ -230,11 +251,13 @@ class PlatformFactory(object): class PlatformPackagesMixin(object): - def install_packages(self, - with_packages=None, - without_packages=None, - skip_default_package=False, - silent=False): + def install_packages( # pylint: disable=too-many-arguments + self, + with_packages=None, + without_packages=None, + skip_default_package=False, + silent=False, + force=False): with_packages = set(self.find_pkg_names(with_packages or [])) without_packages = set(self.find_pkg_names(without_packages or [])) @@ -249,14 +272,11 @@ class PlatformPackagesMixin(object): continue elif (name in with_packages or not (skip_default_package or opts.get("optional", False))): - if self.is_valid_requirements(version): - self.pm.install(name, version, silent=silent) - else: - requirements = None - if "@" in version: - version, requirements = version.rsplit("@", 1) + if ":" in version: self.pm.install( - "%s=%s" % (name, version), requirements, silent=silent) + "%s=%s" % (name, version), silent=silent, force=force) + else: + self.pm.install(name, version, silent=silent, force=force) return True @@ -279,10 +299,10 @@ class PlatformPackagesMixin(object): def update_packages(self, only_check=False): for name, manifest in self.get_installed_packages().items(): - version = self.packages[name].get("version", "") - if "@" in version: - _, version = version.rsplit("@", 1) - self.pm.update(manifest['__pkg_dir'], version, only_check) + requirements = self.packages[name].get("version", "") + if ":" in requirements: + _, requirements, __ = self.pm.parse_pkg_uri(requirements) + self.pm.update(manifest['__pkg_dir'], requirements, only_check) def get_installed_packages(self): items = {} @@ -294,18 +314,19 @@ class PlatformPackagesMixin(object): def are_outdated_packages(self): for name, manifest in self.get_installed_packages().items(): - version = self.packages[name].get("version", "") - if "@" in version: - _, version = version.rsplit("@", 1) - if self.pm.outdated(manifest['__pkg_dir'], version): + requirements = self.packages[name].get("version", "") + if ":" in requirements: + _, requirements, __ = self.pm.parse_pkg_uri(requirements) + if self.pm.outdated(manifest['__pkg_dir'], requirements): return True return False def get_package_dir(self, name): version = self.packages[name].get("version", "") - if self.is_valid_requirements(version): - return self.pm.get_package_dir(name, version) - return self.pm.get_package_dir(*self._parse_pkg_input(name, version)) + if ":" in version: + return self.pm.get_package_dir(*self.pm.parse_pkg_uri( + "%s=%s" % (name, version))) + return self.pm.get_package_dir(name, version) def 
get_package_version(self, name): pkg_dir = self.get_package_dir(name) @@ -313,16 +334,6 @@ class PlatformPackagesMixin(object): return None return self.pm.load_manifest(pkg_dir).get("version") - @staticmethod - def is_valid_requirements(requirements): - return requirements and "://" not in requirements - - def _parse_pkg_input(self, name, version): - requirements = None - if "@" in version: - version, requirements = version.rsplit("@", 1) - return self.pm.parse_pkg_input("%s=%s" % (name, version), requirements) - class PlatformRunMixin(object): @@ -384,6 +395,12 @@ class PlatformRunMixin(object): is_error = self.LINE_ERROR_RE.search(line) is not None self._echo_line(line, level=3 if is_error else 2) + a_pos = line.find("fatal error:") + b_pos = line.rfind(": No such file or directory") + if a_pos == -1 or b_pos == -1: + return + self._echo_missed_dependency(line[a_pos + 12:b_pos].strip()) + def _echo_line(self, line, level): if line.startswith("scons: "): line = line[7:] @@ -395,6 +412,27 @@ class PlatformRunMixin(object): fg = "green" click.secho(line, fg=fg, err=level > 1) + @staticmethod + def _echo_missed_dependency(filename): + if "/" in filename or not filename.endswith((".h", ".hpp")): + return + banner = """ +{dots} +* Looking for {filename_styled} dependency? Check our library registry! +* +* CLI > platformio lib search "header:{filename}" +* Web > {link} +* +{dots} +""".format(filename=filename, + filename_styled=click.style(filename, fg="cyan"), + link=click.style( + "http://platformio.org/lib/search?query=header:%s" % quote( + filename, safe=""), + fg="blue"), + dots="*" * (55 + len(filename))) + click.echo(banner, err=True) + @staticmethod def get_job_nums(): try: @@ -498,8 +536,8 @@ class PlatformBase( # pylint: disable=too-many-public-methods config = PlatformBoardConfig(manifest_path) if "platform" in config and config.get("platform") != self.name: return - elif ("platforms" in config - and self.name not in config.get("platforms")): + elif "platforms" in config \ + and self.name not in config.get("platforms"): return config.manifest['platform'] = self.name self._BOARDS_CACHE[board_id] = config @@ -637,24 +675,37 @@ class PlatformBoardConfig(object): def get_brief_data(self): return { - "id": self.id, - "name": self._manifest['name'], - "platform": self._manifest.get("platform"), - "mcu": self._manifest.get("build", {}).get("mcu", "").upper(), + "id": + self.id, + "name": + self._manifest['name'], + "platform": + self._manifest.get("platform"), + "mcu": + self._manifest.get("build", {}).get("mcu", "").upper(), "fcpu": - int(self._manifest.get("build", {}).get("f_cpu", "0L")[:-1]), - "ram": self._manifest.get("upload", {}).get("maximum_ram_size", 0), - "rom": self._manifest.get("upload", {}).get("maximum_size", 0), - "connectivity": self._manifest.get("connectivity"), - "frameworks": self._manifest.get("frameworks"), - "debug": self.get_debug_data(), - "vendor": self._manifest['vendor'], - "url": self._manifest['url'] + int( + re.sub(r"[^\d]+", "", + self._manifest.get("build", {}).get("f_cpu", "0L"))), + "ram": + self._manifest.get("upload", {}).get("maximum_ram_size", 0), + "rom": + self._manifest.get("upload", {}).get("maximum_size", 0), + "connectivity": + self._manifest.get("connectivity"), + "frameworks": + self._manifest.get("frameworks"), + "debug": + self.get_debug_data(), + "vendor": + self._manifest['vendor'], + "url": + self._manifest['url'] } def get_debug_data(self): if not self._manifest.get("debug", {}).get("tools"): - return + return None tools = {} for name, 
options in self._manifest['debug']['tools'].items(): tools[name] = {} diff --git a/platformio/telemetry.py b/platformio/telemetry.py index 7c23e6b5..c3fd438f 100644 --- a/platformio/telemetry.py +++ b/platformio/telemetry.py @@ -15,10 +15,11 @@ import atexit import platform import Queue -import sys +import re import threading from collections import deque -from os import getenv +from os import getenv, sep +from os.path import join from time import sleep, time from traceback import format_exc @@ -109,7 +110,7 @@ class MeasurementProtocol(TelemetryBase): self['cd1'] = util.get_systype() self['cd2'] = "Python/%s %s" % (platform.python_version(), platform.platform()) - self['cd3'] = " ".join(_filter_args(sys.argv[1:])) + # self['cd3'] = " ".join(_filter_args(sys.argv[1:])) self['cd4'] = 1 if (not util.is_ci() and (caller_id or not util.is_container())) else 0 if caller_id: @@ -314,14 +315,29 @@ def on_event(category, action, label=None, value=None, screen_name=None): def on_exception(e): + + def _cleanup_description(text): + text = text.replace("Traceback (most recent call last):", "") + text = re.sub( + r'File "([^"]+)"', + lambda m: join(*m.group(1).split(sep)[-2:]), + text, + flags=re.M) + text = re.sub(r"\s+", " ", text, flags=re.M) + return text.strip() + skip_conditions = [ isinstance(e, cls) - for cls in (IOError, exception.AbortedByUser, - exception.NotGlobalLibDir, exception.InternetIsOffline, + for cls in (IOError, exception.ReturnErrorCode, + exception.AbortedByUser, exception.NotGlobalLibDir, + exception.InternetIsOffline, exception.NotPlatformIOProject, exception.UserSideException) ] - skip_conditions.append("[API] Account: " in str(e)) + try: + skip_conditions.append("[API] Account: " in str(e)) + except UnicodeEncodeError as ue: + e = ue if any(skip_conditions): return is_crash = any([ @@ -329,8 +345,8 @@ def on_exception(e): "Error" in e.__class__.__name__ ]) mp = MeasurementProtocol() - mp['exd'] = ("%s: %s" % (type(e).__name__, format_exc() - if is_crash else e))[:2048] + description = _cleanup_description(format_exc() if is_crash else str(e)) + mp['exd'] = ("%s: %s" % (type(e).__name__, description))[:2048] mp['exf'] = 1 if is_crash else 0 mp.send("exception") @@ -391,3 +407,4 @@ def resend_backuped_reports(): # clean tm['backup'] = [] app.set_state_item("telemetry", tm) + return True diff --git a/platformio/unpacker.py b/platformio/unpacker.py index 036dcc4c..4e789d1c 100644 --- a/platformio/unpacker.py +++ b/platformio/unpacker.py @@ -13,7 +13,7 @@ # limitations under the License. 
from os import chmod -from os.path import join, splitext +from os.path import join from tarfile import open as tarfile_open from time import mktime from zipfile import ZipFile @@ -39,6 +39,9 @@ class ArchiveBase(object): def after_extract(self, item, dest_dir): pass + def close(self): + self._afo.close() + class TARArchive(ArchiveBase): @@ -76,28 +79,32 @@ class ZIPArchive(ArchiveBase): class FileUnpacker(object): - def __init__(self, archpath, dest_dir="."): - self._archpath = archpath - self._dest_dir = dest_dir + def __init__(self, archpath): + self.archpath = archpath self._unpacker = None - _, archext = splitext(archpath.lower()) - if archext in (".gz", ".bz2"): - self._unpacker = TARArchive(archpath) - elif archext == ".zip": - self._unpacker = ZIPArchive(archpath) - + def __enter__(self): + if self.archpath.lower().endswith((".gz", ".bz2")): + self._unpacker = TARArchive(self.archpath) + elif self.archpath.lower().endswith(".zip"): + self._unpacker = ZIPArchive(self.archpath) if not self._unpacker: - raise UnsupportedArchiveType(archpath) + raise UnsupportedArchiveType(self.archpath) + return self - def start(self): + def __exit__(self, *args): + if self._unpacker: + self._unpacker.close() + + def unpack(self, dest_dir="."): + assert self._unpacker if app.is_disabled_progressbar(): click.echo("Unpacking...") for item in self._unpacker.get_items(): - self._unpacker.extract_item(item, self._dest_dir) + self._unpacker.extract_item(item, dest_dir) else: items = self._unpacker.get_items() with click.progressbar(items, label="Unpacking") as pb: for item in pb: - self._unpacker.extract_item(item, self._dest_dir) + self._unpacker.extract_item(item, dest_dir) return True diff --git a/platformio/util.py b/platformio/util.py index f5a7b830..a040380f 100644 --- a/platformio/util.py +++ b/platformio/util.py @@ -22,13 +22,13 @@ import socket import stat import subprocess import sys -from contextlib import contextmanager +from functools import wraps from glob import glob from os.path import (abspath, basename, dirname, expanduser, isdir, isfile, join, normpath, splitdrive) from shutil import rmtree from threading import Thread -from time import sleep +from time import sleep, time import click import requests @@ -149,6 +149,25 @@ class memoized(object): self.cache = {} +class throttle(object): + + def __init__(self, threshhold): + self.threshhold = threshhold # milliseconds + self.last = 0 + + def __call__(self, fn): + + @wraps(fn) + def wrapper(*args, **kwargs): + diff = int(round((time() - self.last) * 1000)) + if diff < self.threshhold: + sleep((self.threshhold - diff) * 0.001) + self.last = time() + return fn(*args, **kwargs) + + return wrapper + + def singleton(cls): """ From PEP-318 http://www.python.org/dev/peps/pep-0318/#examples """ _instances = {} @@ -161,12 +180,8 @@ def singleton(cls): return get_instance -@contextmanager -def capture_stdout(output): - stdout = sys.stdout - sys.stdout = output - yield - sys.stdout = stdout +def path_to_unicode(path): + return path.decode(sys.getfilesystemencoding()).encode("utf-8") def load_json(file_path): @@ -281,6 +296,11 @@ def get_projectsrc_dir(): return get_project_optional_dir("src_dir", join(get_project_dir(), "src")) +def get_projectinclude_dir(): + return get_project_optional_dir("include_dir", + join(get_project_dir(), "include")) + + def get_projecttest_dir(): return get_project_optional_dir("test_dir", join(get_project_dir(), "test")) @@ -317,11 +337,10 @@ def get_projectdata_dir(): def load_project_config(path=None): if not path or 
isdir(path): - project_dir = path or get_project_dir() - if not is_platformio_project(project_dir): - raise exception.NotPlatformIOProject(project_dir) - path = join(project_dir, "platformio.ini") - assert isfile(path) + path = join(path or get_project_dir(), "platformio.ini") + if not isfile(path): + raise exception.NotPlatformIOProject( + dirname(path) if path.endswith("platformio.ini") else path) cp = ProjectConfig() cp.read(path) return cp @@ -336,8 +355,8 @@ def parse_conf_multi_values(items): ] -def change_filemtime(path, time): - os.utime(path, (time, time)) +def change_filemtime(path, mtime): + os.utime(path, (mtime, mtime)) def is_ci(): @@ -398,7 +417,7 @@ def copy_pythonpath_to_osenv(): os.environ['PYTHONPATH'] = os.pathsep.join(_PYTHONPATH) -def get_serialports(filter_hwid=False): +def get_serial_ports(filter_hwid=False): try: from serial.tools.list_ports import comports except ImportError: @@ -426,29 +445,117 @@ def get_serialports(filter_hwid=False): return result -def get_logicaldisks(): - disks = [] +def get_logical_devices(): + items = [] if platform.system() == "Windows": - result = exec_command( - ["wmic", "logicaldisk", "get", "name,VolumeName"]).get("out", "") - disknamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?") - for line in result.split("\n"): - match = disknamere.match(line.strip()) - if not match: - continue - disks.append({"disk": match.group(1), "name": match.group(2)}) + try: + result = exec_command( + ["wmic", "logicaldisk", "get", "name,VolumeName"]).get( + "out", "") + devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?") + for line in result.split("\n"): + match = devicenamere.match(line.strip()) + if not match: + continue + items.append({ + "path": match.group(1) + "\\", + "name": match.group(2) + }) + return items + except WindowsError: # pylint: disable=undefined-variable + pass + # try "fsutil" + result = exec_command(["fsutil", "fsinfo", "drives"]).get("out", "") + for device in re.findall(r"[A-Z]:\\", result): + items.append({"path": device, "name": None}) + return items else: result = exec_command(["df"]).get("out") - disknamere = re.compile(r"\d+\%\s+([a-z\d\-_/]+)$", flags=re.I) + devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I) for line in result.split("\n"): - match = disknamere.search(line.strip()) + match = devicenamere.match(line.strip()) if not match: continue - disks.append({ - "disk": match.group(1), + items.append({ + "path": match.group(1), "name": basename(match.group(1)) }) - return disks + return items + + +### Backward compatibility for PIO Core <3.5 +get_serialports = get_serial_ports +get_logicaldisks = lambda: [{ + "disk": d['path'], + "name": d['name'] +} for d in get_logical_devices()] + + +def get_mdns_services(): + try: + import zeroconf + except ImportError: + from site import addsitedir + from platformio.managers.core import get_core_package_dir + contrib_pysite_dir = get_core_package_dir("contrib-pysite") + addsitedir(contrib_pysite_dir) + sys.path.insert(0, contrib_pysite_dir) + import zeroconf + + class mDNSListener(object): + + def __init__(self): + self._zc = zeroconf.Zeroconf( + interfaces=zeroconf.InterfaceChoice.All) + self._found_types = [] + self._found_services = [] + + def __enter__(self): + zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.", + self) + return self + + def __exit__(self, etype, value, traceback): + self._zc.close() + + def remove_service(self, zc, type_, name): + pass + + def add_service(self, zc, type_, name): + try: + assert zeroconf.service_type_name(name) + 
assert str(name) + except (AssertionError, UnicodeError, + zeroconf.BadTypeInNameException): + return + if name not in self._found_types: + self._found_types.append(name) + zeroconf.ServiceBrowser(self._zc, name, self) + if type_ in self._found_types: + s = zc.get_service_info(type_, name) + if s: + self._found_services.append(s) + + def get_services(self): + return self._found_services + + items = [] + with mDNSListener() as mdns: + sleep(3) + for service in mdns.get_services(): + items.append({ + "type": + service.type, + "name": + service.name, + "ip": + ".".join([str(ord(c)) for c in service.address]), + "port": + service.port, + "properties": + service.properties + }) + return items def get_request_defheaders(): @@ -461,6 +568,7 @@ def _api_request_session(): return requests.Session() +@throttle(500) def _get_api_result( url, # pylint: disable=too-many-branches params=None, @@ -470,7 +578,7 @@ def _get_api_result( result = None r = None - disable_ssl_check = sys.version_info < (2, 7, 9) + verify_ssl = sys.version_info >= (2, 7, 9) headers = get_request_defheaders() if not url.startswith("http"): @@ -486,14 +594,14 @@ def _get_api_result( data=data, headers=headers, auth=auth, - verify=not disable_ssl_check) + verify=verify_ssl) else: r = _api_request_session().get( url, params=params, headers=headers, auth=auth, - verify=not disable_ssl_check) + verify=verify_ssl) result = r.json() r.raise_for_status() except requests.exceptions.HTTPError as e: @@ -513,6 +621,7 @@ def _get_api_result( def get_api_result(url, params=None, data=None, auth=None, cache_valid=None): + internet_on(raise_exception=True) from platformio.app import ContentCache total = 0 max_retries = 5 @@ -532,8 +641,6 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None): return result except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e: - if not internet_on(): - raise exception.InternetIsOffline() from platformio.maintenance import in_silence total += 1 if not in_silence(): @@ -548,18 +655,38 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None): "Please try later.") -def internet_on(timeout=3): +PING_INTERNET_IPS = [ + "192.30.253.113", # github.com + "159.122.18.156", # dl.bintray.com + "193.222.52.25" # dl.platformio.org +] + + +@memoized +def _internet_on(): + timeout = 2 socket.setdefaulttimeout(timeout) - for host in ("dl.bintray.com", "dl.platformio.org"): + for ip in PING_INTERNET_IPS: try: - socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, - 80)) + if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")): + requests.get( + "http://%s" % ip, allow_redirects=False, timeout=timeout) + else: + socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, + 80)) return True except: # pylint: disable=bare-except pass return False +def internet_on(raise_exception=False): + result = _internet_on() + if raise_exception and not result: + raise exception.InternetIsOffline() + return result + + def get_pythonexe_path(): return os.environ.get("PYTHONEXEPATH", normpath(sys.executable)) @@ -596,8 +723,13 @@ def pepver_to_semver(pepver): def rmtree_(path): def _onerror(_, name, __): - os.chmod(name, stat.S_IWRITE) - os.remove(name) + try: + os.chmod(name, stat.S_IWRITE) + os.remove(name) + except Exception as e: # pylint: disable=broad-except + click.secho( + "Please manually remove file `%s`" % name, fg="red", err=True) + raise e return rmtree(path, onerror=_onerror) diff --git a/platformio/vcsclient.py b/platformio/vcsclient.py index 
b7c72ea1..6a924370 100644 --- a/platformio/vcsclient.py +++ b/platformio/vcsclient.py @@ -14,7 +14,7 @@ import re from os.path import join -from subprocess import check_call +from subprocess import CalledProcessError, check_call from sys import modules from urlparse import urlparse @@ -29,8 +29,9 @@ class VCSClientFactory(object): result = urlparse(remote_url) type_ = result.scheme tag = None - if not type_ and remote_url.startswith("git@"): + if not type_ and remote_url.startswith("git+"): type_ = "git" + remote_url = remote_url[4:] elif "+" in result.scheme: type_, _ = result.scheme.split("+", 1) remote_url = remote_url[len(type_) + 1:] @@ -93,7 +94,12 @@ class VCSClientBase(object): args = [self.command] + args if "cwd" not in kwargs: kwargs['cwd'] = self.src_dir - return check_call(args, **kwargs) == 0 + try: + check_call(args, **kwargs) + return True + except CalledProcessError as e: + raise PlatformioException( + "VCS: Could not process command %s" % e.cmd) def get_cmd_output(self, args, **kwargs): args = [self.command] + args @@ -111,6 +117,13 @@ class GitClient(VCSClientBase): command = "git" + def check_client(self): + try: + return VCSClientBase.check_client(self) + except UserSideException: + raise UserSideException( + "Please install Git client from https://git-scm.com/downloads") + def get_branches(self): output = self.get_cmd_output(["branch"]) output = output.replace("*", "") # fix active branch diff --git a/scripts/99-platformio-udev.rules b/scripts/99-platformio-udev.rules index 7adfcf0c..5703cd26 100644 --- a/scripts/99-platformio-udev.rules +++ b/scripts/99-platformio-udev.rules @@ -91,3 +91,7 @@ SUBSYSTEMS=="usb", ATTRS{idVendor}=="0451", ATTRS{idProduct}=="f432", MODE="0666 # CMSIS-DAP compatible adapters ATTRS{product}=="*CMSIS-DAP*", MODE="664", GROUP="plugdev" + +# Black Magic Probe +SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic GDB Server" +SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port" diff --git a/scripts/docspregen.py b/scripts/docspregen.py index 8b14713d..585b9ea2 100644 --- a/scripts/docspregen.py +++ b/scripts/docspregen.py @@ -335,7 +335,7 @@ Boards vendors = {} for data in BOARDS: - frameworks = data['frameworks'] + frameworks = data['frameworks'] or [] vendor = data['vendor'] if type_ in frameworks: if vendor in vendors: diff --git a/scripts/get-platformio.py b/scripts/get-platformio.py index d9abf5d5..90ad5fea 100644 --- a/scripts/get-platformio.py +++ b/scripts/get-platformio.py @@ -14,6 +14,7 @@ import os import subprocess +import site import sys from platform import system from tempfile import NamedTemporaryFile @@ -26,39 +27,34 @@ def fix_winpython_pathenv(): """ Add Python & Python Scripts to the search path on Windows """ - import ctypes - from ctypes.wintypes import HWND, UINT, WPARAM, LPARAM, LPVOID try: import _winreg as winreg except ImportError: import winreg # took these lines from the native "win_add2path.py" - pythonpath = os.path.dirname(CURINTERPRETER_PATH) + pythonpath = os.path.dirname(os.path.normpath(sys.executable)) scripts = os.path.join(pythonpath, "Scripts") - if not os.path.isdir(scripts): - os.makedirs(scripts) + appdata = os.environ["APPDATA"] + if hasattr(site, "USER_SITE"): + userpath = site.USER_SITE.replace(appdata, "%APPDATA%") + userscripts = os.path.join(userpath, "Scripts") + else: + userscripts = None - with winreg.CreateKey(winreg.HKEY_CURRENT_USER, u"Environment") as key: + with winreg.CreateKey(winreg.HKEY_CURRENT_USER, "Environment") as key: try: - envpath = winreg.QueryValueEx(key, u"PATH")[0] + 
envpath = winreg.QueryValueEx(key, "PATH")[0] except WindowsError: envpath = u"%PATH%" paths = [envpath] - for path in (pythonpath, scripts): + for path in (pythonpath, scripts, userscripts): if path and path not in envpath and os.path.isdir(path): paths.append(path) envpath = os.pathsep.join(paths) - winreg.SetValueEx(key, u"PATH", 0, winreg.REG_EXPAND_SZ, envpath) - winreg.ExpandEnvironmentStrings(envpath) - - # notify the system about the changes - SendMessage = ctypes.windll.user32.SendMessageW - SendMessage.argtypes = HWND, UINT, WPARAM, LPVOID - SendMessage.restype = LPARAM - SendMessage(0xFFFF, 0x1A, 0, u"Environment") + winreg.SetValueEx(key, "PATH", 0, winreg.REG_EXPAND_SZ, envpath) return True @@ -92,6 +88,10 @@ def exec_python_cmd(args): def install_pip(): + r = exec_python_cmd(["-m", "pip", "--version"]) + if r['returncode'] == 0: + print r['out'] + return try: from urllib2 import urlopen except ImportError: @@ -112,16 +112,16 @@ def install_pip(): def install_platformio(): r = None - cmd = ["pip", "install", "-U", "platformio"] + cmd = ["-m", "pip", "install", "-U", "platformio"] # cmd = [ - # "pip", "install", "-U", + # "-m", "pip", "install", "-U", # "https://github.com/platformio/platformio-core/archive/develop.zip" # ] try: r = exec_python_cmd(cmd) assert r['returncode'] == 0 except AssertionError: - cmd.insert(1, "--no-cache-dir") + cmd.insert(2, "--no-cache-dir") r = exec_python_cmd(cmd) if r: print_exec_result(r) diff --git a/setup.py b/setup.py index 116656cc..df14dd1d 100644 --- a/setup.py +++ b/setup.py @@ -18,14 +18,14 @@ from platformio import (__author__, __description__, __email__, __license__, __title__, __url__, __version__) install_requires = [ - "arrow<1", + "arrow>=0.10.0,!=0.11.0", "bottle<0.13", "click>=5,<6", "colorama", "lockfile>=0.9.1,<0.13", "pyserial>=3,<4,!=3.3", "requests>=2.4.0,<3", - "semantic_version>=2.5.0" + "semantic_version>=2.5.0,<3" ] setup( diff --git a/tests/commands/test_lib.py b/tests/commands/test_lib.py index 31ed9809..f2b8d725 100644 --- a/tests/commands/test_lib.py +++ b/tests/commands/test_lib.py @@ -14,7 +14,6 @@ import json import re -from os.path import basename from platformio import exception, util from platformio.commands.init import cli as cmd_init @@ -39,7 +38,7 @@ def test_global_install_registry(clirunner, validate_cliresult, result = clirunner.invoke(cmd_lib, [ "-g", "install", "58", "547@2.2.4", "DallasTemperature", "http://dl.platformio.org/libraries/archives/3/5174.tar.gz", - "ArduinoJson@5.6.7", "ArduinoJson@~5.7.0", "1089@fee16e880b" + "ArduinoJson@5.6.7", "ArduinoJson@~5.7.0", "168@00589a3250" ]) validate_cliresult(result) @@ -65,7 +64,7 @@ def test_global_install_registry(clirunner, validate_cliresult, items2 = [ "ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54", "DHT22_ID58", "ESPAsyncTCP_ID305", "NeoPixelBus_ID547", "OneWire_ID1", - "IRremoteESP8266_ID1089" + "EspSoftwareSerial_ID168" ] assert set(items1) == set(items2) @@ -134,7 +133,7 @@ def test_global_install_repository(clirunner, validate_cliresult, assert "is already installed" in result.output -def test_global_lib_list(clirunner, validate_cliresult, isolated_pio_home): +def test_global_lib_list(clirunner, validate_cliresult): result = clirunner.invoke(cmd_lib, ["-g", "list"]) validate_cliresult(result) assert all([n in result.output for n in ("OneWire", "DHT22", "64")]) @@ -142,31 +141,30 @@ def test_global_lib_list(clirunner, validate_cliresult, isolated_pio_home): result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"]) 
assert all([ n in result.output - for n in ("PJON", "git+https://github.com/knolleary/pubsubclient", - "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip" - ) + for n in ( + "PJON", "git+https://github.com/knolleary/pubsubclient", + "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip") ]) items1 = [i['name'] for i in json.loads(result.output)] items2 = [ "OneWire", "DHT22", "PJON", "ESPAsyncTCP", "ArduinoJson", "PubSubClient", "rs485-nodeproto", "Adafruit ST7735 Library", "RadioHead-1.62", "DallasTemperature", "NeoPixelBus", - "IRremoteESP8266", "platformio-libmirror" + "EspSoftwareSerial", "platformio-libmirror" ] assert set(items1) == set(items2) -def test_global_lib_update_check(clirunner, validate_cliresult, - isolated_pio_home): +def test_global_lib_update_check(clirunner, validate_cliresult): result = clirunner.invoke( cmd_lib, ["-g", "update", "--only-check", "--json-output"]) validate_cliresult(result) output = json.loads(result.output) - assert set(["ArduinoJson", "IRremoteESP8266", "NeoPixelBus"]) == set( - [l['name'] for l in output]) + assert set(["ArduinoJson", "EspSoftwareSerial", + "NeoPixelBus"]) == set([l['name'] for l in output]) -def test_global_lib_update(clirunner, validate_cliresult, isolated_pio_home): +def test_global_lib_update(clirunner, validate_cliresult): # update library using package directory result = clirunner.invoke( cmd_lib, @@ -184,10 +182,10 @@ def test_global_lib_update(clirunner, validate_cliresult, isolated_pio_home): result = clirunner.invoke(cmd_lib, ["-g", "update"]) validate_cliresult(result) validate_cliresult(result) - assert result.output.count("[Skip]") == 5 + assert result.output.count("[Fixed]") == 5 assert result.output.count("[Up-to-date]") == 10 assert "Uninstalling ArduinoJson @ 5.7.3" in result.output - assert "Uninstalling IRremoteESP8266 @ fee16e880b" in result.output + assert "Uninstalling EspSoftwareSerial @ 00589a3250" in result.output # update unknown library result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"]) @@ -208,14 +206,14 @@ def test_global_lib_uninstall(clirunner, validate_cliresult, # uninstall the rest libraries result = clirunner.invoke(cmd_lib, [ - "-g", "uninstall", "1", "ArduinoJson@!=5.6.7", - "https://github.com/bblanchon/ArduinoJson.git", "IRremoteESP8266@>=0.2" + "-g", "uninstall", "1", "https://github.com/bblanchon/ArduinoJson.git", + "ArduinoJson@!=5.6.7", "EspSoftwareSerial@>=3.3.1" ]) validate_cliresult(result) items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()] items2 = [ - "ArduinoJson", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54", + "ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54", "DHT22_ID58", "ESPAsyncTCP_ID305", "NeoPixelBus_ID547", "PJON", "PJON@src-79de467ebe19de18287becff0a1fb42d", "PubSubClient", "RadioHead-1.62", "rs485-nodeproto", "platformio-libmirror" @@ -228,7 +226,7 @@ def test_global_lib_uninstall(clirunner, validate_cliresult, assert isinstance(result.exception, exception.UnknownPackage) -def test_lib_show(clirunner, validate_cliresult, isolated_pio_home): +def test_lib_show(clirunner, validate_cliresult): result = clirunner.invoke(cmd_lib, ["show", "64"]) validate_cliresult(result) assert all( @@ -238,14 +236,14 @@ def test_lib_show(clirunner, validate_cliresult, isolated_pio_home): assert "OneWire" in result.output -def test_lib_builtin(clirunner, validate_cliresult, isolated_pio_home): +def test_lib_builtin(clirunner, validate_cliresult): result = clirunner.invoke(cmd_lib, ["builtin"]) 
validate_cliresult(result) result = clirunner.invoke(cmd_lib, ["builtin", "--json-output"]) validate_cliresult(result) -def test_lib_stats(clirunner, validate_cliresult, isolated_pio_home): +def test_lib_stats(clirunner, validate_cliresult): result = clirunner.invoke(cmd_lib, ["stats"]) validate_cliresult(result) assert all([ diff --git a/tests/commands/test_platform.py b/tests/commands/test_platform.py index bdc50916..2bb747fd 100644 --- a/tests/commands/test_platform.py +++ b/tests/commands/test_platform.py @@ -54,11 +54,11 @@ def test_install_unknown_from_registry(clirunner, validate_cliresult, def test_install_known_version(clirunner, validate_cliresult, isolated_pio_home): result = clirunner.invoke(cli_platform.platform_install, [ - "atmelavr@1.1.0", "--skip-default-package", "--with-package", + "atmelavr@1.2.0", "--skip-default-package", "--with-package", "tool-avrdude" ]) validate_cliresult(result) - assert "atmelavr @ 1.1.0" in result.output + assert "atmelavr @ 1.2.0" in result.output assert "Installing tool-avrdude @" in result.output assert len(isolated_pio_home.join("packages").listdir()) == 1 @@ -69,7 +69,7 @@ def test_install_from_vcs(clirunner, validate_cliresult, isolated_pio_home): "platform-espressif8266.git#feature/stage", "--skip-default-package" ]) validate_cliresult(result) - assert "espressif8266_stage" in result.output + assert "espressif8266" in result.output def test_list_json_output(clirunner, validate_cliresult, isolated_pio_home): @@ -79,14 +79,14 @@ def test_list_json_output(clirunner, validate_cliresult, isolated_pio_home): assert isinstance(list_result, list) assert len(list_result) platforms = [item['name'] for item in list_result] - assert set(["atmelavr", "espressif8266_stage"]) == set(platforms) + assert set(["atmelavr", "espressif8266"]) == set(platforms) def test_list_raw_output(clirunner, validate_cliresult, isolated_pio_home): result = clirunner.invoke(cli_platform.platform_list) validate_cliresult(result) assert all( - [s in result.output for s in ("atmelavr", "espressif8266_stage")]) + [s in result.output for s in ("atmelavr", "espressif8266")]) def test_update_check(clirunner, validate_cliresult, isolated_pio_home): @@ -102,13 +102,13 @@ def test_update_check(clirunner, validate_cliresult, isolated_pio_home): def test_update_raw(clirunner, validate_cliresult, isolated_pio_home): result = clirunner.invoke(cli_platform.platform_update) validate_cliresult(result) - assert "Uninstalling atmelavr @ 1.1.0:" in result.output + assert "Uninstalling atmelavr @ 1.2.0:" in result.output assert "PlatformManager: Installing atmelavr @" in result.output assert len(isolated_pio_home.join("packages").listdir()) == 1 def test_uninstall(clirunner, validate_cliresult, isolated_pio_home): result = clirunner.invoke(cli_platform.platform_uninstall, - ["atmelavr", "espressif8266_stage"]) + ["atmelavr", "espressif8266"]) validate_cliresult(result) assert len(isolated_pio_home.join("platforms").listdir()) == 0 diff --git a/tests/test_managers.py b/tests/test_managers.py index 1f58e03e..559c9d29 100644 --- a/tests/test_managers.py +++ b/tests/test_managers.py @@ -25,8 +25,8 @@ def test_pkg_input_parser(): [("PkgName", "!=1.2.3,<2.0"), ("PkgName", "!=1.2.3,<2.0", None)], ["PkgName@1.2.3", ("PkgName", "1.2.3", None)], [("PkgName@1.2.3", "1.2.5"), ("PkgName@1.2.3", "1.2.5", None)], - ["id:13", ("id:13", None, None)], - ["id:13@~1.2.3", ("id:13", "~1.2.3", None)], + ["id=13", ("id=13", None, None)], + ["id=13@~1.2.3", ("id=13", "~1.2.3", None)], [ util.get_home_dir(), 
(".platformio", None, "file://" + util.get_home_dir()) @@ -117,11 +117,15 @@ def test_pkg_input_parser(): ], [ "git@github.com:user/package.git", - ("package", None, "git@github.com:user/package.git") + ("package", None, "git+git@github.com:user/package.git") ], [ "git@github.com:user/package.git#v1.2.0", - ("package", None, "git@github.com:user/package.git#v1.2.0") + ("package", None, "git+git@github.com:user/package.git#v1.2.0") + ], + [ + "LocalName=git@github.com:user/package.git#v1.2.0@~1.2.0", + ("LocalName", "~1.2.0", "git+git@github.com:user/package.git#v1.2.0") ], [ "git+ssh://git@gitlab.private-server.com/user/package#1.2.0", @@ -132,13 +136,19 @@ def test_pkg_input_parser(): "git+ssh://user@gitlab.private-server.com:1234/package#1.2.0", ("package", None, "git+ssh://user@gitlab.private-server.com:1234/package#1.2.0") + ], + [ + "LocalName=git+ssh://user@gitlab.private-server.com:1234" + "/package#1.2.0@!=13", + ("LocalName", "!=13", + "git+ssh://user@gitlab.private-server.com:1234/package#1.2.0") ] ] for params, result in items: if isinstance(params, tuple): - assert PackageManager.parse_pkg_input(*params) == result + assert PackageManager.parse_pkg_uri(*params) == result else: - assert PackageManager.parse_pkg_input(params) == result + assert PackageManager.parse_pkg_uri(params) == result def test_install_packages(isolated_pio_home, tmpdir): @@ -146,7 +156,7 @@ def test_install_packages(isolated_pio_home, tmpdir): dict(id=1, name="name_1", version="shasum"), dict(id=1, name="name_1", version="2.0.0"), dict(id=1, name="name_1", version="2.1.0"), - dict(id=1, name="name_1", version="1.2.0"), + dict(id=1, name="name_1", version="1.2"), dict(id=1, name="name_1", version="1.0.0"), dict(name="name_2", version="1.0.0"), dict(name="name_2", version="2.0.0", @@ -167,7 +177,7 @@ def test_install_packages(isolated_pio_home, tmpdir): assert len(pm.get_installed()) == len(packages) - 1 pkg_dirnames = [ - 'name_1_ID1', 'name_1_ID1@1.0.0', 'name_1_ID1@1.2.0', + 'name_1_ID1', 'name_1_ID1@1.0.0', 'name_1_ID1@1.2', 'name_1_ID1@2.0.0', 'name_1_ID1@shasum', 'name_2', 'name_2@src-177cbce1f0705580d17790fda1cc2ef5', 'name_2@src-f863b537ab00f4c7b5011fc44b120e1f' @@ -182,12 +192,11 @@ def test_get_package(isolated_pio_home): [("1", ), None], [("id=1", "shasum"), dict(id=1, name="name_1", version="shasum")], [("id=1", "*"), dict(id=1, name="name_1", version="2.1.0")], - [("id=1", "^1"), dict(id=1, name="name_1", version="1.2.0")], - [("id=1", "^1"), dict(id=1, name="name_1", version="1.2.0")], - [("name_1", "<2"), dict(id=1, name="name_1", version="1.2.0")], + [("id=1", "^1"), dict(id=1, name="name_1", version="1.2")], + [("id=1", "^1"), dict(id=1, name="name_1", version="1.2")], + [("name_1", "<2"), dict(id=1, name="name_1", version="1.2")], [("name_1", ">2"), None], - [("name_1", "2-0-0"), dict(id=1, name="name_1", version="2.1.0")], - [("name_1", "2-0-0"), dict(id=1, name="name_1", version="2.1.0")], + [("name_1", "2-0-0"), None], [("name_2", ), dict(name="name_2", version="4.0.0")], [("url_has_higher_priority", None, "git+https://github.com"), dict(name="name_2", version="2.0.0", diff --git a/tests/test_misc.py b/tests/test_misc.py new file mode 100644 index 00000000..e8121c99 --- /dev/null +++ b/tests/test_misc.py @@ -0,0 +1,22 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests + +from platformio import util + + +def test_ping_internet_ips(): + for ip in util.PING_INTERNET_IPS: + requests.get("http://%s" % ip, allow_redirects=False, timeout=2) diff --git a/tests/test_pkgmanifest.py b/tests/test_pkgmanifest.py index a34d6864..b7a73061 100644 --- a/tests/test_pkgmanifest.py +++ b/tests/test_pkgmanifest.py @@ -41,4 +41,4 @@ def test_packages(): if "X-Checksum-Sha1" not in r.headers: return pytest.skip("X-Checksum-Sha1 is not provided") - assert item['sha1'] == r.headers.get("X-Checksum-Sha1"), item + assert item['sha1'] == r.headers.get("X-Checksum-Sha1")[0:40], item diff --git a/tox.ini b/tox.ini index d0974980..ad3288c7 100644 --- a/tox.ini +++ b/tox.ini @@ -20,8 +20,7 @@ basepython = python2.7 usedevelop = True deps = isort - flake8 - yapf<0.17 + yapf pylint pytest commands = python --version @@ -47,10 +46,8 @@ commands = [testenv:lint] basepython = python2.7 deps = - flake8 pylint commands = - flake8 ./platformio pylint --rcfile=./.pylintrc ./platformio [testenv]
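A minimal usage sketch (illustrative only, not part of the patch above) of the reworked util and unpacker interfaces introduced in this diff: FileUnpacker is now a context manager with an explicit unpack(dest_dir) call, internet_on(raise_exception=True) raises InternetIsOffline directly instead of returning False, and the new throttle decorator spaces out repeated calls. The archive path, destination directory, and the poll_serial_ports/download_and_unpack helper names below are hypothetical.

    # Illustrative sketch only; assumes PlatformIO Core with this patch applied.
    from platformio import exception, util
    from platformio.unpacker import FileUnpacker

    @util.throttle(500)  # successive calls are spaced at least ~500 ms apart
    def poll_serial_ports():
        return util.get_serial_ports()

    def download_and_unpack(archive_path, dest_dir="."):
        try:
            # Raises instead of silently returning False
            util.internet_on(raise_exception=True)
        except exception.InternetIsOffline:
            print("Offline: relying on a previously downloaded archive")
        # The archive handle is closed automatically on exit;
        # unsupported extensions raise UnsupportedArchiveType on enter.
        with FileUnpacker(archive_path) as fu:
            return fu.unpack(dest_dir)

    download_and_unpack("/tmp/framework-arduinoavr.tar.gz", "/tmp/framework")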