forked from platformio/platformio-core
Merge branch 'feature/pkg-next' into develop
@@ -25,8 +25,9 @@ from time import time

import requests

from platformio import __version__, exception, fs, lockfile, proc
from platformio import __version__, exception, fs, proc
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
from platformio.package.lockfile import LockFile
from platformio.project.helpers import (
    get_default_projects_dir,
    get_project_cache_dir,
@@ -125,7 +126,7 @@ class State(object):
    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = lockfile.LockFile(self.path)
        self._lockfile = LockFile(self.path)
        try:
            self._lockfile.acquire()
        except IOError:
@@ -143,6 +144,9 @@ class State(object):
    def as_dict(self):
        return self._storage

    def keys(self):
        return self._storage.keys()

    def get(self, key, default=True):
        return self._storage.get(key, default)

@@ -187,7 +191,7 @@ class ContentCache(object):
    def _lock_dbindex(self):
        if not self.cache_dir:
            os.makedirs(self.cache_dir)
        self._lockfile = lockfile.LockFile(self.cache_dir)
        self._lockfile = LockFile(self.cache_dir)
        try:
            self._lockfile.acquire()
        except: # pylint: disable=bare-except
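For reference, a minimal sketch of how the `State` helper above is used elsewhere in this changeset: dict-style access behind an optional file lock. The state-file path below is only an example, not part of the diff.

```python
# Illustrative only: exercises the as_dict()/keys()/get() API added above.
from platformio import app

with app.State("/tmp/pio-state.json", lock=True) as state:  # hypothetical path
    state["last_check"] = {"platforms_update": 0}
    print(state.get("last_check", {}))  # dict-like read with a default
    print(list(state.keys()))           # keys() proxies the underlying storage
```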
@@ -34,11 +34,13 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error

from platformio import exception, fs, util
from platformio.builder.tools import platformio as piotool
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
from platformio.managers.lib import LibraryManager
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manifest.parser import (
    ManifestParserError,
    ManifestParserFactory,
)
from platformio.package.meta import PackageSourceItem
from platformio.project.options import ProjectOptions


@@ -851,34 +853,36 @@ class ProjectAsLibBuilder(LibBuilderBase):
        pass

    def install_dependencies(self):
        def _is_builtin(uri):
        def _is_builtin(spec):
            for lb in self.env.GetLibBuilders():
                if lb.name == uri:
                if lb.name == spec:
                    return True
            return False

        not_found_uri = []
        for uri in self.dependencies:
        not_found_specs = []
        for spec in self.dependencies:
            # check if built-in library
            if _is_builtin(uri):
            if _is_builtin(spec):
                continue

            found = False
            for storage_dir in self.env.GetLibSourceDirs():
                lm = LibraryManager(storage_dir)
                if lm.get_package_dir(*lm.parse_pkg_uri(uri)):
                lm = LibraryPackageManager(storage_dir)
                if lm.get_package(spec):
                    found = True
                    break
            if not found:
                not_found_uri.append(uri)
                not_found_specs.append(spec)

        did_install = False
        lm = LibraryManager(self.env.subst(join("$PROJECT_LIBDEPS_DIR", "$PIOENV")))
        for uri in not_found_uri:
        lm = LibraryPackageManager(
            self.env.subst(join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
        )
        for spec in not_found_specs:
            try:
                lm.install(uri)
                lm.install(spec)
                did_install = True
            except (exception.LibNotFound, exception.InternetIsOffline) as e:
            except (UnknownPackageError, exception.InternetIsOffline) as e:
                click.secho("Warning! %s" % e, fg="yellow")

        # reset cache
@@ -886,17 +890,17 @@ class ProjectAsLibBuilder(LibBuilderBase):
            DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=None)

    def process_dependencies(self): # pylint: disable=too-many-branches
        for uri in self.dependencies:
        for spec in self.dependencies:
            found = False
            for storage_dir in self.env.GetLibSourceDirs():
                if found:
                    break
                lm = LibraryManager(storage_dir)
                lib_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
                if not lib_dir:
                lm = LibraryPackageManager(storage_dir)
                pkg = lm.get_package(spec)
                if not pkg:
                    continue
                for lb in self.env.GetLibBuilders():
                    if lib_dir != lb.path:
                    if pkg.path != lb.path:
                        continue
                    if lb not in self.depbuilders:
                        self.depend_recursive(lb)
@@ -908,7 +912,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
            # look for built-in libraries by a name
            # which don't have package manifest
            for lb in self.env.GetLibBuilders():
                if lb.name != uri:
                if lb.name != spec:
                    continue
                if lb not in self.depbuilders:
                    self.depend_recursive(lb)
@@ -1000,10 +1004,6 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches


def ConfigureProjectLibBuilder(env):
    def _get_vcs_info(lb):
        path = LibraryManager.get_src_manifest_path(lb.path)
        return fs.load_json(path) if path else None

    def _correct_found_libs(lib_builders):
        # build full dependency graph
        found_lbs = [lb for lb in lib_builders if lb.dependent]
@@ -1019,15 +1019,15 @@ def ConfigureProjectLibBuilder(env):
        margin = "| " * (level)
        for lb in root.depbuilders:
            title = "<%s>" % lb.name
            vcs_info = _get_vcs_info(lb)
            if lb.version:
            pkg = PackageSourceItem(lb.path)
            if pkg.metadata:
                title += " %s" % pkg.metadata.version
            elif lb.version:
                title += " %s" % lb.version
            if vcs_info and vcs_info.get("version"):
                title += " #%s" % vcs_info.get("version")
            click.echo("%s|-- %s" % (margin, title), nl=False)
            if int(ARGUMENTS.get("PIOVERBOSE", 0)):
                if vcs_info:
                    click.echo(" [%s]" % vcs_info.get("url"), nl=False)
                if pkg.metadata and pkg.metadata.spec.external:
                    click.echo(" [%s]" % pkg.metadata.spec.url, nl=False)
                click.echo(" (", nl=False)
                click.echo(lb.path, nl=False)
                click.echo(")", nl=False)
@@ -139,7 +139,7 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
    )

    def _get_plaform_data():
        data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
        data = ["PLATFORM: %s (%s)" % (platform.title, platform.version)]
        if platform.src_version:
            data.append("#" + platform.src_version)
        if int(ARGUMENTS.get("PIOVERBOSE", 0)) and platform.src_url:
@@ -16,7 +16,7 @@ import os
import time

from platformio import __accounts_api__, app
from platformio.clients.rest import RESTClient
from platformio.clients.http import HTTPClient
from platformio.exception import PlatformioException


@@ -35,7 +35,7 @@ class AccountAlreadyAuthorized(AccountError):
    MESSAGE = "You are already authorized with {0} account."


class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods

    SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7

@@ -67,7 +67,7 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
        token = self.fetch_authentication_token()
        headers["Authorization"] = "Bearer %s" % token
        kwargs["headers"] = headers
        return self.send_request(*args, **kwargs)
        return self.request_json_data(*args, **kwargs)

    def login(self, username, password):
        try:
@@ -79,11 +79,11 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
                app.get_state_item("account", {}).get("email", "")
            )

        result = self.send_request(
        data = self.request_json_data(
            "post", "/v1/login", data={"username": username, "password": password},
        )
        app.set_state_item("account", result)
        return result
        app.set_state_item("account", data)
        return data

    def login_with_code(self, client_id, code, redirect_uri):
        try:
@@ -95,7 +95,7 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
                app.get_state_item("account", {}).get("email", "")
            )

        result = self.send_request(
        result = self.request_json_data(
            "post",
            "/v1/login/code",
            data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
@@ -107,7 +107,7 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
        refresh_token = self.get_refresh_token()
        self.delete_local_session()
        try:
            self.send_request(
            self.request_json_data(
                "post", "/v1/logout", data={"refresh_token": refresh_token},
            )
        except AccountError:
@@ -133,7 +133,7 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
                app.get_state_item("account", {}).get("email", "")
            )

        return self.send_request(
        return self.request_json_data(
            "post",
            "/v1/registration",
            data={
@@ -153,7 +153,9 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
        ).get("auth_token")

    def forgot_password(self, username):
        return self.send_request("post", "/v1/forgot", data={"username": username},)
        return self.request_json_data(
            "post", "/v1/forgot", data={"username": username},
        )

    def get_profile(self):
        return self.send_auth_request("get", "/v1/profile",)
@@ -276,15 +278,15 @@ class AccountClient(RESTClient): # pylint:disable=too-many-public-methods
                return auth.get("access_token")
            if auth.get("refresh_token"):
                try:
                    result = self.send_request(
                    data = self.request_json_data(
                        "post",
                        "/v1/login",
                        headers={
                            "Authorization": "Bearer %s" % auth.get("refresh_token")
                        },
                    )
                    app.set_state_item("account", result)
                    return result.get("auth").get("access_token")
                    app.set_state_item("account", data)
                    return data.get("auth").get("access_token")
                except AccountError:
                    self.delete_local_session()
        raise AccountNotAuthorized()
@@ -19,12 +19,20 @@ from platformio import app, util
from platformio.exception import PlatformioException


class RESTClientError(PlatformioException):
    pass
class HTTPClientError(PlatformioException):
    def __init__(self, message, response=None):
        super(HTTPClientError, self).__init__()
        self.message = message
        self.response = response

    def __str__(self): # pragma: no cover
        return self.message


class RESTClient(object):
    def __init__(self, base_url):
class HTTPClient(object):
    def __init__(
        self, base_url,
    ):
        if base_url.endswith("/"):
            base_url = base_url[:-1]
        self.base_url = base_url
@@ -33,19 +41,30 @@ class RESTClient(object):
        retry = Retry(
            total=5,
            backoff_factor=1,
            method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
            status_forcelist=[500, 502, 503, 504],
            # method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
            status_forcelist=[413, 429, 500, 502, 503, 504],
        )
        adapter = requests.adapters.HTTPAdapter(max_retries=retry)
        self._session.mount(base_url, adapter)

    def __del__(self):
        if not self._session:
            return
        self._session.close()
        self._session = None

    @util.throttle(500)
    def send_request(self, method, path, **kwargs):
        # check internet before and resolve issue with 60 seconds timeout
        # check Internet before and resolve issue with 60 seconds timeout
        # print(self, method, path, kwargs)
        util.internet_on(raise_exception=True)
        try:
            response = getattr(self._session, method)(self.base_url + path, **kwargs)
            return getattr(self._session, method)(self.base_url + path, **kwargs)
        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
            raise RESTClientError(e)
            raise HTTPClientError(str(e))

    def request_json_data(self, *args, **kwargs):
        response = self.send_request(*args, **kwargs)
        return self.raise_error_from_response(response)

    @staticmethod
@@ -59,4 +78,4 @@ class RESTClient(object):
            message = response.json()["message"]
        except (KeyError, ValueError):
            message = response.text
        raise RESTClientError(message)
        raise HTTPClientError(message, response)
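As a standalone illustration of the retry policy configured above (plain `requests` plus `urllib3`, with a hypothetical base URL and endpoint), roughly:

```python
# Sketch only: up to 5 retries with exponential backoff on 413/429/5xx responses,
# mounted on the client's base URL prefix, as in HTTPClient.__init__ above.
import requests
from urllib3.util.retry import Retry

base_url = "https://api.example.com"  # hypothetical
session = requests.Session()
retry = Retry(
    total=5,
    backoff_factor=1,  # exponential backoff between attempts
    status_forcelist=[413, 429, 500, 502, 503, 504],
)
session.mount(base_url, requests.adapters.HTTPAdapter(max_retries=retry))
response = session.get(base_url + "/v3/packages")  # hypothetical endpoint
```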
@@ -14,13 +14,18 @@

from platformio import __registry_api__, fs
from platformio.clients.account import AccountClient
from platformio.clients.rest import RESTClient
from platformio.package.spec import PackageType
from platformio.clients.http import HTTPClient, HTTPClientError
from platformio.package.meta import PackageType

try:
    from urllib.parse import quote
except ImportError:
    from urllib import quote

# pylint: disable=too-many-arguments


class RegistryClient(RESTClient):
class RegistryClient(HTTPClient):
    def __init__(self):
        super(RegistryClient, self).__init__(base_url=__registry_api__)

@@ -30,7 +35,7 @@ class RegistryClient(RESTClient):
        token = AccountClient().fetch_authentication_token()
        headers["Authorization"] = "Bearer %s" % token
        kwargs["headers"] = headers
        return self.send_request(*args, **kwargs)
        return self.request_json_data(*args, **kwargs)

    def publish_package(
        self, archive_path, owner=None, released_at=None, private=False, notify=True
@@ -41,7 +46,7 @@ class RegistryClient(RESTClient):
                account.get_account_info(offline=True).get("profile").get("username")
            )
        with open(archive_path, "rb") as fp:
            response = self.send_auth_request(
            return self.send_auth_request(
                "post",
                "/v3/packages/%s/%s" % (owner, PackageType.from_archive(archive_path)),
                params={
@@ -57,7 +62,6 @@ class RegistryClient(RESTClient):
                },
                data=fp,
            )
        return response

    def unpublish_package( # pylint: disable=redefined-builtin
        self, type, name, owner=None, version=None, undo=False
@@ -70,10 +74,9 @@ class RegistryClient(RESTClient):
        path = "/v3/packages/%s/%s/%s" % (owner, type, name)
        if version:
            path += "/" + version
        response = self.send_auth_request(
        return self.send_auth_request(
            "delete", path, params={"undo": 1 if undo else 0},
        )
        return response

    def update_resource(self, urn, private):
        return self.send_auth_request(
@@ -96,3 +99,45 @@ class RegistryClient(RESTClient):
        return self.send_auth_request(
            "get", "/v3/resources", params={"owner": owner} if owner else None
        )

    def list_packages(self, query=None, filters=None, page=None):
        assert query or filters
        search_query = []
        if filters:
            valid_filters = (
                "authors",
                "keywords",
                "frameworks",
                "platforms",
                "headers",
                "ids",
                "names",
                "owners",
                "types",
            )
            assert set(filters.keys()) <= set(valid_filters)
            for name, values in filters.items():
                for value in set(
                    values if isinstance(values, (list, tuple)) else [values]
                ):
                    search_query.append('%s:"%s"' % (name[:-1], value))
        if query:
            search_query.append(query)
        params = dict(query=quote(" ".join(search_query)))
        if page:
            params["page"] = int(page)
        return self.request_json_data("get", "/v3/packages", params=params)

    def get_package(self, type_, owner, name, version=None):
        try:
            return self.request_json_data(
                "get",
                "/v3/packages/{owner}/{type}/{name}".format(
                    type=type_, owner=owner.lower(), name=quote(name.lower())
                ),
                params=dict(version=version) if version else None,
            )
        except HTTPClientError as e:
            if e.response.status_code == 404:
                return None
            raise e
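A rough sketch of the search query that `list_packages()` above builds from its filters; the filter values and free-text term are hypothetical:

```python
# Filter names are singularized and their values quoted, then the whole
# query string is URL-encoded, mirroring list_packages() above.
try:
    from urllib.parse import quote
except ImportError:
    from urllib import quote  # Python 2 fallback, as in the module above

filters = {"keywords": ["sensor", "i2c"], "owners": "adafruit"}  # hypothetical
search_query = []
for name, values in filters.items():
    for value in set(values if isinstance(values, (list, tuple)) else [values]):
        search_query.append('%s:"%s"' % (name[:-1], value))
search_query.append("bme280")  # free-text part of the query
params = dict(query=quote(" ".join(search_query)))
# params["query"] -> e.g. keyword%3A%22sensor%22%20keyword%3A%22i2c%22%20owner%3A%22adafruit%22%20bme280
```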
platformio/commands/lib/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
@ -18,16 +18,21 @@ import os
|
||||
import time
|
||||
|
||||
import click
|
||||
import semantic_version
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib.helpers import (
|
||||
get_builtin_libs,
|
||||
is_builtin_lib,
|
||||
save_project_libdeps,
|
||||
)
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.managers.lib import LibraryManager, get_builtin_libs, is_builtin_lib
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.meta import PackageSourceItem, PackageSpec
|
||||
from platformio.proc import is_ci
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import InvalidProjectConfError
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
|
||||
try:
|
||||
@ -124,89 +129,106 @@ def cli(ctx, **options):
|
||||
@cli.command("install", short_help="Install library")
|
||||
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
|
||||
@click.option(
|
||||
"--save",
|
||||
"--save/--no-save",
|
||||
is_flag=True,
|
||||
help="Save installed libraries into the `platformio.ini` dependency list",
|
||||
default=True,
|
||||
help="Save installed libraries into the `platformio.ini` dependency list"
|
||||
" (enabled by default)",
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"--interactive", is_flag=True, help="Allow to make a choice for all prompts"
|
||||
"--interactive",
|
||||
is_flag=True,
|
||||
help="Deprecated! Please use a strict dependency specification (owner/libname)",
|
||||
)
|
||||
@click.option(
|
||||
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments
|
||||
def lib_install( # pylint: disable=too-many-arguments,unused-argument
|
||||
ctx, libraries, save, silent, interactive, force
|
||||
):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
|
||||
|
||||
installed_manifests = {}
|
||||
installed_pkgs = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not silent and (libraries or storage_dir in storage_libdeps):
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
|
||||
if libraries:
|
||||
for library in libraries:
|
||||
pkg_dir = lm.install(
|
||||
library, silent=silent, interactive=interactive, force=force
|
||||
)
|
||||
installed_manifests[library] = lm.load_manifest(pkg_dir)
|
||||
installed_pkgs = {
|
||||
library: lm.install(library, silent=silent, force=force)
|
||||
for library in libraries
|
||||
}
|
||||
|
||||
elif storage_dir in storage_libdeps:
|
||||
builtin_lib_storages = None
|
||||
for library in storage_libdeps[storage_dir]:
|
||||
try:
|
||||
pkg_dir = lm.install(
|
||||
library, silent=silent, interactive=interactive, force=force
|
||||
)
|
||||
installed_manifests[library] = lm.load_manifest(pkg_dir)
|
||||
except exception.LibNotFound as e:
|
||||
lm.install(library, silent=silent, force=force)
|
||||
except UnknownPackageError as e:
|
||||
if builtin_lib_storages is None:
|
||||
builtin_lib_storages = get_builtin_libs()
|
||||
if not silent or not is_builtin_lib(builtin_lib_storages, library):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
|
||||
if not save or not libraries:
|
||||
return
|
||||
if save and installed_pkgs:
|
||||
_save_deps(ctx, installed_pkgs)
|
||||
|
||||
|
||||
def _save_deps(ctx, pkgs, action="add"):
|
||||
specs = []
|
||||
for library, pkg in pkgs.items():
|
||||
spec = PackageSpec(library)
|
||||
if spec.external:
|
||||
specs.append(spec)
|
||||
else:
|
||||
specs.append(
|
||||
PackageSpec(
|
||||
owner=pkg.metadata.spec.owner,
|
||||
name=pkg.metadata.spec.name,
|
||||
requirements=spec.requirements
|
||||
or (
|
||||
("^%s" % pkg.metadata.version)
|
||||
if not pkg.metadata.version.build
|
||||
else pkg.metadata.version
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
|
||||
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
|
||||
for input_dir in input_dirs:
|
||||
config = ProjectConfig.get_instance(os.path.join(input_dir, "platformio.ini"))
|
||||
config.validate(project_environments)
|
||||
for env in config.envs():
|
||||
if project_environments and env not in project_environments:
|
||||
continue
|
||||
config.expand_interpolations = False
|
||||
try:
|
||||
lib_deps = config.get("env:" + env, "lib_deps")
|
||||
except InvalidProjectConfError:
|
||||
lib_deps = []
|
||||
for library in libraries:
|
||||
if library in lib_deps:
|
||||
continue
|
||||
manifest = installed_manifests[library]
|
||||
try:
|
||||
assert library.lower() == manifest["name"].lower()
|
||||
assert semantic_version.Version(manifest["version"])
|
||||
lib_deps.append("{name}@^{version}".format(**manifest))
|
||||
except (AssertionError, ValueError):
|
||||
lib_deps.append(library)
|
||||
config.set("env:" + env, "lib_deps", lib_deps)
|
||||
config.save()
|
||||
if not is_platformio_project(input_dir):
|
||||
continue
|
||||
save_project_libdeps(input_dir, specs, project_environments, action=action)
|
||||
|
||||
|
||||
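A small sketch of the version-pinning rule used by `_save_deps()` above: a registry package is saved with a caret requirement unless the installed version carries build metadata. The version value here is hypothetical.

```python
# Illustrative pin computation only; the result is what ends up in the
# PackageSpec written to lib_deps via spec.as_dependency().
import semantic_version

installed = semantic_version.Version("1.2.3")  # hypothetical installed version
requirement = ("^%s" % installed) if not installed.build else str(installed)
print(requirement)  # -> "^1.2.3"
```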
@cli.command("uninstall", short_help="Uninstall libraries")
|
||||
@cli.command("uninstall", short_help="Remove libraries")
|
||||
@click.argument("libraries", nargs=-1, metavar="[LIBRARY...]")
|
||||
@click.option(
|
||||
"--save/--no-save",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Remove libraries from the `platformio.ini` dependency list and save changes"
|
||||
" (enabled by default)",
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.pass_context
|
||||
def lib_uninstall(ctx, libraries):
|
||||
def lib_uninstall(ctx, libraries, save, silent):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
uninstalled_pkgs = {}
|
||||
for storage_dir in storage_dirs:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
for library in libraries:
|
||||
lm.uninstall(library)
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
uninstalled_pkgs = {
|
||||
library: lm.uninstall(library, silent=silent) for library in libraries
|
||||
}
|
||||
|
||||
if save and uninstalled_pkgs:
|
||||
_save_deps(ctx, uninstalled_pkgs, action="remove")
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed libraries")
|
||||
@ -220,42 +242,55 @@ def lib_uninstall(ctx, libraries):
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_update(ctx, libraries, only_check, dry_run, json_output):
|
||||
def lib_update( # pylint: disable=too-many-arguments
|
||||
ctx, libraries, only_check, dry_run, silent, json_output
|
||||
):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
only_check = dry_run or only_check
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not json_output:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
|
||||
_libraries = libraries
|
||||
if not _libraries:
|
||||
_libraries = [manifest["__pkg_dir"] for manifest in lm.get_installed()]
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
_libraries = libraries or lm.get_installed()
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in _libraries:
|
||||
pkg_dir = library if os.path.isdir(library) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = lm.parse_pkg_uri(library)
|
||||
pkg_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(library, PackageSourceItem):
|
||||
pkg = library
|
||||
else:
|
||||
spec = PackageSpec(library)
|
||||
pkg = lm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
latest = lm.outdated(pkg_dir, requirements)
|
||||
if not latest:
|
||||
outdated = lm.outdated(pkg, spec)
|
||||
if not outdated.is_outdated(allow_incompatible=True):
|
||||
continue
|
||||
manifest = lm.load_manifest(pkg_dir)
|
||||
manifest["versionLatest"] = latest
|
||||
manifest = lm.legacy_load_manifest(pkg)
|
||||
manifest["versionWanted"] = (
|
||||
str(outdated.wanted) if outdated.wanted else None
|
||||
)
|
||||
manifest["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(manifest)
|
||||
json_result[storage_dir] = result
|
||||
else:
|
||||
for library in _libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
to_spec = (
|
||||
None
|
||||
if isinstance(library, PackageSourceItem)
|
||||
else PackageSpec(library)
|
||||
)
|
||||
lm.update(
|
||||
library, to_spec=to_spec, only_check=only_check, silent=silent
|
||||
)
|
||||
|
||||
if json_output:
|
||||
return click.echo(
|
||||
@ -276,8 +311,8 @@ def lib_list(ctx, json_output):
|
||||
for storage_dir in storage_dirs:
|
||||
if not json_output:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
items = lm.get_installed()
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
items = lm.legacy_get_installed()
|
||||
if json_output:
|
||||
json_result[storage_dir] = items
|
||||
elif items:
|
||||
@ -301,6 +336,7 @@ def lib_list(ctx, json_output):
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option("--page", type=click.INT, default=1)
|
||||
@click.option("--id", multiple=True)
|
||||
@click.option("-o", "--owner", multiple=True)
|
||||
@click.option("-n", "--name", multiple=True)
|
||||
@click.option("-a", "--author", multiple=True)
|
||||
@click.option("-k", "--keyword", multiple=True)
|
||||
@ -404,12 +440,8 @@ def lib_builtin(storage, json_output):
|
||||
@click.argument("library", metavar="[LIBRARY]")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_show(library, json_output):
|
||||
lm = LibraryManager()
|
||||
name, requirements, _ = lm.parse_pkg_uri(library)
|
||||
lib_id = lm.search_lib_id(
|
||||
{"name": name, "requirements": requirements},
|
||||
silent=json_output,
|
||||
interactive=not json_output,
|
||||
lib_id = LibraryPackageManager().reveal_registry_package_id(
|
||||
library, silent=json_output
|
||||
)
|
||||
lib = util.get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
if json_output:
|
platformio/commands/lib/helpers.py (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from platformio.compat import ci_strings_are_equal
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import InvalidProjectConfError
|
||||
|
||||
|
||||
def get_builtin_libs(storage_names=None):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
|
||||
items = []
|
||||
storage_names = storage_names or []
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
|
||||
for storage in p.get_lib_storages():
|
||||
if storage_names and storage["name"] not in storage_names:
|
||||
continue
|
||||
lm = LibraryPackageManager(storage["path"])
|
||||
items.append(
|
||||
{
|
||||
"name": storage["name"],
|
||||
"path": storage["path"],
|
||||
"items": lm.legacy_get_installed(),
|
||||
}
|
||||
)
|
||||
return items
|
||||
|
||||
|
||||
def is_builtin_lib(storages, name):
|
||||
for storage in storages or []:
|
||||
if any(lib.get("name") == name for lib in storage["items"]):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def ignore_deps_by_specs(deps, specs):
|
||||
result = []
|
||||
for dep in deps:
|
||||
depspec = PackageSpec(dep)
|
||||
if depspec.external:
|
||||
result.append(dep)
|
||||
continue
|
||||
ignore_conditions = []
|
||||
for spec in specs:
|
||||
if depspec.owner:
|
||||
ignore_conditions.append(
|
||||
ci_strings_are_equal(depspec.owner, spec.owner)
|
||||
and ci_strings_are_equal(depspec.name, spec.name)
|
||||
)
|
||||
else:
|
||||
ignore_conditions.append(ci_strings_are_equal(depspec.name, spec.name))
|
||||
if not any(ignore_conditions):
|
||||
result.append(dep)
|
||||
return result
|
||||
|
||||
|
||||
def save_project_libdeps(project_dir, specs, environments=None, action="add"):
|
||||
config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini"))
|
||||
config.validate(environments)
|
||||
for env in config.envs():
|
||||
if environments and env not in environments:
|
||||
continue
|
||||
config.expand_interpolations = False
|
||||
lib_deps = []
|
||||
try:
|
||||
lib_deps = ignore_deps_by_specs(config.get("env:" + env, "lib_deps"), specs)
|
||||
except InvalidProjectConfError:
|
||||
pass
|
||||
if action == "add":
|
||||
lib_deps.extend(spec.as_dependency() for spec in specs)
|
||||
config.set("env:" + env, "lib_deps", lib_deps)
|
||||
config.save()
|
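For illustration, `save_project_libdeps()` above could be driven directly like this; the project path and spec strings are hypothetical:

```python
# Hypothetical example: pin two libraries into every environment of a project.
from platformio.commands.lib.helpers import save_project_libdeps
from platformio.package.meta import PackageSpec

specs = [PackageSpec("bblanchon/ArduinoJson@^6.17.2"), PackageSpec("SPI")]
save_project_libdeps("/path/to/project", specs, environments=None, action="add")
# Entries matching these specs are first filtered out of each env's lib_deps
# (see ignore_deps_by_specs above), then the new specs are appended and saved.
```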
@ -18,8 +18,8 @@ from datetime import datetime
|
||||
import click
|
||||
|
||||
from platformio.clients.registry import RegistryClient
|
||||
from platformio.package.meta import PackageSpec, PackageType
|
||||
from platformio.package.pack import PackagePacker
|
||||
from platformio.package.spec import PackageSpec, PackageType
|
||||
|
||||
|
||||
def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
|
||||
@ -106,7 +106,7 @@ def package_unpublish(package, type, undo): # pylint: disable=redefined-builtin
|
||||
response = RegistryClient().unpublish_package(
|
||||
type=type,
|
||||
name=spec.name,
|
||||
owner=spec.ownername,
|
||||
owner=spec.owner,
|
||||
version=spec.requirements,
|
||||
undo=undo,
|
||||
)
|
||||
|
@ -26,9 +26,9 @@ from platformio.commands.system.completion import (
|
||||
install_completion_code,
|
||||
uninstall_completion_code,
|
||||
)
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.package import PackageManager
|
||||
from platformio.managers.platform import PlatformManager
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
@ -73,7 +73,7 @@ def system_info(json_output):
|
||||
}
|
||||
data["global_lib_nums"] = {
|
||||
"title": "Global Libraries",
|
||||
"value": len(LibraryManager().get_installed()),
|
||||
"value": len(LibraryPackageManager().get_installed()),
|
||||
}
|
||||
data["dev_platform_nums"] = {
|
||||
"title": "Development Platforms",
|
||||
|
@ -15,11 +15,11 @@
|
||||
import click
|
||||
|
||||
from platformio import app
|
||||
from platformio.commands.lib import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib import lib_update as cmd_lib_update
|
||||
from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib.command import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.managers.core import update_core_packages
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
|
||||
|
||||
@click.command(
|
||||
@ -55,5 +55,5 @@ def cli(ctx, core_packages, only_check, dry_run):
|
||||
click.echo()
|
||||
click.echo("Library Manager")
|
||||
click.echo("===============")
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = [LibraryManager().package_dir]
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = [LibraryPackageManager().package_dir]
|
||||
ctx.invoke(cmd_lib_update, only_check=only_check)
|
||||
|
@ -50,6 +50,14 @@ def get_object_members(obj, ignore_private=True):
|
||||
}
|
||||
|
||||
|
||||
def ci_strings_are_equal(a, b):
|
||||
if a == b:
|
||||
return True
|
||||
if not a or not b:
|
||||
return False
|
||||
return a.strip().lower() == b.strip().lower()
|
||||
|
||||
|
||||
if PY2:
|
||||
import imp
|
||||
|
||||
|
@ -119,53 +119,11 @@ class PackageInstallError(PlatformIOPackageException):
|
||||
)
|
||||
|
||||
|
||||
class ExtractArchiveItemError(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = (
|
||||
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
|
||||
"tool or check this solution -> http://bit.ly/faq-package-manager"
|
||||
)
|
||||
|
||||
|
||||
class UnsupportedArchiveType(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = "Can not unpack file '{0}'"
|
||||
|
||||
|
||||
class FDUnrecognizedStatusCode(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = "Got an unrecognized status code '{0}' when downloaded {1}"
|
||||
|
||||
|
||||
class FDSizeMismatch(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = (
|
||||
"The size ({0:d} bytes) of downloaded file '{1}' "
|
||||
"is not equal to remote size ({2:d} bytes)"
|
||||
)
|
||||
|
||||
|
||||
class FDSHASumMismatch(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = (
|
||||
"The 'sha1' sum '{0}' of downloaded file '{1}' is not equal to remote '{2}'"
|
||||
)
|
||||
|
||||
|
||||
#
|
||||
# Library
|
||||
#
|
||||
|
||||
|
||||
class LibNotFound(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"Library `{0}` has not been found in PlatformIO Registry.\n"
|
||||
"You can ignore this message, if `{0}` is a built-in library "
|
||||
"(included in framework, SDK). E.g., SPI, Wire, etc."
|
||||
)
|
||||
|
||||
|
||||
class NotGlobalLibDir(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
|
@ -21,13 +21,13 @@ import semantic_version
|
||||
|
||||
from platformio import __version__, app, exception, fs, telemetry, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib import lib_update as cmd_lib_update
|
||||
from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib.command import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.commands.upgrade import get_latest_version
|
||||
from platformio.managers.core import update_core_packages
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.proc import is_container
|
||||
|
||||
|
||||
@ -240,7 +240,7 @@ def check_platformio_upgrade():
|
||||
click.echo("")
|
||||
|
||||
|
||||
def check_internal_updates(ctx, what):
|
||||
def check_internal_updates(ctx, what): # pylint: disable=too-many-branches
|
||||
last_check = app.get_state_item("last_check", {})
|
||||
interval = int(app.get_setting("check_%s_interval" % what)) * 3600 * 24
|
||||
if (time() - interval) < last_check.get(what + "_update", 0):
|
||||
@ -251,20 +251,27 @@ def check_internal_updates(ctx, what):
|
||||
|
||||
util.internet_on(raise_exception=True)
|
||||
|
||||
pm = PlatformManager() if what == "platforms" else LibraryManager()
|
||||
outdated_items = []
|
||||
for manifest in pm.get_installed():
|
||||
if manifest["name"] in outdated_items:
|
||||
continue
|
||||
conds = [
|
||||
pm.outdated(manifest["__pkg_dir"]),
|
||||
what == "platforms"
|
||||
and PlatformFactory.newPlatform(
|
||||
manifest["__pkg_dir"]
|
||||
).are_outdated_packages(),
|
||||
]
|
||||
if any(conds):
|
||||
outdated_items.append(manifest["name"])
|
||||
pm = PlatformManager() if what == "platforms" else LibraryPackageManager()
|
||||
if isinstance(pm, PlatformManager):
|
||||
for manifest in pm.get_installed():
|
||||
if manifest["name"] in outdated_items:
|
||||
continue
|
||||
conds = [
|
||||
pm.outdated(manifest["__pkg_dir"]),
|
||||
what == "platforms"
|
||||
and PlatformFactory.newPlatform(
|
||||
manifest["__pkg_dir"]
|
||||
).are_outdated_packages(),
|
||||
]
|
||||
if any(conds):
|
||||
outdated_items.append(manifest["name"])
|
||||
else:
|
||||
for pkg in pm.get_installed():
|
||||
if pkg.metadata.name in outdated_items:
|
||||
continue
|
||||
if pm.outdated(pkg).is_outdated():
|
||||
outdated_items.append(pkg.metadata.name)
|
||||
|
||||
if not outdated_items:
|
||||
return
|
||||
|
@ -1,374 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
|
||||
# pylint: disable=too-many-return-statements
|
||||
|
||||
import json
|
||||
from glob import glob
|
||||
from os.path import isdir, join
|
||||
|
||||
import click
|
||||
import semantic_version
|
||||
|
||||
from platformio import app, exception, util
|
||||
from platformio.compat import glob_escape
|
||||
from platformio.managers.package import BasePkgManager
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
from platformio.package.exception import ManifestException
|
||||
from platformio.package.manifest.parser import ManifestParserFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
class LibraryManager(BasePkgManager):
|
||||
|
||||
FILE_CACHE_VALID = "30d" # 1 month
|
||||
|
||||
def __init__(self, package_dir=None):
|
||||
self.config = ProjectConfig.get_instance()
|
||||
super(LibraryManager, self).__init__(
|
||||
package_dir or self.config.get_optional_dir("globallib")
|
||||
)
|
||||
|
||||
@property
|
||||
def manifest_names(self):
|
||||
return [".library.json", "library.json", "library.properties", "module.json"]
|
||||
|
||||
def get_manifest_path(self, pkg_dir):
|
||||
path = BasePkgManager.get_manifest_path(self, pkg_dir)
|
||||
if path:
|
||||
return path
|
||||
|
||||
# if library without manifest, returns first source file
|
||||
src_dir = join(glob_escape(pkg_dir))
|
||||
if isdir(join(pkg_dir, "src")):
|
||||
src_dir = join(src_dir, "src")
|
||||
chs_files = glob(join(src_dir, "*.[chS]"))
|
||||
if chs_files:
|
||||
return chs_files[0]
|
||||
cpp_files = glob(join(src_dir, "*.cpp"))
|
||||
if cpp_files:
|
||||
return cpp_files[0]
|
||||
|
||||
return None
|
||||
|
||||
def max_satisfying_repo_version(self, versions, requirements=None):
|
||||
def _cmp_dates(datestr1, datestr2):
|
||||
date1 = util.parse_date(datestr1)
|
||||
date2 = util.parse_date(datestr2)
|
||||
if date1 == date2:
|
||||
return 0
|
||||
return -1 if date1 < date2 else 1
|
||||
|
||||
semver_spec = None
|
||||
try:
|
||||
semver_spec = (
|
||||
semantic_version.SimpleSpec(requirements) if requirements else None
|
||||
)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
item = {}
|
||||
|
||||
for v in versions:
|
||||
semver_new = self.parse_semver_version(v["name"])
|
||||
if semver_spec:
|
||||
# pylint: disable=unsupported-membership-test
|
||||
if not semver_new or semver_new not in semver_spec:
|
||||
continue
|
||||
if not item or self.parse_semver_version(item["name"]) < semver_new:
|
||||
item = v
|
||||
elif requirements:
|
||||
if requirements == v["name"]:
|
||||
return v
|
||||
|
||||
else:
|
||||
if not item or _cmp_dates(item["released"], v["released"]) == -1:
|
||||
item = v
|
||||
return item
|
||||
|
||||
def get_latest_repo_version(self, name, requirements, silent=False):
|
||||
item = self.max_satisfying_repo_version(
|
||||
util.get_api_result(
|
||||
"/lib/info/%d"
|
||||
% self.search_lib_id(
|
||||
{"name": name, "requirements": requirements}, silent=silent
|
||||
),
|
||||
cache_valid="1h",
|
||||
)["versions"],
|
||||
requirements,
|
||||
)
|
||||
return item["name"] if item else None
|
||||
|
||||
def _install_from_piorepo(self, name, requirements):
|
||||
assert name.startswith("id="), name
|
||||
version = self.get_latest_repo_version(name, requirements)
|
||||
if not version:
|
||||
raise exception.UndefinedPackageVersion(
|
||||
requirements or "latest", util.get_systype()
|
||||
)
|
||||
dl_data = util.get_api_result(
|
||||
"/lib/download/" + str(name[3:]), dict(version=version), cache_valid="30d"
|
||||
)
|
||||
assert dl_data
|
||||
|
||||
return self._install_from_url(
|
||||
name,
|
||||
dl_data["url"].replace("http://", "https://")
|
||||
if app.get_setting("strict_ssl")
|
||||
else dl_data["url"],
|
||||
requirements,
|
||||
)
|
||||
|
||||
def search_lib_id( # pylint: disable=too-many-branches
|
||||
self, filters, silent=False, interactive=False
|
||||
):
|
||||
assert isinstance(filters, dict)
|
||||
assert "name" in filters
|
||||
|
||||
# try to find ID within installed packages
|
||||
lib_id = self._get_lib_id_from_installed(filters)
|
||||
if lib_id:
|
||||
return lib_id
|
||||
|
||||
# looking in PIO Library Registry
|
||||
if not silent:
|
||||
click.echo(
|
||||
"Looking for %s library in registry"
|
||||
% click.style(filters["name"], fg="cyan")
|
||||
)
|
||||
query = []
|
||||
for key in filters:
|
||||
if key not in ("name", "authors", "frameworks", "platforms"):
|
||||
continue
|
||||
values = filters[key]
|
||||
if not isinstance(values, list):
|
||||
values = [v.strip() for v in values.split(",") if v]
|
||||
for value in values:
|
||||
query.append(
|
||||
'%s:"%s"' % (key[:-1] if key.endswith("s") else key, value)
|
||||
)
|
||||
|
||||
lib_info = None
|
||||
result = util.get_api_result(
|
||||
"/v2/lib/search", dict(query=" ".join(query)), cache_valid="1h"
|
||||
)
|
||||
if result["total"] == 1:
|
||||
lib_info = result["items"][0]
|
||||
elif result["total"] > 1:
|
||||
if silent and not interactive:
|
||||
lib_info = result["items"][0]
|
||||
else:
|
||||
click.secho(
|
||||
"Conflict: More than one library has been found "
|
||||
"by request %s:" % json.dumps(filters),
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.lib import print_lib_item
|
||||
|
||||
for item in result["items"]:
|
||||
print_lib_item(item)
|
||||
|
||||
if not interactive:
|
||||
click.secho(
|
||||
"Automatically chose the first available library "
|
||||
"(use `--interactive` option to make a choice)",
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
lib_info = result["items"][0]
|
||||
else:
|
||||
deplib_id = click.prompt(
|
||||
"Please choose library ID",
|
||||
type=click.Choice([str(i["id"]) for i in result["items"]]),
|
||||
)
|
||||
for item in result["items"]:
|
||||
if item["id"] == int(deplib_id):
|
||||
lib_info = item
|
||||
break
|
||||
|
||||
if not lib_info:
|
||||
if list(filters) == ["name"]:
|
||||
raise exception.LibNotFound(filters["name"])
|
||||
raise exception.LibNotFound(str(filters))
|
||||
if not silent:
|
||||
click.echo(
|
||||
"Found: %s"
|
||||
% click.style(
|
||||
"https://platformio.org/lib/show/{id}/{name}".format(**lib_info),
|
||||
fg="blue",
|
||||
)
|
||||
)
|
||||
return int(lib_info["id"])
|
||||
|
||||
def _get_lib_id_from_installed(self, filters):
|
||||
if filters["name"].startswith("id="):
|
||||
return int(filters["name"][3:])
|
||||
package_dir = self.get_package_dir(
|
||||
filters["name"], filters.get("requirements", filters.get("version"))
|
||||
)
|
||||
if not package_dir:
|
||||
return None
|
||||
manifest = self.load_manifest(package_dir)
|
||||
if "id" not in manifest:
|
||||
return None
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in filters:
|
||||
continue
|
||||
if key not in manifest:
|
||||
return None
|
||||
if not util.items_in_list(
|
||||
util.items_to_list(filters[key]), util.items_to_list(manifest[key])
|
||||
):
|
||||
return None
|
||||
|
||||
if "authors" in filters:
|
||||
if "authors" not in manifest:
|
||||
return None
|
||||
manifest_authors = manifest["authors"]
|
||||
if not isinstance(manifest_authors, list):
|
||||
manifest_authors = [manifest_authors]
|
||||
manifest_authors = [
|
||||
a["name"]
|
||||
for a in manifest_authors
|
||||
if isinstance(a, dict) and "name" in a
|
||||
]
|
||||
filter_authors = filters["authors"]
|
||||
if not isinstance(filter_authors, list):
|
||||
filter_authors = [filter_authors]
|
||||
if not set(filter_authors) <= set(manifest_authors):
|
||||
return None
|
||||
|
||||
return int(manifest["id"])
|
||||
|
||||
def install( # pylint: disable=arguments-differ
|
||||
self,
|
||||
name,
|
||||
requirements=None,
|
||||
silent=False,
|
||||
after_update=False,
|
||||
interactive=False,
|
||||
force=False,
|
||||
):
|
||||
_name, _requirements, _url = self.parse_pkg_uri(name, requirements)
|
||||
if not _url:
|
||||
name = "id=%d" % self.search_lib_id(
|
||||
{"name": _name, "requirements": _requirements},
|
||||
silent=silent,
|
||||
interactive=interactive,
|
||||
)
|
||||
requirements = _requirements
|
||||
pkg_dir = BasePkgManager.install(
|
||||
self,
|
||||
name,
|
||||
requirements,
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
force=force,
|
||||
)
|
||||
|
||||
if not pkg_dir:
|
||||
return None
|
||||
|
||||
manifest = None
|
||||
try:
|
||||
manifest = ManifestParserFactory.new_from_dir(pkg_dir).as_dict()
|
||||
except ManifestException:
|
||||
pass
|
||||
if not manifest or not manifest.get("dependencies"):
|
||||
return pkg_dir
|
||||
|
||||
if not silent:
|
||||
click.secho("Installing dependencies", fg="yellow")
|
||||
|
||||
builtin_lib_storages = None
|
||||
for filters in manifest["dependencies"]:
|
||||
assert "name" in filters
|
||||
|
||||
# avoid circle dependencies
|
||||
if not self.INSTALL_HISTORY:
|
||||
self.INSTALL_HISTORY = []
|
||||
history_key = str(filters)
|
||||
if history_key in self.INSTALL_HISTORY:
|
||||
continue
|
||||
self.INSTALL_HISTORY.append(history_key)
|
||||
|
||||
if any(s in filters.get("version", "") for s in ("\\", "/")):
|
||||
self.install(
|
||||
"{name}={version}".format(**filters),
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
interactive=interactive,
|
||||
force=force,
|
||||
)
|
||||
else:
|
||||
try:
|
||||
lib_id = self.search_lib_id(filters, silent, interactive)
|
||||
except exception.LibNotFound as e:
|
||||
if builtin_lib_storages is None:
|
||||
builtin_lib_storages = get_builtin_libs()
|
||||
if not silent or is_builtin_lib(
|
||||
builtin_lib_storages, filters["name"]
|
||||
):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
continue
|
||||
|
||||
if filters.get("version"):
|
||||
self.install(
|
||||
lib_id,
|
||||
filters.get("version"),
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
interactive=interactive,
|
||||
force=force,
|
||||
)
|
||||
else:
|
||||
self.install(
|
||||
lib_id,
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
interactive=interactive,
|
||||
force=force,
|
||||
)
|
||||
return pkg_dir
|
||||
|
||||
|
||||
def get_builtin_libs(storage_names=None):
|
||||
items = []
|
||||
storage_names = storage_names or []
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
p = PlatformFactory.newPlatform(manifest["__pkg_dir"])
|
||||
for storage in p.get_lib_storages():
|
||||
if storage_names and storage["name"] not in storage_names:
|
||||
continue
|
||||
lm = LibraryManager(storage["path"])
|
||||
items.append(
|
||||
{
|
||||
"name": storage["name"],
|
||||
"path": storage["path"],
|
||||
"items": lm.get_installed(),
|
||||
}
|
||||
)
|
||||
return items
|
||||
|
||||
|
||||
def is_builtin_lib(storages, name):
|
||||
for storage in storages or []:
|
||||
if any(l.get("name") == name for l in storage["items"]):
|
||||
return True
|
||||
return False
|
@ -26,12 +26,12 @@ import semantic_version
|
||||
|
||||
from platformio import __version__, app, exception, fs, util
|
||||
from platformio.compat import hashlib_encode_data
|
||||
from platformio.downloader import FileDownloader
|
||||
from platformio.lockfile import LockFile
|
||||
from platformio.package.download import FileDownloader
|
||||
from platformio.package.exception import ManifestException
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.package.manifest.parser import ManifestParserFactory
|
||||
from platformio.unpacker import FileUnpacker
|
||||
from platformio.vcsclient import VCSClientFactory
|
||||
from platformio.package.unpack import FileUnpacker
|
||||
from platformio.package.vcsclient import VCSClientFactory
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-return-statements
|
||||
|
||||
@ -482,7 +482,7 @@ class PkgInstallerMixin(object):
|
||||
self.unpack(dlpath, tmp_dir)
|
||||
os.remove(dlpath)
|
||||
else:
|
||||
vcs = VCSClientFactory.newClient(tmp_dir, url)
|
||||
vcs = VCSClientFactory.new(tmp_dir, url)
|
||||
assert vcs.export()
|
||||
src_manifest_dir = vcs.storage_dir
|
||||
src_manifest["version"] = vcs.get_current_revision()
|
||||
@ -628,9 +628,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
|
||||
if "__src_url" in manifest:
|
||||
try:
|
||||
vcs = VCSClientFactory.newClient(
|
||||
pkg_dir, manifest["__src_url"], silent=True
|
||||
)
|
||||
vcs = VCSClientFactory.new(pkg_dir, manifest["__src_url"], silent=True)
|
||||
except (AttributeError, exception.PlatformioException):
|
||||
return None
|
||||
if not vcs.can_be_updated:
|
||||
@ -800,7 +798,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
|
||||
return True
|
||||
|
||||
if "__src_url" in manifest:
|
||||
vcs = VCSClientFactory.newClient(pkg_dir, manifest["__src_url"])
|
||||
vcs = VCSClientFactory.new(pkg_dir, manifest["__src_url"])
|
||||
assert vcs.update()
|
||||
self._update_src_manifest(
|
||||
dict(version=vcs.get_current_revision()), vcs.storage_dir
|
||||
|
@ -23,11 +23,7 @@ import click
|
||||
import requests
|
||||
|
||||
from platformio import app, fs, util
|
||||
from platformio.exception import (
|
||||
FDSHASumMismatch,
|
||||
FDSizeMismatch,
|
||||
FDUnrecognizedStatusCode,
|
||||
)
|
||||
from platformio.package.exception import PackageException
|
||||
|
||||
|
||||
class FileDownloader(object):
|
||||
@ -41,7 +37,11 @@ class FileDownloader(object):
|
||||
verify=sys.version_info >= (2, 7, 9),
|
||||
)
|
||||
if self._request.status_code != 200:
|
||||
raise FDUnrecognizedStatusCode(self._request.status_code, url)
|
||||
raise PackageException(
|
||||
"Got the unrecognized status code '{0}' when downloaded {1}".format(
|
||||
self._request.status_code, url
|
||||
)
|
||||
)
|
||||
|
||||
disposition = self._request.headers.get("content-disposition")
|
||||
if disposition and "filename=" in disposition:
|
||||
@ -74,21 +74,21 @@ class FileDownloader(object):
|
||||
def start(self, with_progress=True, silent=False):
|
||||
label = "Downloading"
|
||||
itercontent = self._request.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
|
||||
f = open(self._destination, "wb")
|
||||
fp = open(self._destination, "wb")
|
||||
try:
|
||||
if not with_progress or self.get_size() == -1:
|
||||
if not silent:
|
||||
click.echo("%s..." % label)
|
||||
for chunk in itercontent:
|
||||
if chunk:
|
||||
f.write(chunk)
|
||||
fp.write(chunk)
|
||||
else:
|
||||
chunks = int(math.ceil(self.get_size() / float(io.DEFAULT_BUFFER_SIZE)))
|
||||
with click.progressbar(length=chunks, label=label) as pb:
|
||||
for _ in pb:
|
||||
f.write(next(itercontent))
|
||||
fp.write(next(itercontent))
|
||||
finally:
|
||||
f.close()
|
||||
fp.close()
|
||||
self._request.close()
|
||||
|
||||
if self.get_lmtime():
|
||||
@ -96,15 +96,40 @@ class FileDownloader(object):
|
||||
|
||||
return True
|
||||
|
||||
def verify(self, sha1=None):
|
||||
def verify(self, checksum=None):
|
||||
_dlsize = getsize(self._destination)
|
||||
if self.get_size() != -1 and _dlsize != self.get_size():
|
||||
raise FDSizeMismatch(_dlsize, self._fname, self.get_size())
|
||||
if not sha1:
|
||||
return None
|
||||
checksum = fs.calculate_file_hashsum("sha1", self._destination)
|
||||
if sha1.lower() != checksum.lower():
|
||||
raise FDSHASumMismatch(checksum, self._fname, sha1)
|
||||
raise PackageException(
|
||||
(
|
||||
"The size ({0:d} bytes) of downloaded file '{1}' "
|
||||
"is not equal to remote size ({2:d} bytes)"
|
||||
).format(_dlsize, self._fname, self.get_size())
|
||||
)
|
||||
if not checksum:
|
||||
return True
|
||||
|
||||
checksum_len = len(checksum)
|
||||
hash_algo = None
|
||||
if checksum_len == 32:
|
||||
hash_algo = "md5"
|
||||
elif checksum_len == 40:
|
||||
hash_algo = "sha1"
|
||||
elif checksum_len == 64:
|
||||
hash_algo = "sha256"
|
||||
|
||||
if not hash_algo:
|
||||
raise PackageException(
|
||||
"Could not determine checksum algorithm by %s" % checksum
|
||||
)
|
||||
|
||||
dl_checksum = fs.calculate_file_hashsum(hash_algo, self._destination)
|
||||
if checksum.lower() != dl_checksum.lower():
|
||||
raise PackageException(
|
||||
"The checksum '{0}' of the downloaded file '{1}' "
|
||||
"does not match to the remote '{2}'".format(
|
||||
dl_checksum, self._fname, checksum
|
||||
)
|
||||
)
|
||||
return True
|
||||
|
||||
def _preserve_filemtime(self, lmdate):
|
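The new `verify()` above infers the hash algorithm from the checksum length; a self-contained sketch of that rule (the file path is supplied by the caller):

```python
# Standalone sketch: pick the hash algorithm by digest length, as verify() does above.
import hashlib


def detect_hash_algo(checksum):
    return {32: "md5", 40: "sha1", 64: "sha256"}.get(len(checksum))


def file_checksum_matches(path, checksum):
    algo = detect_hash_algo(checksum)
    if not algo:
        raise ValueError("Could not determine checksum algorithm by %s" % checksum)
    h = hashlib.new(algo)
    with open(path, "rb") as fp:
        for chunk in iter(lambda: fp.read(8192), b""):
            h.update(chunk)
    return h.hexdigest().lower() == checksum.lower()
```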
@ -12,7 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio import util
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
|
||||
|
||||
class PackageException(PlatformioException):
|
||||
@ -44,3 +45,16 @@ class ManifestValidationError(ManifestException):
|
||||
"https://docs.platformio.org/page/librarymanager/config.html"
|
||||
% self.messages
|
||||
)
|
||||
|
||||
|
||||
class MissingPackageManifestError(ManifestException):
|
||||
|
||||
MESSAGE = "Could not find one of '{0}' manifest files in the package"
|
||||
|
||||
|
||||
class UnknownPackageError(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"Could not find a package with '{0}' requirements for your system '%s'"
|
||||
% util.get_systype()
|
||||
)
|
||||
|
13
platformio/package/manager/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
95
platformio/package/manager/_download.py
Normal file
@ -0,0 +1,95 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
from platformio import app, compat
|
||||
from platformio.package.download import FileDownloader
|
||||
from platformio.package.lockfile import LockFile
|
||||
|
||||
|
||||
class PackageManagerDownloadMixin(object):
|
||||
|
||||
DOWNLOAD_CACHE_EXPIRE = 86400 * 30  # keep a downloaded file in the local cache for 30 days
|
||||
|
||||
def compute_download_path(self, *args):
|
||||
request_hash = hashlib.new("sha256")
|
||||
for arg in args:
|
||||
request_hash.update(compat.hashlib_encode_data(arg))
|
||||
dl_path = os.path.join(self.get_download_dir(), request_hash.hexdigest())
|
||||
return dl_path
|
||||
|
||||
def get_download_usagedb_path(self):
|
||||
return os.path.join(self.get_download_dir(), "usage.db")
|
||||
|
||||
def set_download_utime(self, path, utime=None):
|
||||
with app.State(self.get_download_usagedb_path(), lock=True) as state:
|
||||
state[os.path.basename(path)] = int(time.time() if not utime else utime)
|
||||
|
||||
def cleanup_expired_downloads(self):
|
||||
with app.State(self.get_download_usagedb_path(), lock=True) as state:
|
||||
# remove outdated
|
||||
for fname in list(state.keys()):
|
||||
if state[fname] > (time.time() - self.DOWNLOAD_CACHE_EXPIRE):
|
||||
continue
|
||||
del state[fname]
|
||||
dl_path = os.path.join(self.get_download_dir(), fname)
|
||||
if os.path.isfile(dl_path):
|
||||
os.remove(dl_path)
|
||||
|
||||
def download(self, url, checksum=None, silent=False):
|
||||
dl_path = self.compute_download_path(url, checksum or "")
|
||||
if os.path.isfile(dl_path):
|
||||
self.set_download_utime(dl_path)
|
||||
return dl_path
|
||||
|
||||
with_progress = not silent and not app.is_disabled_progressbar()
|
||||
tmp_fd, tmp_path = tempfile.mkstemp(dir=self.get_download_dir())
|
||||
try:
|
||||
with LockFile(dl_path):
|
||||
try:
|
||||
fd = FileDownloader(url)
|
||||
fd.set_destination(tmp_path)
|
||||
fd.start(with_progress=with_progress, silent=silent)
|
||||
except IOError as e:
|
||||
raise_error = not with_progress
|
||||
if with_progress:
|
||||
try:
|
||||
fd = FileDownloader(url)
|
||||
fd.set_destination(tmp_path)
|
||||
fd.start(with_progress=False, silent=silent)
|
||||
except IOError:
|
||||
raise_error = True
|
||||
if raise_error:
|
||||
self.print_message(
|
||||
"Error: Please read http://bit.ly/package-manager-ioerror",
|
||||
fg="red",
|
||||
err=True,
|
||||
)
|
||||
raise e
|
||||
if checksum:
|
||||
fd.verify(checksum)
|
||||
os.close(tmp_fd)
|
||||
os.rename(tmp_path, dl_path)
|
||||
finally:
|
||||
if os.path.isfile(tmp_path):
|
||||
os.close(tmp_fd)
|
||||
os.remove(tmp_path)
|
||||
|
||||
assert os.path.isfile(dl_path)
|
||||
self.set_download_utime(dl_path)
|
||||
return dl_path
|
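Note: this mixin gives every package manager a shared, content-addressed download cache (the file name is the SHA-256 of the URL plus checksum), a usage database for access timestamps, and a 30-day expiry. A rough usage sketch, assuming the ToolPackageManager defined later in this patch; the URL is a placeholder:

    from platformio.package.manager.tool import ToolPackageManager

    tm = ToolPackageManager()  # defaults to the project "packages" directory
    # the first call downloads into the cache; later calls only refresh the
    # usage timestamp and return the already cached file
    archive = tm.download(
        "https://example.com/toolchain-xyz.tar.gz",  # hypothetical URL
        checksum=None,  # or a hex md5/sha1/sha256 digest to verify against
    )
    print(archive)
    tm.cleanup_expired_downloads()  # drop cache entries unused for ~30 days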
256
platformio/package/manager/_install.py
Normal file
@ -0,0 +1,256 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, compat, fs, util
|
||||
from platformio.package.exception import PackageException
|
||||
from platformio.package.meta import PackageSourceItem, PackageSpec
|
||||
from platformio.package.unpack import FileUnpacker
|
||||
from platformio.package.vcsclient import VCSClientFactory
|
||||
|
||||
|
||||
class PackageManagerInstallMixin(object):
|
||||
|
||||
_INSTALL_HISTORY = None  # avoid circular dependencies
|
||||
|
||||
@staticmethod
|
||||
def unpack(src, dst):
|
||||
with_progress = not app.is_disabled_progressbar()
|
||||
try:
|
||||
with FileUnpacker(src) as fu:
|
||||
return fu.unpack(dst, with_progress=with_progress)
|
||||
except IOError as e:
|
||||
if not with_progress:
|
||||
raise e
|
||||
with FileUnpacker(src) as fu:
|
||||
return fu.unpack(dst, with_progress=False)
|
||||
|
||||
def install(self, spec, silent=False, force=False):
|
||||
try:
|
||||
self.lock()
|
||||
pkg = self._install(spec, silent=silent, force=force)
|
||||
self.memcache_reset()
|
||||
self.cleanup_expired_downloads()
|
||||
return pkg
|
||||
finally:
|
||||
self.unlock()
|
||||
|
||||
def _install(self, spec, search_filters=None, silent=False, force=False):
|
||||
spec = self.ensure_spec(spec)
|
||||
|
||||
# avoid circular dependencies
|
||||
if not self._INSTALL_HISTORY:
|
||||
self._INSTALL_HISTORY = {}
|
||||
if spec in self._INSTALL_HISTORY:
|
||||
return self._INSTALL_HISTORY[spec]
|
||||
|
||||
# check if package is already installed
|
||||
pkg = self.get_package(spec)
|
||||
|
||||
# on a forced installation, remove the existing package first
|
||||
if pkg and force:
|
||||
self.uninstall(pkg, silent=silent)
|
||||
pkg = None
|
||||
|
||||
if pkg:
|
||||
if not silent:
|
||||
self.print_message(
|
||||
"{name} @ {version} is already installed".format(
|
||||
**pkg.metadata.as_dict()
|
||||
),
|
||||
fg="yellow",
|
||||
)
|
||||
return pkg
|
||||
|
||||
if not silent:
|
||||
self.print_message(
|
||||
"Installing %s" % click.style(spec.humanize(), fg="cyan")
|
||||
)
|
||||
|
||||
if spec.external:
|
||||
pkg = self.install_from_url(spec.url, spec, silent=silent)
|
||||
else:
|
||||
pkg = self.install_from_registry(spec, search_filters, silent=silent)
|
||||
|
||||
if not pkg or not pkg.metadata:
|
||||
raise PackageException(
|
||||
"Could not install package '%s' for '%s' system"
|
||||
% (spec.humanize(), util.get_systype())
|
||||
)
|
||||
|
||||
if not silent:
|
||||
self.print_message(
|
||||
"{name} @ {version} has been successfully installed!".format(
|
||||
**pkg.metadata.as_dict()
|
||||
),
|
||||
fg="green",
|
||||
)
|
||||
|
||||
self.memcache_reset()
|
||||
self._install_dependencies(pkg, silent)
|
||||
self._INSTALL_HISTORY[spec] = pkg
|
||||
return pkg
|
||||
|
||||
def _install_dependencies(self, pkg, silent=False):
|
||||
assert isinstance(pkg, PackageSourceItem)
|
||||
manifest = self.load_manifest(pkg)
|
||||
if not manifest.get("dependencies"):
|
||||
return
|
||||
if not silent:
|
||||
self.print_message("Installing dependencies...")
|
||||
for dependency in manifest.get("dependencies"):
|
||||
if not self._install_dependency(dependency, silent) and not silent:
|
||||
self.print_message(
|
||||
"Warning! Could not install dependency %s for package '%s'"
|
||||
% (dependency, pkg.metadata.name),
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
def _install_dependency(self, dependency, silent=False):
|
||||
spec = PackageSpec(
|
||||
name=dependency.get("name"), requirements=dependency.get("version")
|
||||
)
|
||||
search_filters = {
|
||||
key: value
|
||||
for key, value in dependency.items()
|
||||
if key in ("authors", "platforms", "frameworks")
|
||||
}
|
||||
return self._install(spec, search_filters=search_filters or None, silent=silent)
|
||||
|
||||
def install_from_url(self, url, spec, checksum=None, silent=False):
|
||||
spec = self.ensure_spec(spec)
|
||||
tmp_dir = tempfile.mkdtemp(prefix="pkg-installing-", dir=self.get_tmp_dir())
|
||||
vcs = None
|
||||
try:
|
||||
if url.startswith("file://"):
|
||||
_url = url[7:]
|
||||
if os.path.isfile(_url):
|
||||
self.unpack(_url, tmp_dir)
|
||||
else:
|
||||
fs.rmtree(tmp_dir)
|
||||
shutil.copytree(_url, tmp_dir, symlinks=True)
|
||||
elif url.startswith(("http://", "https://")):
|
||||
dl_path = self.download(url, checksum, silent=silent)
|
||||
assert os.path.isfile(dl_path)
|
||||
self.unpack(dl_path, tmp_dir)
|
||||
else:
|
||||
vcs = VCSClientFactory.new(tmp_dir, url)
|
||||
assert vcs.export()
|
||||
|
||||
root_dir = self.find_pkg_root(tmp_dir, spec)
|
||||
pkg_item = PackageSourceItem(
|
||||
root_dir,
|
||||
self.build_metadata(
|
||||
root_dir, spec, vcs.get_current_revision() if vcs else None
|
||||
),
|
||||
)
|
||||
pkg_item.dump_meta()
|
||||
return self._install_tmp_pkg(pkg_item)
|
||||
finally:
|
||||
if os.path.isdir(tmp_dir):
|
||||
fs.rmtree(tmp_dir)
|
||||
|
||||
def _install_tmp_pkg(self, tmp_pkg):
|
||||
assert isinstance(tmp_pkg, PackageSourceItem)
|
||||
# validate package version and declared requirements
|
||||
if (
|
||||
tmp_pkg.metadata.spec.requirements
|
||||
and tmp_pkg.metadata.version not in tmp_pkg.metadata.spec.requirements
|
||||
):
|
||||
raise PackageException(
|
||||
"Package version %s doesn't satisfy requirements %s based on %s"
|
||||
% (
|
||||
tmp_pkg.metadata.version,
|
||||
tmp_pkg.metadata.spec.requirements,
|
||||
tmp_pkg.metadata,
|
||||
)
|
||||
)
|
||||
dst_pkg = PackageSourceItem(
|
||||
os.path.join(self.package_dir, tmp_pkg.get_safe_dirname())
|
||||
)
|
||||
|
||||
# what to do with existing package?
|
||||
action = "overwrite"
|
||||
if tmp_pkg.metadata.spec.has_custom_name():
|
||||
action = "overwrite"
|
||||
dst_pkg = PackageSourceItem(
|
||||
os.path.join(self.package_dir, tmp_pkg.metadata.spec.name)
|
||||
)
|
||||
elif dst_pkg.metadata and dst_pkg.metadata.spec.external:
|
||||
if dst_pkg.metadata.spec.url != tmp_pkg.metadata.spec.url:
|
||||
action = "detach-existing"
|
||||
elif tmp_pkg.metadata.spec.external:
|
||||
action = "detach-new"
|
||||
elif dst_pkg.metadata and (
|
||||
dst_pkg.metadata.version != tmp_pkg.metadata.version
|
||||
or dst_pkg.metadata.spec.owner != tmp_pkg.metadata.spec.owner
|
||||
):
|
||||
action = (
|
||||
"detach-existing"
|
||||
if tmp_pkg.metadata.version > dst_pkg.metadata.version
|
||||
else "detach-new"
|
||||
)
|
||||
|
||||
def _cleanup_dir(path):
|
||||
if os.path.isdir(path):
|
||||
fs.rmtree(path)
|
||||
|
||||
if action == "detach-existing":
|
||||
target_dirname = "%s@%s" % (
|
||||
tmp_pkg.get_safe_dirname(),
|
||||
dst_pkg.metadata.version,
|
||||
)
|
||||
if dst_pkg.metadata.spec.url:
|
||||
target_dirname = "%s@src-%s" % (
|
||||
tmp_pkg.get_safe_dirname(),
|
||||
hashlib.md5(
|
||||
compat.hashlib_encode_data(dst_pkg.metadata.spec.url)
|
||||
).hexdigest(),
|
||||
)
|
||||
# move existing into the new place
|
||||
pkg_dir = os.path.join(self.package_dir, target_dirname)
|
||||
_cleanup_dir(pkg_dir)
|
||||
shutil.move(dst_pkg.path, pkg_dir)
|
||||
# move new source to the destination location
|
||||
_cleanup_dir(dst_pkg.path)
|
||||
shutil.move(tmp_pkg.path, dst_pkg.path)
|
||||
return PackageSourceItem(dst_pkg.path)
|
||||
|
||||
if action == "detach-new":
|
||||
target_dirname = "%s@%s" % (
|
||||
tmp_pkg.get_safe_dirname(),
|
||||
tmp_pkg.metadata.version,
|
||||
)
|
||||
if tmp_pkg.metadata.spec.external:
|
||||
target_dirname = "%s@src-%s" % (
|
||||
tmp_pkg.get_safe_dirname(),
|
||||
hashlib.md5(
|
||||
compat.hashlib_encode_data(tmp_pkg.metadata.spec.url)
|
||||
).hexdigest(),
|
||||
)
|
||||
pkg_dir = os.path.join(self.package_dir, target_dirname)
|
||||
_cleanup_dir(pkg_dir)
|
||||
shutil.move(tmp_pkg.path, pkg_dir)
|
||||
return PackageSourceItem(pkg_dir)
|
||||
|
||||
# otherwise, overwrite existing
|
||||
_cleanup_dir(dst_pkg.path)
|
||||
shutil.move(tmp_pkg.path, dst_pkg.path)
|
||||
return PackageSourceItem(dst_pkg.path)
|
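Note: a short usage sketch of the install flow above, assuming the LibraryPackageManager defined later in this patch; the package names and URL are placeholders:

    from platformio.package.manager.library import LibraryPackageManager

    lm = LibraryPackageManager()  # or LibraryPackageManager("/path/to/storage")
    # install from the registry by owner/name with a SemVer requirement
    pkg = lm.install("someowner/SomeLib@^1.2.0", silent=True)
    # install straight from a VCS URL; the spec name is parsed from the URL
    pkg2 = lm.install("https://github.com/someowner/SomeLib.git", silent=True)
    print(pkg.metadata.name, pkg.metadata.version)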
57
platformio/package/manager/_legacy.py
Normal file
@ -0,0 +1,57 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from platformio import fs
|
||||
from platformio.package.meta import PackageSourceItem, PackageSpec
|
||||
|
||||
|
||||
class PackageManagerLegacyMixin(object):
|
||||
def build_legacy_spec(self, pkg_dir):
|
||||
# find src manifest
|
||||
src_manifest_name = ".piopkgmanager.json"
|
||||
src_manifest_path = None
|
||||
for name in os.listdir(pkg_dir):
|
||||
if not os.path.isfile(os.path.join(pkg_dir, name, src_manifest_name)):
|
||||
continue
|
||||
src_manifest_path = os.path.join(pkg_dir, name, src_manifest_name)
|
||||
break
|
||||
|
||||
if src_manifest_path:
|
||||
src_manifest = fs.load_json(src_manifest_path)
|
||||
return PackageSpec(
|
||||
name=src_manifest.get("name"),
|
||||
url=src_manifest.get("url"),
|
||||
requirements=src_manifest.get("requirements"),
|
||||
)
|
||||
|
||||
# fall back to a package manifest
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
return PackageSpec(name=manifest.get("name"))
|
||||
|
||||
def legacy_load_manifest(self, pkg):
|
||||
assert isinstance(pkg, PackageSourceItem)
|
||||
manifest = self.load_manifest(pkg)
|
||||
manifest["__pkg_dir"] = pkg.path
|
||||
for key in ("name", "version"):
|
||||
if not manifest.get(key):
|
||||
manifest[key] = str(getattr(pkg.metadata, key))
|
||||
if pkg.metadata and pkg.metadata.spec and pkg.metadata.spec.external:
|
||||
manifest["__src_url"] = pkg.metadata.spec.url
|
||||
manifest["version"] = str(pkg.metadata.version)
|
||||
return manifest
|
||||
|
||||
def legacy_get_installed(self):
|
||||
return [self.legacy_load_manifest(pkg) for pkg in self.get_installed()]
|
228
platformio/package/manager/_registry.py
Normal file
@ -0,0 +1,228 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import time
|
||||
|
||||
import click
|
||||
|
||||
from platformio.clients.http import HTTPClient
|
||||
from platformio.clients.registry import RegistryClient
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.meta import PackageMetaData, PackageSpec
|
||||
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
except ImportError:
|
||||
from urlparse import urlparse
|
||||
|
||||
|
||||
class RegistryFileMirrorsIterator(object):
|
||||
|
||||
HTTP_CLIENT_INSTANCES = {}
|
||||
|
||||
def __init__(self, download_url):
|
||||
self.download_url = download_url
|
||||
self._url_parts = urlparse(download_url)
|
||||
self._base_url = "%s://%s" % (self._url_parts.scheme, self._url_parts.netloc)
|
||||
self._visited_mirrors = []
|
||||
|
||||
def __iter__(self): # pylint: disable=non-iterator-returned
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
http = self.get_http_client()
|
||||
response = http.send_request(
|
||||
"head",
|
||||
self._url_parts.path,
|
||||
allow_redirects=False,
|
||||
params=dict(bypass=",".join(self._visited_mirrors))
|
||||
if self._visited_mirrors
|
||||
else None,
|
||||
)
|
||||
stop_conditions = [
|
||||
response.status_code not in (302, 307),
|
||||
not response.headers.get("Location"),
|
||||
not response.headers.get("X-PIO-Mirror"),
|
||||
response.headers.get("X-PIO-Mirror") in self._visited_mirrors,
|
||||
]
|
||||
if any(stop_conditions):
|
||||
raise StopIteration
|
||||
self._visited_mirrors.append(response.headers.get("X-PIO-Mirror"))
|
||||
return (
|
||||
response.headers.get("Location"),
|
||||
response.headers.get("X-PIO-Content-SHA256"),
|
||||
)
|
||||
|
||||
def next(self):
|
||||
""" For Python 2 compatibility """
|
||||
return self.__next__()
|
||||
|
||||
def get_http_client(self):
|
||||
if self._base_url not in RegistryFileMirrorsIterator.HTTP_CLIENT_INSTANCES:
|
||||
RegistryFileMirrorsIterator.HTTP_CLIENT_INSTANCES[
|
||||
self._base_url
|
||||
] = HTTPClient(self._base_url)
|
||||
return RegistryFileMirrorsIterator.HTTP_CLIENT_INSTANCES[self._base_url]
|
||||
|
||||
|
||||
class PackageManageRegistryMixin(object):
|
||||
def install_from_registry(self, spec, search_filters=None, silent=False):
|
||||
if spec.owner and spec.name and not search_filters:
|
||||
package = self.fetch_registry_package(spec)
|
||||
if not package:
|
||||
raise UnknownPackageError(spec.humanize())
|
||||
version = self.pick_best_registry_version(package["versions"], spec)
|
||||
else:
|
||||
packages = self.search_registry_packages(spec, search_filters)
|
||||
if not packages:
|
||||
raise UnknownPackageError(spec.humanize())
|
||||
if len(packages) > 1 and not silent:
|
||||
self.print_multi_package_issue(packages, spec)
|
||||
package, version = self.find_best_registry_version(packages, spec)
|
||||
|
||||
pkgfile = self._pick_compatible_pkg_file(version["files"]) if version else None
|
||||
if not pkgfile:
|
||||
raise UnknownPackageError(spec.humanize())
|
||||
|
||||
for url, checksum in RegistryFileMirrorsIterator(pkgfile["download_url"]):
|
||||
try:
|
||||
return self.install_from_url(
|
||||
url,
|
||||
PackageSpec(
|
||||
owner=package["owner"]["username"],
|
||||
id=package["id"],
|
||||
name=package["name"],
|
||||
),
|
||||
checksum or pkgfile["checksum"]["sha256"],
|
||||
silent=silent,
|
||||
)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
self.print_message("Warning! Package Mirror: %s" % e, fg="yellow")
|
||||
self.print_message("Looking for another mirror...", fg="yellow")
|
||||
|
||||
return None
|
||||
|
||||
def get_registry_client_instance(self):
|
||||
if not self._registry_client:
|
||||
self._registry_client = RegistryClient()
|
||||
return self._registry_client
|
||||
|
||||
def search_registry_packages(self, spec, filters=None):
|
||||
filters = filters or {}
|
||||
if spec.id:
|
||||
filters["ids"] = str(spec.id)
|
||||
else:
|
||||
filters["types"] = self.pkg_type
|
||||
filters["names"] = spec.name.lower()
|
||||
if spec.owner:
|
||||
filters["owners"] = spec.owner.lower()
|
||||
return self.get_registry_client_instance().list_packages(filters=filters)[
|
||||
"items"
|
||||
]
|
||||
|
||||
def fetch_registry_package(self, spec):
|
||||
result = None
|
||||
if spec.owner and spec.name:
|
||||
result = self.get_registry_client_instance().get_package(
|
||||
self.pkg_type, spec.owner, spec.name
|
||||
)
|
||||
if not result and (spec.id or (spec.name and not spec.owner)):
|
||||
packages = self.search_registry_packages(spec)
|
||||
if packages:
|
||||
result = self.get_registry_client_instance().get_package(
|
||||
self.pkg_type, packages[0]["owner"]["username"], packages[0]["name"]
|
||||
)
|
||||
if not result:
|
||||
raise UnknownPackageError(spec.humanize())
|
||||
return result
|
||||
|
||||
def reveal_registry_package_id(self, spec, silent=False):
|
||||
spec = self.ensure_spec(spec)
|
||||
if spec.id:
|
||||
return spec.id
|
||||
packages = self.search_registry_packages(spec)
|
||||
if not packages:
|
||||
raise UnknownPackageError(spec.humanize())
|
||||
if len(packages) > 1 and not silent:
|
||||
self.print_multi_package_issue(packages, spec)
|
||||
click.echo("")
|
||||
return packages[0]["id"]
|
||||
|
||||
def print_multi_package_issue(self, packages, spec):
|
||||
self.print_message(
|
||||
"Warning! More than one package has been found by ", fg="yellow", nl=False
|
||||
)
|
||||
click.secho(spec.humanize(), fg="cyan", nl=False)
|
||||
click.secho(" requirements:", fg="yellow")
|
||||
for item in packages:
|
||||
click.echo(
|
||||
" - {owner}/{name} @ {version}".format(
|
||||
owner=click.style(item["owner"]["username"], fg="cyan"),
|
||||
name=item["name"],
|
||||
version=item["version"]["name"],
|
||||
)
|
||||
)
|
||||
self.print_message(
|
||||
"Please specify detailed REQUIREMENTS using package owner and version "
|
||||
"(showed above) to avoid name conflicts",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
def find_best_registry_version(self, packages, spec):
|
||||
# find compatible version within the latest package versions
|
||||
for package in packages:
|
||||
version = self.pick_best_registry_version([package["version"]], spec)
|
||||
if version:
|
||||
return (package, version)
|
||||
|
||||
if not spec.requirements:
|
||||
return None
|
||||
|
||||
# if custom version requirements were given, check ALL package versions
|
||||
for package in packages:
|
||||
version = self.pick_best_registry_version(
|
||||
self.fetch_registry_package(
|
||||
PackageSpec(
|
||||
id=package["id"],
|
||||
owner=package["owner"]["username"],
|
||||
name=package["name"],
|
||||
)
|
||||
).get("versions"),
|
||||
spec,
|
||||
)
|
||||
if version:
|
||||
return (package, version)
|
||||
time.sleep(1)
|
||||
return None
|
||||
|
||||
def pick_best_registry_version(self, versions, spec=None):
|
||||
assert not spec or isinstance(spec, PackageSpec)
|
||||
best = None
|
||||
for version in versions:
|
||||
semver = PackageMetaData.to_semver(version["name"])
|
||||
if spec and spec.requirements and semver not in spec.requirements:
|
||||
continue
|
||||
if not any(
|
||||
self.is_system_compatible(f.get("system")) for f in version["files"]
|
||||
):
|
||||
continue
|
||||
if not best or (semver > PackageMetaData.to_semver(best["name"])):
|
||||
best = version
|
||||
return best
|
||||
|
||||
def _pick_compatible_pkg_file(self, version_files):
|
||||
for item in version_files:
|
||||
if self.is_system_compatible(item.get("system")):
|
||||
return item
|
||||
return None
|
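Note: install_from_registry() reduces to walking the (url, checksum) pairs yielded by RegistryFileMirrorsIterator until one mirror works. A reduced sketch of that loop; the download URL is a placeholder and a real run needs network access:

    from platformio.package.manager._registry import RegistryFileMirrorsIterator

    download_url = "https://registry.example.com/download/pkg.tar.gz"  # placeholder
    for url, sha256 in RegistryFileMirrorsIterator(download_url):
        try:
            # in the mixin this is where install_from_url(url, spec, sha256) runs
            print("trying mirror", url, "sha256", sha256)
            break
        except Exception as exc:  # on failure, fall through to the next mirror
            print("mirror failed:", exc)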
90
platformio/package/manager/_uninstall.py
Normal file
@ -0,0 +1,90 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.meta import PackageSourceItem, PackageSpec
|
||||
|
||||
|
||||
class PackageManagerUninstallMixin(object):
|
||||
def uninstall(self, spec, silent=False, skip_dependencies=False):
|
||||
try:
|
||||
self.lock()
|
||||
return self._uninstall(spec, silent, skip_dependencies)
|
||||
finally:
|
||||
self.unlock()
|
||||
|
||||
def _uninstall(self, spec, silent=False, skip_dependencies=False):
|
||||
pkg = self.get_package(spec)
|
||||
if not pkg or not pkg.metadata:
|
||||
raise UnknownPackageError(spec)
|
||||
|
||||
if not silent:
|
||||
self.print_message(
|
||||
"Removing %s @ %s: \t"
|
||||
% (click.style(pkg.metadata.name, fg="cyan"), pkg.metadata.version),
|
||||
nl=False,
|
||||
)
|
||||
|
||||
# first, remove dependencies
|
||||
if not skip_dependencies:
|
||||
self._uninstall_dependencies(pkg, silent)
|
||||
|
||||
if os.path.islink(pkg.path):
|
||||
os.unlink(pkg.path)
|
||||
else:
|
||||
fs.rmtree(pkg.path)
|
||||
self.memcache_reset()
|
||||
|
||||
# reattach a detached package with the same name, if any
|
||||
detached_pkg = self.get_package(PackageSpec(name=pkg.metadata.name))
|
||||
if (
|
||||
detached_pkg
|
||||
and "@" in detached_pkg.path
|
||||
and not os.path.isdir(
|
||||
os.path.join(self.package_dir, detached_pkg.get_safe_dirname())
|
||||
)
|
||||
):
|
||||
shutil.move(
|
||||
detached_pkg.path,
|
||||
os.path.join(self.package_dir, detached_pkg.get_safe_dirname()),
|
||||
)
|
||||
self.memcache_reset()
|
||||
|
||||
if not silent:
|
||||
click.echo("[%s]" % click.style("OK", fg="green"))
|
||||
|
||||
return pkg
|
||||
|
||||
def _uninstall_dependencies(self, pkg, silent=False):
|
||||
assert isinstance(pkg, PackageSourceItem)
|
||||
manifest = self.load_manifest(pkg)
|
||||
if not manifest.get("dependencies"):
|
||||
return
|
||||
if not silent:
|
||||
self.print_message("Removing dependencies...", fg="yellow")
|
||||
for dependency in manifest.get("dependencies"):
|
||||
pkg = self.get_package(
|
||||
PackageSpec(
|
||||
name=dependency.get("name"), requirements=dependency.get("version")
|
||||
)
|
||||
)
|
||||
if not pkg:
|
||||
continue
|
||||
self._uninstall(pkg, silent=silent)
|
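Note: uninstall() accepts the same spec forms as install() and, as a last step, moves a detached copy of the same package back into place if the primary directory is gone. A minimal sketch; the package names are placeholders:

    from platformio.package.manager.library import LibraryPackageManager

    lm = LibraryPackageManager()
    # raises UnknownPackageError if the package is not installed
    lm.uninstall("someowner/SomeLib", silent=True)      # dependencies removed too
    lm.uninstall("OtherLib", skip_dependencies=True)    # keep dependencies in place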
166
platformio/package/manager/_update.py
Normal file
@ -0,0 +1,166 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio import util
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.meta import (
|
||||
PackageOutdatedResult,
|
||||
PackageSourceItem,
|
||||
PackageSpec,
|
||||
)
|
||||
from platformio.package.vcsclient import VCSBaseException, VCSClientFactory
|
||||
|
||||
|
||||
class PackageManagerUpdateMixin(object):
|
||||
def outdated(self, pkg, spec=None):
|
||||
assert isinstance(pkg, PackageSourceItem)
|
||||
assert not spec or isinstance(spec, PackageSpec)
|
||||
assert os.path.isdir(pkg.path) and pkg.metadata
|
||||
|
||||
# skip a package that is detached to a specific version
|
||||
detached_conditions = [
|
||||
"@" in pkg.path,
|
||||
pkg.metadata.spec and not pkg.metadata.spec.external,
|
||||
not spec,
|
||||
]
|
||||
if all(detached_conditions):
|
||||
return PackageOutdatedResult(current=pkg.metadata.version, detached=True)
|
||||
|
||||
latest = None
|
||||
wanted = None
|
||||
if pkg.metadata.spec.external:
|
||||
latest = self._fetch_vcs_latest_version(pkg)
|
||||
else:
|
||||
try:
|
||||
reg_pkg = self.fetch_registry_package(pkg.metadata.spec)
|
||||
latest = (
|
||||
self.pick_best_registry_version(reg_pkg["versions"]) or {}
|
||||
).get("name")
|
||||
if spec:
|
||||
wanted = (
|
||||
self.pick_best_registry_version(reg_pkg["versions"], spec) or {}
|
||||
).get("name")
|
||||
if not wanted:  # the found registry package does not match the spec
|
||||
latest = None
|
||||
except UnknownPackageError:
|
||||
pass
|
||||
|
||||
return PackageOutdatedResult(
|
||||
current=pkg.metadata.version, latest=latest, wanted=wanted
|
||||
)
|
||||
|
||||
def _fetch_vcs_latest_version(self, pkg):
|
||||
vcs = None
|
||||
try:
|
||||
vcs = VCSClientFactory.new(pkg.path, pkg.metadata.spec.url, silent=True)
|
||||
except VCSBaseException:
|
||||
return None
|
||||
if not vcs.can_be_updated:
|
||||
return None
|
||||
return str(
|
||||
self.build_metadata(
|
||||
pkg.path, pkg.metadata.spec, vcs_revision=vcs.get_latest_revision()
|
||||
).version
|
||||
)
|
||||
|
||||
def update(self, from_spec, to_spec=None, only_check=False, silent=False):
|
||||
pkg = self.get_package(from_spec)
|
||||
if not pkg or not pkg.metadata:
|
||||
raise UnknownPackageError(from_spec)
|
||||
|
||||
if not silent:
|
||||
click.echo(
|
||||
"{} {:<45} {:<30}".format(
|
||||
"Checking" if only_check else "Updating",
|
||||
click.style(pkg.metadata.spec.humanize(), fg="cyan"),
|
||||
"%s (%s)" % (pkg.metadata.version, to_spec.requirements)
|
||||
if to_spec and to_spec.requirements
|
||||
else str(pkg.metadata.version),
|
||||
),
|
||||
nl=False,
|
||||
)
|
||||
if not util.internet_on():
|
||||
if not silent:
|
||||
click.echo("[%s]" % (click.style("Off-line", fg="yellow")))
|
||||
return pkg
|
||||
|
||||
outdated = self.outdated(pkg, to_spec)
|
||||
if not silent:
|
||||
self.print_outdated_state(outdated)
|
||||
|
||||
up_to_date = any(
|
||||
[
|
||||
outdated.detached,
|
||||
not outdated.latest,
|
||||
outdated.latest and outdated.current == outdated.latest,
|
||||
outdated.wanted and outdated.current == outdated.wanted,
|
||||
]
|
||||
)
|
||||
if only_check or up_to_date:
|
||||
return pkg
|
||||
|
||||
try:
|
||||
self.lock()
|
||||
return self._update(pkg, outdated, silent=silent)
|
||||
finally:
|
||||
self.unlock()
|
||||
|
||||
@staticmethod
|
||||
def print_outdated_state(outdated):
|
||||
if outdated.detached:
|
||||
return click.echo("[%s]" % (click.style("Detached", fg="yellow")))
|
||||
if not outdated.latest or outdated.current == outdated.latest:
|
||||
return click.echo("[%s]" % (click.style("Up-to-date", fg="green")))
|
||||
if outdated.wanted and outdated.current == outdated.wanted:
|
||||
return click.echo(
|
||||
"[%s]"
|
||||
% (click.style("Incompatible (%s)" % outdated.latest, fg="yellow"))
|
||||
)
|
||||
return click.echo(
|
||||
"[%s]" % (click.style(str(outdated.wanted or outdated.latest), fg="red"))
|
||||
)
|
||||
|
||||
def _update(self, pkg, outdated, silent=False):
|
||||
if pkg.metadata.spec.external:
|
||||
vcs = VCSClientFactory.new(pkg.path, pkg.metadata.spec.url)
|
||||
assert vcs.update()
|
||||
pkg.metadata.version = self._fetch_vcs_latest_version(pkg)
|
||||
pkg.dump_meta()
|
||||
return pkg
|
||||
|
||||
new_pkg = self.install(
|
||||
PackageSpec(
|
||||
id=pkg.metadata.spec.id,
|
||||
owner=pkg.metadata.spec.owner,
|
||||
name=pkg.metadata.spec.name,
|
||||
requirements=outdated.wanted or outdated.latest,
|
||||
),
|
||||
silent=silent,
|
||||
)
|
||||
if new_pkg:
|
||||
old_pkg = self.get_package(
|
||||
PackageSpec(
|
||||
id=pkg.metadata.spec.id,
|
||||
owner=pkg.metadata.spec.owner,
|
||||
name=pkg.metadata.name,
|
||||
requirements=pkg.metadata.version,
|
||||
)
|
||||
)
|
||||
if old_pkg:
|
||||
self.uninstall(old_pkg, silent=silent, skip_dependencies=True)
|
||||
return new_pkg
|
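Note: outdated() classifies an installed package as detached, up-to-date, wanted (best version inside the spec) or latest, and update() acts on that result. A sketch, assuming a previously installed library with a placeholder name:

    from platformio.package.manager.library import LibraryPackageManager
    from platformio.package.meta import PackageSpec

    lm = LibraryPackageManager()
    pkg = lm.get_package("someowner/SomeLib")  # placeholder name
    if pkg:
        spec = PackageSpec("someowner/SomeLib@^1.0.0")
        state = lm.outdated(pkg, spec)
        print(state)  # PackageOutdatedResult <current=... latest=... wanted=...>
        if state.is_outdated():
            lm.update(pkg, to_spec=spec)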
251
platformio/package/manager/base.py
Normal file
@ -0,0 +1,251 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
import semantic_version
|
||||
|
||||
from platformio import util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.compat import ci_strings_are_equal
|
||||
from platformio.package.exception import ManifestException, MissingPackageManifestError
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.package.manager._download import PackageManagerDownloadMixin
|
||||
from platformio.package.manager._install import PackageManagerInstallMixin
|
||||
from platformio.package.manager._legacy import PackageManagerLegacyMixin
|
||||
from platformio.package.manager._registry import PackageManageRegistryMixin
|
||||
from platformio.package.manager._uninstall import PackageManagerUninstallMixin
|
||||
from platformio.package.manager._update import PackageManagerUpdateMixin
|
||||
from platformio.package.manifest.parser import ManifestParserFactory
|
||||
from platformio.package.meta import (
|
||||
PackageMetaData,
|
||||
PackageSourceItem,
|
||||
PackageSpec,
|
||||
PackageType,
|
||||
)
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
class BasePackageManager( # pylint: disable=too-many-public-methods
|
||||
PackageManagerDownloadMixin,
|
||||
PackageManageRegistryMixin,
|
||||
PackageManagerInstallMixin,
|
||||
PackageManagerUninstallMixin,
|
||||
PackageManagerUpdateMixin,
|
||||
PackageManagerLegacyMixin,
|
||||
):
|
||||
_MEMORY_CACHE = {}
|
||||
|
||||
def __init__(self, pkg_type, package_dir):
|
||||
self.pkg_type = pkg_type
|
||||
self.package_dir = self.ensure_dir_exists(package_dir)
|
||||
self._MEMORY_CACHE = {}
|
||||
|
||||
self._lockfile = None
|
||||
self._download_dir = None
|
||||
self._tmp_dir = None
|
||||
self._registry_client = None
|
||||
|
||||
def lock(self):
|
||||
if self._lockfile:
|
||||
return
|
||||
self._lockfile = LockFile(self.package_dir)
|
||||
self._lockfile.acquire()
|
||||
|
||||
def unlock(self):
|
||||
if hasattr(self, "_lockfile") and self._lockfile:
|
||||
self._lockfile.release()
|
||||
self._lockfile = None
|
||||
|
||||
def __del__(self):
|
||||
self.unlock()
|
||||
|
||||
def memcache_get(self, key, default=None):
|
||||
return self._MEMORY_CACHE.get(key, default)
|
||||
|
||||
def memcache_set(self, key, value):
|
||||
self._MEMORY_CACHE[key] = value
|
||||
|
||||
def memcache_reset(self):
|
||||
self._MEMORY_CACHE.clear()
|
||||
|
||||
@staticmethod
|
||||
def is_system_compatible(value):
|
||||
if not value or "*" in value:
|
||||
return True
|
||||
return util.items_in_list(value, util.get_systype())
|
||||
|
||||
@staticmethod
|
||||
def ensure_dir_exists(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path)
|
||||
assert os.path.isdir(path)
|
||||
return path
|
||||
|
||||
@staticmethod
|
||||
def ensure_spec(spec):
|
||||
return spec if isinstance(spec, PackageSpec) else PackageSpec(spec)
|
||||
|
||||
@property
|
||||
def manifest_names(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def print_message(self, message, **kwargs):
|
||||
click.echo(
|
||||
"%s: " % str(self.__class__.__name__).replace("Package", " "), nl=False
|
||||
)
|
||||
click.secho(message, **kwargs)
|
||||
|
||||
def get_download_dir(self):
|
||||
if not self._download_dir:
|
||||
self._download_dir = self.ensure_dir_exists(
|
||||
os.path.join(get_project_cache_dir(), "downloads")
|
||||
)
|
||||
return self._download_dir
|
||||
|
||||
def get_tmp_dir(self):
|
||||
if not self._tmp_dir:
|
||||
self._tmp_dir = self.ensure_dir_exists(
|
||||
os.path.join(get_project_cache_dir(), "tmp")
|
||||
)
|
||||
return self._tmp_dir
|
||||
|
||||
def find_pkg_root(self, path, spec): # pylint: disable=unused-argument
|
||||
if self.manifest_exists(path):
|
||||
return path
|
||||
for root, _, _ in os.walk(path):
|
||||
if self.manifest_exists(root):
|
||||
return root
|
||||
raise MissingPackageManifestError(", ".join(self.manifest_names))
|
||||
|
||||
def get_manifest_path(self, pkg_dir):
|
||||
if not os.path.isdir(pkg_dir):
|
||||
return None
|
||||
for name in self.manifest_names:
|
||||
manifest_path = os.path.join(pkg_dir, name)
|
||||
if os.path.isfile(manifest_path):
|
||||
return manifest_path
|
||||
return None
|
||||
|
||||
def manifest_exists(self, pkg_dir):
|
||||
return self.get_manifest_path(pkg_dir)
|
||||
|
||||
def load_manifest(self, src):
|
||||
path = src.path if isinstance(src, PackageSourceItem) else src
|
||||
cache_key = "load_manifest-%s" % path
|
||||
result = self.memcache_get(cache_key)
|
||||
if result:
|
||||
return result
|
||||
candidates = (
|
||||
[os.path.join(path, name) for name in self.manifest_names]
|
||||
if os.path.isdir(path)
|
||||
else [path]
|
||||
)
|
||||
for item in candidates:
|
||||
if not os.path.isfile(item):
|
||||
continue
|
||||
try:
|
||||
result = ManifestParserFactory.new_from_file(item).as_dict()
|
||||
self.memcache_set(cache_key, result)
|
||||
return result
|
||||
except ManifestException as e:
|
||||
if not PlatformioCLI.in_silence():
|
||||
self.print_message(str(e), fg="yellow")
|
||||
raise MissingPackageManifestError(", ".join(self.manifest_names))
|
||||
|
||||
@staticmethod
|
||||
def generate_rand_version():
|
||||
return datetime.now().strftime("0.0.0+%Y%m%d%H%M%S")
|
||||
|
||||
def build_metadata(self, pkg_dir, spec, vcs_revision=None):
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
metadata = PackageMetaData(
|
||||
type=self.pkg_type,
|
||||
name=manifest.get("name"),
|
||||
version=manifest.get("version"),
|
||||
spec=spec,
|
||||
)
|
||||
if not metadata.name or spec.has_custom_name():
|
||||
metadata.name = spec.name
|
||||
if vcs_revision:
|
||||
metadata.version = "%s+sha.%s" % (
|
||||
metadata.version if metadata.version else "0.0.0",
|
||||
vcs_revision,
|
||||
)
|
||||
if not metadata.version:
|
||||
metadata.version = self.generate_rand_version()
|
||||
return metadata
|
||||
|
||||
def get_installed(self):
|
||||
result = []
|
||||
for name in os.listdir(self.package_dir):
|
||||
pkg_dir = os.path.join(self.package_dir, name)
|
||||
if not os.path.isdir(pkg_dir):
|
||||
continue
|
||||
pkg = PackageSourceItem(pkg_dir)
|
||||
if not pkg.metadata:
|
||||
try:
|
||||
spec = self.build_legacy_spec(pkg_dir)
|
||||
pkg.metadata = self.build_metadata(pkg_dir, spec)
|
||||
except MissingPackageManifestError:
|
||||
pass
|
||||
if pkg.metadata:
|
||||
result.append(pkg)
|
||||
return result
|
||||
|
||||
def get_package(self, spec):
|
||||
if isinstance(spec, PackageSourceItem):
|
||||
return spec
|
||||
spec = self.ensure_spec(spec)
|
||||
best = None
|
||||
for pkg in self.get_installed():
|
||||
if not self._test_pkg_with_spec(pkg, spec):
|
||||
continue
|
||||
assert isinstance(pkg.metadata.version, semantic_version.Version)
|
||||
if spec.requirements and pkg.metadata.version not in spec.requirements:
|
||||
continue
|
||||
if not best or (pkg.metadata.version > best.metadata.version):
|
||||
best = pkg
|
||||
return best
|
||||
|
||||
def _test_pkg_with_spec(self, pkg, spec):
|
||||
# "id" mismatch
|
||||
if spec.id and spec.id != pkg.metadata.spec.id:
|
||||
return False
|
||||
|
||||
# "owner" mismatch
|
||||
if spec.owner and not ci_strings_are_equal(spec.owner, pkg.metadata.spec.owner):
|
||||
return False
|
||||
|
||||
# external "URL" mismatch
|
||||
if spec.external:
|
||||
# local folder mismatch
|
||||
if spec.url == pkg.path or (
|
||||
spec.url.startswith("file://") and pkg.path == spec.url[7:]
|
||||
):
|
||||
return True
|
||||
if spec.url != pkg.metadata.spec.url:
|
||||
return False
|
||||
|
||||
# "name" mismatch
|
||||
elif not spec.id and not ci_strings_are_equal(spec.name, pkg.metadata.name):
|
||||
return False
|
||||
|
||||
if self.pkg_type == PackageType.TOOL:
|
||||
# TODO: check "system" for pkg
|
||||
pass
|
||||
|
||||
return True
|
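Note: BasePackageManager is not used on its own; the concrete managers below bind a package type and a default storage directory to it. The lookup API they all share, sketched with a placeholder name:

    from platformio.package.manager.library import LibraryPackageManager
    from platformio.package.meta import PackageSpec

    lm = LibraryPackageManager()
    for pkg in lm.get_installed():
        print(pkg.metadata.name, pkg.metadata.version)
    # spec matching honours owner, id, name, external URL and SemVer requirements
    best = lm.get_package(PackageSpec("someowner/SomeLib@>=1.0.0,<2.0.0"))
    print(best.path if best else "not installed")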
64
platformio/package/manager/library.py
Normal file
@ -0,0 +1,64 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from platformio.package.exception import MissingPackageManifestError
|
||||
from platformio.package.manager.base import BasePackageManager
|
||||
from platformio.package.meta import PackageSpec, PackageType
|
||||
from platformio.project.helpers import get_project_global_lib_dir
|
||||
|
||||
|
||||
class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-ancestors
|
||||
def __init__(self, package_dir=None):
|
||||
super(LibraryPackageManager, self).__init__(
|
||||
PackageType.LIBRARY, package_dir or get_project_global_lib_dir()
|
||||
)
|
||||
|
||||
@property
|
||||
def manifest_names(self):
|
||||
return PackageType.get_manifest_map()[PackageType.LIBRARY]
|
||||
|
||||
def find_pkg_root(self, path, spec):
|
||||
try:
|
||||
return super(LibraryPackageManager, self).find_pkg_root(path, spec)
|
||||
except MissingPackageManifestError:
|
||||
pass
|
||||
assert isinstance(spec, PackageSpec)
|
||||
|
||||
root_dir = self.find_library_root(path)
|
||||
|
||||
# automatically generate library manifest
|
||||
with open(os.path.join(root_dir, "library.json"), "w") as fp:
|
||||
json.dump(
|
||||
dict(name=spec.name, version=self.generate_rand_version(),),
|
||||
fp,
|
||||
indent=2,
|
||||
)
|
||||
|
||||
return root_dir
|
||||
|
||||
@staticmethod
|
||||
def find_library_root(path):
|
||||
for root, dirs, files in os.walk(path):
|
||||
if not files and len(dirs) == 1:
|
||||
continue
|
||||
for fname in files:
|
||||
if not fname.endswith((".c", ".cpp", ".h", ".S")):
|
||||
continue
|
||||
if os.path.isdir(os.path.join(os.path.dirname(root), "src")):
|
||||
return os.path.dirname(root)
|
||||
return root
|
||||
return path
|
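Note: when an external library ships without any manifest, find_pkg_root() falls back to find_library_root() and writes a minimal library.json (name from the spec, a generated 0.0.0+<timestamp> version). A small self-contained check of the root detection, using a throwaway source tree:

    import os
    import tempfile

    from platformio.package.manager.library import LibraryPackageManager

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, "src"))
    open(os.path.join(root, "src", "SomeLib.cpp"), "w").close()

    # the sources live under src/, so the library root is the parent directory
    assert LibraryPackageManager.find_library_root(root) == root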
30
platformio/package/manager/platform.py
Normal file
@ -0,0 +1,30 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.package.manager.base import BasePackageManager
|
||||
from platformio.package.meta import PackageType
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-ancestors
|
||||
def __init__(self, package_dir=None):
|
||||
self.config = ProjectConfig.get_instance()
|
||||
super(PlatformPackageManager, self).__init__(
|
||||
PackageType.PLATFORM,
|
||||
package_dir or self.config.get_optional_dir("platforms"),
|
||||
)
|
||||
|
||||
@property
|
||||
def manifest_names(self):
|
||||
return PackageType.get_manifest_map()[PackageType.PLATFORM]
|
29
platformio/package/manager/tool.py
Normal file
@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.package.manager.base import BasePackageManager
|
||||
from platformio.package.meta import PackageType
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
class ToolPackageManager(BasePackageManager): # pylint: disable=too-many-ancestors
|
||||
def __init__(self, package_dir=None):
|
||||
self.config = ProjectConfig.get_instance()
|
||||
super(ToolPackageManager, self).__init__(
|
||||
PackageType.TOOL, package_dir or self.config.get_optional_dir("packages"),
|
||||
)
|
||||
|
||||
@property
|
||||
def manifest_names(self):
|
||||
return PackageType.get_manifest_map()[PackageType.TOOL]
|
@ -250,7 +250,7 @@ class ManifestSchema(BaseSchema):
|
||||
def load_spdx_licenses():
|
||||
r = requests.get(
|
||||
"https://raw.githubusercontent.com/spdx/license-list-data"
|
||||
"/v3.9/json/licenses.json"
|
||||
"/v3.10/json/licenses.json"
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
446
platformio/package/meta.py
Normal file
@ -0,0 +1,446 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import tarfile
|
||||
from binascii import crc32
|
||||
|
||||
import semantic_version
|
||||
|
||||
from platformio.compat import get_object_members, hashlib_encode_data, string_types
|
||||
from platformio.package.manifest.parser import ManifestFileType
|
||||
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
except ImportError:
|
||||
from urlparse import urlparse
|
||||
|
||||
|
||||
class PackageType(object):
|
||||
LIBRARY = "library"
|
||||
PLATFORM = "platform"
|
||||
TOOL = "tool"
|
||||
|
||||
@classmethod
|
||||
def items(cls):
|
||||
return get_object_members(cls)
|
||||
|
||||
@classmethod
|
||||
def get_manifest_map(cls):
|
||||
return {
|
||||
cls.PLATFORM: (ManifestFileType.PLATFORM_JSON,),
|
||||
cls.LIBRARY: (
|
||||
ManifestFileType.LIBRARY_JSON,
|
||||
ManifestFileType.LIBRARY_PROPERTIES,
|
||||
ManifestFileType.MODULE_JSON,
|
||||
),
|
||||
cls.TOOL: (ManifestFileType.PACKAGE_JSON,),
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_archive(cls, path):
|
||||
assert path.endswith("tar.gz")
|
||||
manifest_map = cls.get_manifest_map()
|
||||
with tarfile.open(path, mode="r:gz") as tf:
|
||||
for t in sorted(cls.items().values()):
|
||||
for manifest in manifest_map[t]:
|
||||
try:
|
||||
if tf.getmember(manifest):
|
||||
return t
|
||||
except KeyError:
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
class PackageOutdatedResult(object):
|
||||
def __init__(self, current, latest=None, wanted=None, detached=False):
|
||||
self.current = current
|
||||
self.latest = latest
|
||||
self.wanted = wanted
|
||||
self.detached = detached
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"PackageOutdatedResult <current={current} latest={latest} wanted={wanted} "
|
||||
"detached={detached}>".format(
|
||||
current=self.current,
|
||||
latest=self.latest,
|
||||
wanted=self.wanted,
|
||||
detached=self.detached,
|
||||
)
|
||||
)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if (
|
||||
value
|
||||
and name in ("current", "latest", "wanted")
|
||||
and not isinstance(value, semantic_version.Version)
|
||||
):
|
||||
value = semantic_version.Version(str(value))
|
||||
return super(PackageOutdatedResult, self).__setattr__(name, value)
|
||||
|
||||
def is_outdated(self, allow_incompatible=False):
|
||||
if self.detached or not self.latest or self.current == self.latest:
|
||||
return False
|
||||
if allow_incompatible:
|
||||
return self.current != self.latest
|
||||
if self.wanted:
|
||||
return self.current != self.wanted
|
||||
return True
|
||||
|
||||
|
||||
class PackageSpec(object): # pylint: disable=too-many-instance-attributes
|
||||
def __init__( # pylint: disable=redefined-builtin,too-many-arguments
|
||||
self, raw=None, owner=None, id=None, name=None, requirements=None, url=None
|
||||
):
|
||||
self.owner = owner
|
||||
self.id = id
|
||||
self.name = name
|
||||
self._requirements = None
|
||||
self.url = url
|
||||
self.raw = raw
|
||||
if requirements:
|
||||
self.requirements = requirements
|
||||
self._name_is_custom = False
|
||||
self._parse(raw)
|
||||
|
||||
def __eq__(self, other):
|
||||
return all(
|
||||
[
|
||||
self.owner == other.owner,
|
||||
self.id == other.id,
|
||||
self.name == other.name,
|
||||
self.requirements == other.requirements,
|
||||
self.url == other.url,
|
||||
]
|
||||
)
|
||||
|
||||
def __hash__(self):
|
||||
return crc32(
|
||||
hashlib_encode_data(
|
||||
"%s-%s-%s-%s-%s"
|
||||
% (self.owner, self.id, self.name, self.requirements, self.url)
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"PackageSpec <owner={owner} id={id} name={name} "
|
||||
"requirements={requirements} url={url}>".format(**self.as_dict())
|
||||
)
|
||||
|
||||
@property
|
||||
def external(self):
|
||||
return bool(self.url)
|
||||
|
||||
@property
|
||||
def requirements(self):
|
||||
return self._requirements
|
||||
|
||||
@requirements.setter
|
||||
def requirements(self, value):
|
||||
if not value:
|
||||
self._requirements = None
|
||||
return
|
||||
self._requirements = (
|
||||
value
|
||||
if isinstance(value, semantic_version.SimpleSpec)
|
||||
else semantic_version.SimpleSpec(str(value))
|
||||
)
|
||||
|
||||
def humanize(self):
|
||||
result = ""
|
||||
if self.url:
|
||||
result = self.url
|
||||
elif self.name:
|
||||
if self.owner:
|
||||
result = self.owner + "/"
|
||||
result += self.name
|
||||
elif self.id:
|
||||
result = "id:%d" % self.id
|
||||
if self.requirements:
|
||||
result += " @ " + str(self.requirements)
|
||||
return result
|
||||
|
||||
def has_custom_name(self):
|
||||
        return self._name_is_custom

    def as_dict(self):
        return dict(
            owner=self.owner,
            id=self.id,
            name=self.name,
            requirements=str(self.requirements) if self.requirements else None,
            url=self.url,
        )

    def as_dependency(self):
        if self.url:
            return self.raw or self.url
        result = ""
        if self.name:
            result = "%s/%s" % (self.owner, self.name) if self.owner else self.name
        elif self.id:
            result = str(self.id)
        assert result
        if self.requirements:
            result = "%s@%s" % (result, self.requirements)
        return result

    def _parse(self, raw):
        if raw is None:
            return
        if not isinstance(raw, string_types):
            raw = str(raw)
        raw = raw.strip()

        parsers = (
            self._parse_requirements,
            self._parse_custom_name,
            self._parse_id,
            self._parse_owner,
            self._parse_url,
        )
        for parser in parsers:
            if raw is None:
                break
            raw = parser(raw)

        # if name is not custom, parse it from URL
        if not self.name and self.url:
            self.name = self._parse_name_from_url(self.url)
        elif raw:
            # the leftover is a package name
            self.name = raw

    def _parse_requirements(self, raw):
        if "@" not in raw:
            return raw
        tokens = raw.rsplit("@", 1)
        if any(s in tokens[1] for s in (":", "/")):
            return raw
        self.requirements = tokens[1].strip()
        return tokens[0].strip()

    def _parse_custom_name(self, raw):
        if "=" not in raw or raw.startswith("id="):
            return raw
        tokens = raw.split("=", 1)
        if "/" in tokens[0]:
            return raw
        self.name = tokens[0].strip()
        self._name_is_custom = True
        return tokens[1].strip()

    def _parse_id(self, raw):
        if raw.isdigit():
            self.id = int(raw)
            return None
        if raw.startswith("id="):
            return self._parse_id(raw[3:])
        return raw

    def _parse_owner(self, raw):
        if raw.count("/") != 1 or "@" in raw:
            return raw
        tokens = raw.split("/", 1)
        self.owner = tokens[0].strip()
        self.name = tokens[1].strip()
        return None

    def _parse_url(self, raw):
        if not any(s in raw for s in ("@", ":", "/")):
            return raw
        self.url = raw.strip()
        parts = urlparse(self.url)

        # if local file or valid URL with scheme vcs+protocol://
        if parts.scheme == "file" or "+" in parts.scheme or self.url.startswith("git+"):
            return None

        # parse VCS
        git_conditions = [
            parts.path.endswith(".git"),
            # Handle GitHub URL (https://github.com/user/package)
            parts.netloc in ("github.com", "gitlab.com", "bitbucket.com")
            and not parts.path.endswith((".zip", ".tar.gz")),
        ]
        hg_conditions = [
            # Handle Developer Mbed URL
            # (https://developer.mbed.org/users/user/code/package/)
            # (https://os.mbed.com/users/user/code/package/)
            parts.netloc in ("mbed.com", "os.mbed.com", "developer.mbed.org")
        ]
        if any(git_conditions):
            self.url = "git+" + self.url
        elif any(hg_conditions):
            self.url = "hg+" + self.url

        return None

    @staticmethod
    def _parse_name_from_url(url):
        if url.endswith("/"):
            url = url[:-1]
        for c in ("#", "?"):
            if c in url:
                url = url[: url.index(c)]

        # parse real repository name from GitHub
        parts = urlparse(url)
        if parts.netloc == "github.com" and parts.path.count("/") > 2:
            return parts.path.split("/")[2]

        name = os.path.basename(url)
        if "." in name:
            return name.split(".", 1)[0].strip()
        return name

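For orientation, a minimal sketch (not part of this commit) of how the parser chain above resolves a few typical dependency strings; the concrete spec strings are illustrative only, and the expected values are inferred from the _parse_* helpers shown above:

    # Illustrative only; values inferred from the parsing logic above.
    spec = PackageSpec("bblanchon/ArduinoJson @ ^5.10.0")
    print(spec.owner, spec.name, spec.requirements)  # bblanchon ArduinoJson ^5.10.0
    print(spec.as_dependency())                      # bblanchon/ArduinoJson@^5.10.0
    print(PackageSpec("64").id)                      # 64
    print(PackageSpec("https://github.com/gioblu/PJON.git#6.2").url)
    # git+https://github.com/gioblu/PJON.git#6.2  (VCS scheme prepended by _parse_url)
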
class PackageMetaData(object):
    def __init__(  # pylint: disable=redefined-builtin
        self, type, name, version, spec=None
    ):
        assert type in PackageType.items().values()
        if spec:
            assert isinstance(spec, PackageSpec)
        self.type = type
        self.name = name
        self._version = None
        self.version = version
        self.spec = spec

    def __repr__(self):
        return (
            "PackageMetaData <type={type} name={name} version={version} "
            "spec={spec}>".format(**self.as_dict())
        )

    def __eq__(self, other):
        return all(
            [
                self.type == other.type,
                self.name == other.name,
                self.version == other.version,
                self.spec == other.spec,
            ]
        )

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        if not value:
            self._version = None
            return
        self._version = (
            value
            if isinstance(value, semantic_version.Version)
            else self.to_semver(value)
        )

    @staticmethod
    def to_semver(value, force=True, raise_exception=False):
        assert value
        try:
            return semantic_version.Version(value)
        except ValueError:
            pass
        if force:
            try:
                return semantic_version.Version.coerce(value)
            except ValueError:
                pass
        if raise_exception:
            raise ValueError("Invalid SemVer version %s" % value)
        # parse commit hash
        if re.match(r"^[\da-f]+$", value, flags=re.I):
            return semantic_version.Version("0.0.0+sha." + value)
        return semantic_version.Version("0.0.0+" + value)

    def as_dict(self):
        return dict(
            type=self.type,
            name=self.name,
            version=str(self.version),
            spec=self.spec.as_dict() if self.spec else None,
        )

    def dump(self, path):
        with open(path, "w") as fp:
            return json.dump(self.as_dict(), fp)

    @staticmethod
    def load(path):
        with open(path) as fp:
            data = json.load(fp)
            if data["spec"]:
                data["spec"] = PackageSpec(**data["spec"])
            return PackageMetaData(**data)

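A brief, hedged illustration of the version handling in to_semver() above; the inputs are examples and the expected outputs are inferred from the code, not taken from this diff:

    print(PackageMetaData.to_semver("1.2"))         # 1.2.0  (coerced to full SemVer)
    print(PackageMetaData.to_semver("a2ebfd7c0f"))  # 0.0.0+sha.a2ebfd7c0f  (commit hash)
    PackageMetaData.to_semver("not/semver", force=False, raise_exception=True)  # raises ValueError
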
class PackageSourceItem(object):

    METAFILE_NAME = ".piopm"

    def __init__(self, path, metadata=None):
        self.path = path
        self.metadata = metadata
        if not self.metadata and self.exists():
            self.metadata = self.load_meta()

    def __repr__(self):
        return "PackageSourceItem <path={path} metadata={metadata}>".format(
            path=self.path, metadata=self.metadata
        )

    def __eq__(self, other):
        return all([self.path == other.path, self.metadata == other.metadata])

    def exists(self):
        return os.path.isdir(self.path)

    def get_safe_dirname(self):
        assert self.metadata
        return re.sub(r"[^\da-z\_\-\. ]", "_", self.metadata.name, flags=re.I)

    def get_metafile_locations(self):
        return [
            os.path.join(self.path, ".git"),
            os.path.join(self.path, ".hg"),
            os.path.join(self.path, ".svn"),
            self.path,
        ]

    def load_meta(self):
        assert self.exists()
        for location in self.get_metafile_locations():
            manifest_path = os.path.join(location, self.METAFILE_NAME)
            if os.path.isfile(manifest_path):
                return PackageMetaData.load(manifest_path)
        return None

    def dump_meta(self):
        assert self.exists()
        location = None
        for location in self.get_metafile_locations():
            if os.path.isdir(location):
                break
        assert location
        return self.metadata.dump(os.path.join(location, self.METAFILE_NAME))

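The ".piopm" side file written by dump_meta() is what later lets an installed package directory be identified without re-parsing its manifest. A hedged sketch of the round trip, using a hypothetical package directory that is assumed to exist already (both dump_meta() and load_meta() assert exists()):

    # Hypothetical path; the directory must already exist on disk.
    item = PackageSourceItem("/tmp/pio-storage/foo")
    item.metadata = PackageMetaData(
        type="library", name="foo", version="3.6.0", spec=PackageSpec("foo @ ^3")
    )
    item.dump_meta()  # writes /tmp/pio-storage/foo/.piopm (or inside .git/.hg/.svn when present)
    print(PackageSourceItem("/tmp/pio-storage/foo").metadata)  # reloaded automatically in __init__
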
@ -23,7 +23,8 @@ from platformio import fs
from platformio.package.exception import PackageException
from platformio.package.manifest.parser import ManifestFileType, ManifestParserFactory
from platformio.package.manifest.schema import ManifestSchema
from platformio.unpacker import FileUnpacker
from platformio.package.meta import PackageSourceItem
from platformio.package.unpack import FileUnpacker


class PackagePacker(object):
@ -36,6 +37,7 @@ class PackagePacker(object):
        ".svn/",
        ".pio/",
        "**/.pio/",
        PackageSourceItem.METAFILE_NAME,
    ]
    INCLUDE_DEFAULT = ManifestFileType.items().values()

@ -1,169 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tarfile

from platformio.compat import get_object_members, string_types
from platformio.package.manifest.parser import ManifestFileType


class PackageType(object):
    LIBRARY = "library"
    PLATFORM = "platform"
    TOOL = "tool"

    @classmethod
    def items(cls):
        return get_object_members(cls)

    @classmethod
    def get_manifest_map(cls):
        return {
            cls.PLATFORM: (ManifestFileType.PLATFORM_JSON,),
            cls.LIBRARY: (
                ManifestFileType.LIBRARY_JSON,
                ManifestFileType.LIBRARY_PROPERTIES,
                ManifestFileType.MODULE_JSON,
            ),
            cls.TOOL: (ManifestFileType.PACKAGE_JSON,),
        }

    @classmethod
    def from_archive(cls, path):
        assert path.endswith("tar.gz")
        manifest_map = cls.get_manifest_map()
        with tarfile.open(path, mode="r:gz") as tf:
            for t in sorted(cls.items().values()):
                for manifest in manifest_map[t]:
                    try:
                        if tf.getmember(manifest):
                            return t
                    except KeyError:
                        pass
        return None


class PackageSpec(object):
    def __init__(  # pylint: disable=redefined-builtin,too-many-arguments
        self, raw=None, ownername=None, id=None, name=None, requirements=None, url=None
    ):
        self.ownername = ownername
        self.id = id
        self.name = name
        self.requirements = requirements
        self.url = url

        self._parse(raw)

    def __repr__(self):
        return (
            "PackageSpec <ownername={ownername} id={id} name={name} "
            "requirements={requirements} url={url}>".format(
                ownername=self.ownername,
                id=self.id,
                name=self.name,
                requirements=self.requirements,
                url=self.url,
            )
        )

    def __eq__(self, other):
        return all(
            [
                self.ownername == other.ownername,
                self.id == other.id,
                self.name == other.name,
                self.requirements == other.requirements,
                self.url == other.url,
            ]
        )

    def _parse(self, raw):
        if raw is None:
            return
        if not isinstance(raw, string_types):
            raw = str(raw)
        raw = raw.strip()

        parsers = (
            self._parse_requirements,
            self._parse_fixed_name,
            self._parse_id,
            self._parse_ownername,
            self._parse_url,
        )
        for parser in parsers:
            if raw is None:
                break
            raw = parser(raw)

        # if name is not fixed, parse it from URL
        if not self.name and self.url:
            self.name = self._parse_name_from_url(self.url)
        elif raw:
            # the leftover is a package name
            self.name = raw

    def _parse_requirements(self, raw):
        if "@" not in raw:
            return raw
        tokens = raw.rsplit("@", 1)
        if any(s in tokens[1] for s in (":", "/")):
            return raw
        self.requirements = tokens[1].strip()
        return tokens[0].strip()

    def _parse_fixed_name(self, raw):
        if "=" not in raw or raw.startswith("id="):
            return raw
        tokens = raw.split("=", 1)
        if "/" in tokens[0]:
            return raw
        self.name = tokens[0].strip()
        return tokens[1].strip()

    def _parse_id(self, raw):
        if raw.isdigit():
            self.id = int(raw)
            return None
        if raw.startswith("id="):
            return self._parse_id(raw[3:])
        return raw

    def _parse_ownername(self, raw):
        if raw.count("/") != 1 or "@" in raw:
            return raw
        tokens = raw.split("/", 1)
        self.ownername = tokens[0].strip()
        self.name = tokens[1].strip()
        return None

    def _parse_url(self, raw):
        if not any(s in raw for s in ("@", ":", "/")):
            return raw
        self.url = raw.strip()
        return None

    @staticmethod
    def _parse_name_from_url(url):
        if url.endswith("/"):
            url = url[:-1]
        for c in ("#", "?"):
            if c in url:
                url = url[: url.index(c)]
        name = os.path.basename(url)
        if "." in name:
            return name.split(".", 1)[0].strip()
        return name

@ -19,10 +19,19 @@ from zipfile import ZipFile

import click

from platformio import exception, util
from platformio import util
from platformio.package.exception import PackageException


class ArchiveBase(object):
class ExtractArchiveItemError(PackageException):

    MESSAGE = (
        "Could not extract `{0}` to `{1}`. Try to disable antivirus "
        "tool or check this solution -> http://bit.ly/faq-package-manager"
    )


class BaseArchiver(object):
    def __init__(self, arhfileobj):
        self._afo = arhfileobj

@ -46,9 +55,9 @@ class ArchiveBase(object):
        self._afo.close()


class TARArchive(ArchiveBase):
class TARArchiver(BaseArchiver):
    def __init__(self, archpath):
        super(TARArchive, self).__init__(tarfile_open(archpath))
        super(TARArchiver, self).__init__(tarfile_open(archpath))

    def get_items(self):
        return self._afo.getmembers()
@ -79,7 +88,7 @@ class TARArchive(ArchiveBase):
            self.is_link(item) and self.is_bad_link(item, dest_dir),
        ]
        if not any(bad_conds):
            super(TARArchive, self).extract_item(item, dest_dir)
            super(TARArchiver, self).extract_item(item, dest_dir)
        else:
            click.secho(
                "Blocked insecure item `%s` from TAR archive" % item.name,
@ -88,9 +97,9 @@ class TARArchive(ArchiveBase):
            )


class ZIPArchive(ArchiveBase):
class ZIPArchiver(BaseArchiver):
    def __init__(self, archpath):
        super(ZIPArchive, self).__init__(ZipFile(archpath))
        super(ZIPArchiver, self).__init__(ZipFile(archpath))

    @staticmethod
    def preserve_permissions(item, dest_dir):
@ -121,48 +130,59 @@ class ZIPArchive(ArchiveBase):


class FileUnpacker(object):
    def __init__(self, archpath):
        self.archpath = archpath
        self._unpacker = None
    def __init__(self, path):
        self.path = path
        self._archiver = None

    def _init_archiver(self):
        magic_map = {
            b"\x1f\x8b\x08": TARArchiver,
            b"\x42\x5a\x68": TARArchiver,
            b"\x50\x4b\x03\x04": ZIPArchiver,
        }
        magic_len = max(len(k) for k in magic_map)
        with open(self.path, "rb") as fp:
            data = fp.read(magic_len)
        for magic, archiver in magic_map.items():
            if data.startswith(magic):
                return archiver(self.path)
        raise PackageException("Unknown archive type '%s'" % self.path)

    def __enter__(self):
        if self.archpath.lower().endswith((".gz", ".bz2", ".tar")):
            self._unpacker = TARArchive(self.archpath)
        elif self.archpath.lower().endswith(".zip"):
            self._unpacker = ZIPArchive(self.archpath)
        if not self._unpacker:
            raise exception.UnsupportedArchiveType(self.archpath)
        self._archiver = self._init_archiver()
        return self

    def __exit__(self, *args):
        if self._unpacker:
            self._unpacker.close()
        if self._archiver:
            self._archiver.close()

    def unpack(
        self, dest_dir=".", with_progress=True, check_unpacked=True, silent=False
        self, dest_dir=None, with_progress=True, check_unpacked=True, silent=False
    ):
        assert self._unpacker
        assert self._archiver
        if not dest_dir:
            dest_dir = os.getcwd()
        if not with_progress or silent:
            if not silent:
                click.echo("Unpacking...")
            for item in self._unpacker.get_items():
                self._unpacker.extract_item(item, dest_dir)
            for item in self._archiver.get_items():
                self._archiver.extract_item(item, dest_dir)
        else:
            items = self._unpacker.get_items()
            items = self._archiver.get_items()
            with click.progressbar(items, label="Unpacking") as pb:
                for item in pb:
                    self._unpacker.extract_item(item, dest_dir)
                    self._archiver.extract_item(item, dest_dir)

        if not check_unpacked:
            return True

        # check on disk
        for item in self._unpacker.get_items():
            filename = self._unpacker.get_item_filename(item)
        for item in self._archiver.get_items():
            filename = self._archiver.get_item_filename(item)
            item_path = os.path.join(dest_dir, filename)
            try:
                if not self._unpacker.is_link(item) and not os.path.exists(item_path):
                    raise exception.ExtractArchiveItemError(filename, dest_dir)
                if not self._archiver.is_link(item) and not os.path.exists(item_path):
                    raise ExtractArchiveItemError(filename, dest_dir)
            except NotImplementedError:
                pass
        return True

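A hedged usage sketch of the reworked unpacker (the archive path is hypothetical): the archiver is now selected by the file's magic bytes in _init_archiver() rather than by its extension.

    # Hypothetical archive path; gzip/bzip2 tarballs and ZIPs are detected by magic bytes.
    with FileUnpacker("/tmp/foo-1.0.0.tar.gz") as fu:
        fu.unpack(dest_dir="/tmp/foo", silent=True)
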
@ -17,7 +17,11 @@ from os.path import join
from subprocess import CalledProcessError, check_call
from sys import modules

from platformio.exception import PlatformioException, UserSideException
from platformio.package.exception import (
    PackageException,
    PlatformioException,
    UserSideException,
)
from platformio.proc import exec_command

try:
@ -26,9 +30,13 @@ except ImportError:
    from urlparse import urlparse


class VCSBaseException(PackageException):
    pass


class VCSClientFactory(object):
    @staticmethod
    def newClient(src_dir, remote_url, silent=False):
    def new(src_dir, remote_url, silent=False):
        result = urlparse(remote_url)
        type_ = result.scheme
        tag = None
@ -41,12 +49,15 @@ class VCSClientFactory(object):
        if "#" in remote_url:
            remote_url, tag = remote_url.rsplit("#", 1)
        if not type_:
            raise PlatformioException("VCS: Unknown repository type %s" % remote_url)
        obj = getattr(modules[__name__], "%sClient" % type_.title())(
            src_dir, remote_url, tag, silent
        )
        assert isinstance(obj, VCSClientBase)
        return obj
            raise VCSBaseException("VCS: Unknown repository type %s" % remote_url)
        try:
            obj = getattr(modules[__name__], "%sClient" % type_.title())(
                src_dir, remote_url, tag, silent
            )
            assert isinstance(obj, VCSClientBase)
            return obj
        except (AttributeError, AssertionError):
            raise VCSBaseException("VCS: Unknown repository type %s" % remote_url)


class VCSClientBase(object):
@ -101,7 +112,7 @@ class VCSClientBase(object):
            check_call(args, **kwargs)
            return True
        except CalledProcessError as e:
            raise PlatformioException("VCS: Could not process command %s" % e.cmd)
            raise VCSBaseException("VCS: Could not process command %s" % e.cmd)

    def get_cmd_output(self, args, **kwargs):
        args = [self.command] + args
@ -110,7 +121,7 @@ class VCSClientBase(object):
        result = exec_command(args, **kwargs)
        if result["returncode"] == 0:
            return result["out"].strip()
        raise PlatformioException(
        raise VCSBaseException(
            "VCS: Could not receive an output from `%s` command (%s)" % (args, result)
        )

@ -227,7 +238,6 @@ class SvnClient(VCSClientBase):
        return self.run_cmd(args)

    def update(self):

        args = ["update"]
        return self.run_cmd(args)

@ -239,4 +249,4 @@ class SvnClient(VCSClientBase):
            line = line.strip()
            if line.startswith("Revision:"):
                return line.split(":", 1)[1].strip()
        raise PlatformioException("Could not detect current SVN revision")
        raise VCSBaseException("Could not detect current SVN revision")

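For context, a hedged sketch of the renamed factory entry point (the directory and URL are illustrative, and the directory is assumed to hold an existing checkout); the URL scheme selects the concrete client class, and unknown repository types now surface as VCSBaseException:

    # Illustrative values only; "git+" selects GitClient, "#6.2" is parsed as the tag.
    vcs = VCSClientFactory.new("/tmp/pkg-src", "git+https://github.com/gioblu/PJON.git#6.2")
    print(vcs.get_current_revision())  # short sha of the checkout, e.g. "07fe9aa"
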
@ -371,7 +371,7 @@ PING_REMOTE_HOSTS = [
]


@memoized(expire="5s")
@memoized(expire="10s")
def _internet_on():
    timeout = 2
    socket.setdefaulttimeout(timeout)

@ -15,7 +15,7 @@
from os.path import isfile, join

from platformio.commands.ci import cli as cmd_ci
from platformio.commands.lib import cli as cmd_lib
from platformio.commands.lib.command import cli as cmd_lib


def test_ci_empty(clirunner):

@ -13,332 +13,184 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import re
|
||||
import os
|
||||
|
||||
from platformio import exception
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib import cli as cmd_lib
|
||||
import semantic_version
|
||||
|
||||
PlatformioCLI.leftover_args = ["--json-output"] # hook for click
|
||||
from platformio.clients.registry import RegistryClient
|
||||
from platformio.commands.lib.command import cli as cmd_lib
|
||||
from platformio.package.meta import PackageType
|
||||
from platformio.package.vcsclient import VCSClientFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
def test_search(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["search", "DHT22"])
|
||||
def test_saving_deps(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory):
|
||||
regclient = RegistryClient()
|
||||
project_dir = tmpdir_factory.mktemp("project")
|
||||
project_dir.join("platformio.ini").write(
|
||||
"""
|
||||
[env]
|
||||
lib_deps = ArduinoJson
|
||||
|
||||
[env:one]
|
||||
board = devkit
|
||||
|
||||
[env:two]
|
||||
framework = foo
|
||||
lib_deps =
|
||||
CustomLib
|
||||
ArduinoJson @ 5.10.1
|
||||
"""
|
||||
)
|
||||
result = clirunner.invoke(cmd_lib, ["-d", str(project_dir), "install", "64"])
|
||||
validate_cliresult(result)
|
||||
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
|
||||
assert int(match.group(1)) > 2
|
||||
aj_pkg_data = regclient.get_package(PackageType.LIBRARY, "bblanchon", "ArduinoJson")
|
||||
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
|
||||
assert config.get("env:one", "lib_deps") == [
|
||||
"bblanchon/ArduinoJson@^%s" % aj_pkg_data["version"]["name"]
|
||||
]
|
||||
assert config.get("env:two", "lib_deps") == [
|
||||
"CustomLib",
|
||||
"bblanchon/ArduinoJson@^%s" % aj_pkg_data["version"]["name"],
|
||||
]
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["search", "DHT22", "--platform=timsp430"])
|
||||
# ensure "build" version without NPM spec
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
["-d", str(project_dir), "-e", "one", "install", "mbed-sam-grove/LinkedList"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
|
||||
assert int(match.group(1)) > 1
|
||||
ll_pkg_data = regclient.get_package(
|
||||
PackageType.LIBRARY, "mbed-sam-grove", "LinkedList"
|
||||
)
|
||||
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
|
||||
assert config.get("env:one", "lib_deps") == [
|
||||
"bblanchon/ArduinoJson@^%s" % aj_pkg_data["version"]["name"],
|
||||
"mbed-sam-grove/LinkedList@%s" % ll_pkg_data["version"]["name"],
|
||||
]
|
||||
|
||||
|
||||
def test_global_install_registry(clirunner, validate_cliresult, isolated_pio_core):
|
||||
# check external package via Git repo
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"-d",
|
||||
str(project_dir),
|
||||
"-e",
|
||||
"one",
|
||||
"install",
|
||||
"64",
|
||||
"ArduinoJson@~5.10.0",
|
||||
"547@2.2.4",
|
||||
"AsyncMqttClient@<=0.8.2",
|
||||
"Adafruit PN532@1.2.0",
|
||||
"https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3 @ 0.8.3",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
|
||||
assert len(config.get("env:one", "lib_deps")) == 3
|
||||
assert config.get("env:one", "lib_deps")[2] == (
|
||||
"https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3 @ 0.8.3"
|
||||
)
|
||||
|
||||
# install unknown library
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "install", "Unknown"])
|
||||
assert result.exit_code != 0
|
||||
assert isinstance(result.exception, exception.LibNotFound)
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"ArduinoJson",
|
||||
"ArduinoJson@5.10.1",
|
||||
"NeoPixelBus",
|
||||
"AsyncMqttClient",
|
||||
"ESPAsyncTCP",
|
||||
"AsyncTCP",
|
||||
"Adafruit PN532",
|
||||
"Adafruit BusIO",
|
||||
# test uninstalling
|
||||
result = clirunner.invoke(
|
||||
cmd_lib, ["-d", str(project_dir), "uninstall", "ArduinoJson"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
|
||||
assert len(config.get("env:one", "lib_deps")) == 2
|
||||
assert len(config.get("env:two", "lib_deps")) == 1
|
||||
assert config.get("env:one", "lib_deps") == [
|
||||
"mbed-sam-grove/LinkedList@%s" % ll_pkg_data["version"]["name"],
|
||||
"https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3 @ 0.8.3",
|
||||
]
|
||||
assert set(items1) == set(items2)
|
||||
|
||||
# test list
|
||||
result = clirunner.invoke(cmd_lib, ["-d", str(project_dir), "list"])
|
||||
validate_cliresult(result)
|
||||
assert "Version: 0.8.3+sha." in result.stdout
|
||||
assert (
|
||||
"Source: git+https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3"
|
||||
in result.stdout
|
||||
)
|
||||
result = clirunner.invoke(
|
||||
cmd_lib, ["-d", str(project_dir), "list", "--json-output"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
data = {}
|
||||
for key, value in json.loads(result.stdout).items():
|
||||
data[os.path.basename(key)] = value
|
||||
ame_lib = next(
|
||||
item for item in data["one"] if item["name"] == "AsyncMqttClient-esphome"
|
||||
)
|
||||
ame_vcs = VCSClientFactory.new(ame_lib["__pkg_dir"], ame_lib["__src_url"])
|
||||
assert data["two"] == []
|
||||
assert "__pkg_dir" in data["one"][0]
|
||||
assert (
|
||||
ame_lib["__src_url"]
|
||||
== "git+https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3"
|
||||
)
|
||||
assert ame_lib["version"] == ("0.8.3+sha.%s" % ame_vcs.get_current_revision())
|
||||
|
||||
|
||||
def test_global_install_archive(clirunner, validate_cliresult, isolated_pio_core):
|
||||
def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory):
|
||||
storage_dir = tmpdir_factory.mktemp("test-updates")
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
["-d", str(storage_dir), "install", "ArduinoJson @ 5.10.1", "Blynk @ ~0.5.0"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
result = clirunner.invoke(
|
||||
cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "--json-output"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
outdated = json.loads(result.stdout)
|
||||
assert len(outdated) == 2
|
||||
# ArduinoJson
|
||||
assert outdated[0]["version"] == "5.10.1"
|
||||
assert outdated[0]["versionWanted"] is None
|
||||
assert semantic_version.Version(
|
||||
outdated[0]["versionLatest"]
|
||||
) > semantic_version.Version("6.16.0")
|
||||
# Blynk
|
||||
assert outdated[1]["version"] == "0.5.4"
|
||||
assert outdated[1]["versionWanted"] is None
|
||||
assert semantic_version.Version(
|
||||
outdated[1]["versionLatest"]
|
||||
) > semantic_version.Version("0.6.0")
|
||||
|
||||
# check with spec
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
|
||||
"SomeLib=http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
|
||||
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
|
||||
"-d",
|
||||
str(storage_dir),
|
||||
"update",
|
||||
"--dry-run",
|
||||
"--json-output",
|
||||
"ArduinoJson @ ^5",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
# incorrect requirements
|
||||
outdated = json.loads(result.stdout)
|
||||
assert outdated[0]["version"] == "5.10.1"
|
||||
assert outdated[0]["versionWanted"] == "5.13.4"
|
||||
assert semantic_version.Version(
|
||||
outdated[0]["versionLatest"]
|
||||
) > semantic_version.Version("6.16.0")
|
||||
# update with spec
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@1.2.3",
|
||||
],
|
||||
cmd_lib, ["-d", str(storage_dir), "update", "--silent", "ArduinoJson @ ^5.10.1"]
|
||||
)
|
||||
assert result.exit_code != 0
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = ["ArduinoJson", "SomeLib_ID54", "OneWire", "ESP32WebServer"]
|
||||
assert set(items1) >= set(items2)
|
||||
|
||||
|
||||
def test_global_install_repository(clirunner, validate_cliresult, isolated_pio_core):
|
||||
validate_cliresult(result)
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/gioblu/PJON.git#3.0",
|
||||
"https://github.com/gioblu/PJON.git#6.2",
|
||||
"https://github.com/bblanchon/ArduinoJson.git",
|
||||
"https://gitlab.com/ivankravets/rs485-nodeproto.git",
|
||||
"https://github.com/platformio/platformio-libmirror.git",
|
||||
# "https://developer.mbed.org/users/simon/code/TextLCD/",
|
||||
"knolleary/pubsubclient#bef58148582f956dfa772687db80c44e2279a163",
|
||||
],
|
||||
cmd_lib, ["-d", str(storage_dir), "list", "--json-output"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"PJON",
|
||||
"PJON@src-79de467ebe19de18287becff0a1fb42d",
|
||||
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
|
||||
"rs485-nodeproto",
|
||||
"platformio-libmirror",
|
||||
"PubSubClient",
|
||||
]
|
||||
assert set(items1) >= set(items2)
|
||||
items = json.loads(result.stdout)
|
||||
assert len(items) == 2
|
||||
assert items[0]["version"] == "5.13.4"
|
||||
assert items[1]["version"] == "0.5.4"
|
||||
|
||||
|
||||
def test_install_duplicates(clirunner, validate_cliresult, without_internet):
|
||||
# registry
|
||||
# Check incompatible
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
["-g", "install", "http://dl.platformio.org/libraries/archives/0/9540.tar.gz"],
|
||||
cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "ArduinoJson @ ^5"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "is already installed" in result.output
|
||||
|
||||
# archive
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "is already installed" in result.output
|
||||
|
||||
# repository
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
["-g", "install", "https://github.com/platformio/platformio-libmirror.git"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "is already installed" in result.output
|
||||
|
||||
|
||||
def test_global_lib_list(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "list"])
|
||||
validate_cliresult(result)
|
||||
assert all(
|
||||
[
|
||||
n in result.output
|
||||
for n in (
|
||||
"Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
|
||||
"Version: 5.10.1",
|
||||
"Source: git+https://github.com/gioblu/PJON.git#3.0",
|
||||
"Version: 1fb26fd",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
|
||||
assert all(
|
||||
[
|
||||
n in result.output
|
||||
for n in (
|
||||
"__pkg_dir",
|
||||
'"__src_url": "git+https://gitlab.com/ivankravets/rs485-nodeproto.git"',
|
||||
'"version": "5.10.1"',
|
||||
)
|
||||
]
|
||||
)
|
||||
items1 = [i["name"] for i in json.loads(result.output)]
|
||||
items2 = [
|
||||
"ESP32WebServer",
|
||||
"ArduinoJson",
|
||||
"ArduinoJson",
|
||||
"ArduinoJson",
|
||||
"ArduinoJson",
|
||||
"AsyncMqttClient",
|
||||
"AsyncTCP",
|
||||
"SomeLib",
|
||||
"ESPAsyncTCP",
|
||||
"NeoPixelBus",
|
||||
"OneWire",
|
||||
"PJON",
|
||||
"PJON",
|
||||
"PubSubClient",
|
||||
"Adafruit PN532",
|
||||
"Adafruit BusIO",
|
||||
"platformio-libmirror",
|
||||
"rs485-nodeproto",
|
||||
]
|
||||
assert sorted(items1) == sorted(items2)
|
||||
|
||||
versions1 = [
|
||||
"{name}@{version}".format(**item) for item in json.loads(result.output)
|
||||
]
|
||||
versions2 = [
|
||||
"ArduinoJson@5.8.2",
|
||||
"ArduinoJson@5.10.1",
|
||||
"AsyncMqttClient@0.8.2",
|
||||
"NeoPixelBus@2.2.4",
|
||||
"PJON@07fe9aa",
|
||||
"PJON@1fb26fd",
|
||||
"PubSubClient@bef5814",
|
||||
"Adafruit PN532@1.2.0",
|
||||
]
|
||||
assert set(versions1) >= set(versions2)
|
||||
|
||||
|
||||
def test_global_lib_update_check(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(
|
||||
cmd_lib, ["-g", "update", "--only-check", "--json-output"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
output = json.loads(result.output)
|
||||
assert set(["ESPAsyncTCP", "NeoPixelBus"]) == set([l["name"] for l in output])
|
||||
|
||||
|
||||
def test_global_lib_update(clirunner, validate_cliresult):
|
||||
# update library using package directory
|
||||
result = clirunner.invoke(
|
||||
cmd_lib, ["-g", "update", "NeoPixelBus", "--only-check", "--json-output"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
outdated = json.loads(result.output)
assert len(outdated) == 1
assert "__pkg_dir" in outdated[0]
result = clirunner.invoke(cmd_lib, ["-g", "update", outdated[0]["__pkg_dir"]])
|
||||
validate_cliresult(result)
|
||||
assert "Uninstalling NeoPixelBus @ 2.2.4" in result.output
|
||||
|
||||
# update rest libraries
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "update"])
|
||||
validate_cliresult(result)
|
||||
assert result.output.count("[Detached]") == 5
|
||||
assert result.output.count("[Up-to-date]") == 12
|
||||
|
||||
# update unknown library
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
|
||||
assert result.exit_code != 0
|
||||
assert isinstance(result.exception, exception.UnknownPackage)
|
||||
|
||||
|
||||
def test_global_lib_uninstall(clirunner, validate_cliresult, isolated_pio_core):
|
||||
# uninstall using package directory
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
items = json.loads(result.output)
|
||||
items = sorted(items, key=lambda item: item["__pkg_dir"])
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", items[0]["__pkg_dir"]])
|
||||
validate_cliresult(result)
|
||||
assert ("Uninstalling %s" % items[0]["name"]) in result.output
|
||||
|
||||
# uninstall the rest libraries
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"uninstall",
|
||||
"OneWire",
|
||||
"https://github.com/bblanchon/ArduinoJson.git",
|
||||
"ArduinoJson@!=5.6.7",
|
||||
"Adafruit PN532",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"rs485-nodeproto",
|
||||
"platformio-libmirror",
|
||||
"PubSubClient",
|
||||
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
|
||||
"ESPAsyncTCP",
|
||||
"ESP32WebServer",
|
||||
"NeoPixelBus",
|
||||
"PJON",
|
||||
"AsyncMqttClient",
|
||||
"ArduinoJson",
|
||||
"SomeLib_ID54",
|
||||
"PJON@src-79de467ebe19de18287becff0a1fb42d",
|
||||
"AsyncTCP",
|
||||
]
|
||||
assert set(items1) == set(items2)
|
||||
|
||||
# uninstall unknown library
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", "Unknown"])
|
||||
assert result.exit_code != 0
|
||||
assert isinstance(result.exception, exception.UnknownPackage)
|
||||
|
||||
|
||||
def test_lib_show(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["show", "64"])
|
||||
validate_cliresult(result)
|
||||
assert all([s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
|
||||
result = clirunner.invoke(cmd_lib, ["show", "OneWire", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
assert "OneWire" in result.output
|
||||
|
||||
|
||||
def test_lib_builtin(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["builtin"])
|
||||
validate_cliresult(result)
|
||||
result = clirunner.invoke(cmd_lib, ["builtin", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
def test_lib_stats(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["stats"])
|
||||
validate_cliresult(result)
|
||||
assert all(
|
||||
[
|
||||
s in result.output
|
||||
for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
|
||||
]
|
||||
)
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
assert set(
|
||||
[
|
||||
"dlweek",
|
||||
"added",
|
||||
"updated",
|
||||
"topkeywords",
|
||||
"dlmonth",
|
||||
"dlday",
|
||||
"lastkeywords",
|
||||
]
|
||||
) == set(json.loads(result.output).keys())
|
||||
assert "Incompatible" in result.stdout
|
||||
|
tests/commands/test_lib_complex.py (new file, 347 lines)
@ -0,0 +1,347 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import re
|
||||
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib.command import cli as cmd_lib
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
|
||||
PlatformioCLI.leftover_args = ["--json-output"] # hook for click
|
||||
|
||||
|
||||
def test_search(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["search", "DHT22"])
|
||||
validate_cliresult(result)
|
||||
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
|
||||
assert int(match.group(1)) > 2
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["search", "DHT22", "--platform=timsp430"])
|
||||
validate_cliresult(result)
|
||||
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
|
||||
assert int(match.group(1)) > 1
|
||||
|
||||
|
||||
def test_global_install_registry(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"64",
|
||||
"ArduinoJson@~5.10.0",
|
||||
"547@2.2.4",
|
||||
"AsyncMqttClient@<=0.8.2",
|
||||
"Adafruit PN532@1.2.0",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
# install unknown library
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "install", "Unknown"])
|
||||
assert result.exit_code != 0
|
||||
assert isinstance(result.exception, UnknownPackageError)
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"ArduinoJson",
|
||||
"ArduinoJson@5.10.1",
|
||||
"NeoPixelBus",
|
||||
"AsyncMqttClient",
|
||||
"ESPAsyncTCP",
|
||||
"AsyncTCP",
|
||||
"Adafruit PN532",
|
||||
"Adafruit BusIO",
|
||||
]
|
||||
assert set(items1) == set(items2)
|
||||
|
||||
|
||||
def test_global_install_archive(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
|
||||
"SomeLib=https://dl.registry.platformio.org/download/milesburton/library/DallasTemperature/3.8.1/DallasTemperature-3.8.1.tar.gz",
|
||||
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
# incorrect requirements
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@1.2.3",
|
||||
],
|
||||
)
|
||||
assert result.exit_code != 0
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"ArduinoJson",
|
||||
"SomeLib",
|
||||
"OneWire",
|
||||
"ESP32WebServer@src-a1a3c75631882b35702e71966ea694e8",
|
||||
]
|
||||
assert set(items1) >= set(items2)
|
||||
|
||||
|
||||
def test_global_install_repository(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/gioblu/PJON.git#3.0",
|
||||
"https://github.com/gioblu/PJON.git#6.2",
|
||||
"https://github.com/bblanchon/ArduinoJson.git",
|
||||
"https://github.com/platformio/platformio-libmirror.git",
|
||||
# "https://developer.mbed.org/users/simon/code/TextLCD/",
|
||||
"https://github.com/knolleary/pubsubclient#bef58148582f956dfa772687db80c44e2279a163",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"PJON@src-1204e8bbd80de05e54e171b3a07bcc3f",
|
||||
"PJON@src-79de467ebe19de18287becff0a1fb42d",
|
||||
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
|
||||
"platformio-libmirror@src-b7e674cad84244c61b436fcea8f78377",
|
||||
"PubSubClient@src-98ec699a461a31615982e5adaaefadda",
|
||||
]
|
||||
assert set(items1) >= set(items2)
|
||||
|
||||
|
||||
def test_install_duplicates(clirunner, validate_cliresult, without_internet):
|
||||
# registry
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://dl.registry.platformio.org/download/milesburton/library/DallasTemperature/3.8.1/DallasTemperature-3.8.1.tar.gz",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "is already installed" in result.output
|
||||
|
||||
# archive
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"install",
|
||||
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "is already installed" in result.output
|
||||
|
||||
# repository
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
["-g", "install", "https://github.com/platformio/platformio-libmirror.git"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "is already installed" in result.output
|
||||
|
||||
|
||||
def test_global_lib_list(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "list"])
|
||||
validate_cliresult(result)
|
||||
assert all(
|
||||
[
|
||||
n in result.output
|
||||
for n in (
|
||||
"Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
|
||||
"Version: 5.10.1",
|
||||
"Source: git+https://github.com/gioblu/PJON.git#3.0",
|
||||
"Version: 3.0.0+sha.1fb26fd",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
|
||||
assert all(
|
||||
[
|
||||
n in result.output
|
||||
for n in (
|
||||
"__pkg_dir",
|
||||
'"__src_url": "git+https://github.com/gioblu/PJON.git#6.2"',
|
||||
'"version": "5.10.1"',
|
||||
)
|
||||
]
|
||||
)
|
||||
items1 = [i["name"] for i in json.loads(result.output)]
|
||||
items2 = [
|
||||
"Adafruit BusIO",
|
||||
"Adafruit PN532",
|
||||
"ArduinoJson",
|
||||
"ArduinoJson",
|
||||
"ArduinoJson",
|
||||
"ArduinoJson",
|
||||
"AsyncMqttClient",
|
||||
"AsyncTCP",
|
||||
"DallasTemperature",
|
||||
"ESP32WebServer",
|
||||
"ESPAsyncTCP",
|
||||
"NeoPixelBus",
|
||||
"OneWire",
|
||||
"PJON",
|
||||
"PJON",
|
||||
"platformio-libmirror",
|
||||
"PubSubClient",
|
||||
]
|
||||
assert sorted(items1) == sorted(items2)
|
||||
|
||||
versions1 = [
|
||||
"{name}@{version}".format(**item) for item in json.loads(result.output)
|
||||
]
|
||||
versions2 = [
|
||||
"ArduinoJson@5.8.2",
|
||||
"ArduinoJson@5.10.1",
|
||||
"AsyncMqttClient@0.8.2",
|
||||
"NeoPixelBus@2.2.4",
|
||||
"PJON@6.2.0+sha.07fe9aa",
|
||||
"PJON@3.0.0+sha.1fb26fd",
|
||||
"PubSubClient@2.6.0+sha.bef5814",
|
||||
"Adafruit PN532@1.2.0",
|
||||
]
|
||||
assert set(versions1) >= set(versions2)
|
||||
|
||||
|
||||
def test_global_lib_update_check(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "update", "--dry-run", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
output = json.loads(result.output)
|
||||
assert set(["ESPAsyncTCP", "NeoPixelBus"]) == set([lib["name"] for lib in output])
|
||||
|
||||
|
||||
def test_global_lib_update(clirunner, validate_cliresult):
|
||||
# update library using package directory
|
||||
result = clirunner.invoke(
|
||||
cmd_lib, ["-g", "update", "NeoPixelBus", "--dry-run", "--json-output"]
|
||||
)
|
||||
validate_cliresult(result)
|
||||
outdated = json.loads(result.output)
assert len(outdated) == 1
assert "__pkg_dir" in outdated[0]
result = clirunner.invoke(cmd_lib, ["-g", "update", outdated[0]["__pkg_dir"]])
|
||||
validate_cliresult(result)
|
||||
assert "Removing NeoPixelBus @ 2.2.4" in result.output
|
||||
|
||||
# update rest libraries
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "update"])
|
||||
validate_cliresult(result)
|
||||
assert result.output.count("[Detached]") == 1
|
||||
assert result.output.count("[Up-to-date]") == 15
|
||||
|
||||
# update unknown library
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
|
||||
assert result.exit_code != 0
|
||||
assert isinstance(result.exception, UnknownPackageError)
|
||||
|
||||
|
||||
def test_global_lib_uninstall(clirunner, validate_cliresult, isolated_pio_core):
|
||||
# uninstall using package directory
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
items = json.loads(result.output)
|
||||
items = sorted(items, key=lambda item: item["__pkg_dir"])
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", items[0]["__pkg_dir"]])
|
||||
validate_cliresult(result)
|
||||
assert ("Removing %s" % items[0]["name"]) in result.output
|
||||
|
||||
# uninstall the rest libraries
|
||||
result = clirunner.invoke(
|
||||
cmd_lib,
|
||||
[
|
||||
"-g",
|
||||
"uninstall",
|
||||
"OneWire",
|
||||
"https://github.com/bblanchon/ArduinoJson.git",
|
||||
"ArduinoJson@!=5.6.7",
|
||||
"Adafruit PN532",
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()]
|
||||
items2 = [
|
||||
"ArduinoJson",
|
||||
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
|
||||
"AsyncMqttClient",
|
||||
"AsyncTCP",
|
||||
"ESP32WebServer@src-a1a3c75631882b35702e71966ea694e8",
|
||||
"ESPAsyncTCP",
|
||||
"NeoPixelBus",
|
||||
"PJON@src-1204e8bbd80de05e54e171b3a07bcc3f",
|
||||
"PJON@src-79de467ebe19de18287becff0a1fb42d",
|
||||
"platformio-libmirror@src-b7e674cad84244c61b436fcea8f78377",
|
||||
"PubSubClient@src-98ec699a461a31615982e5adaaefadda",
|
||||
"SomeLib",
|
||||
]
|
||||
assert set(items1) == set(items2)
|
||||
|
||||
# uninstall unknown library
|
||||
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", "Unknown"])
|
||||
assert result.exit_code != 0
|
||||
assert isinstance(result.exception, UnknownPackageError)
|
||||
|
||||
|
||||
def test_lib_show(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["show", "64"])
|
||||
validate_cliresult(result)
|
||||
assert all([s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
|
||||
result = clirunner.invoke(cmd_lib, ["show", "OneWire", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
assert "OneWire" in result.output
|
||||
|
||||
|
||||
def test_lib_builtin(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["builtin"])
|
||||
validate_cliresult(result)
|
||||
result = clirunner.invoke(cmd_lib, ["builtin", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
def test_lib_stats(clirunner, validate_cliresult):
|
||||
result = clirunner.invoke(cmd_lib, ["stats"])
|
||||
validate_cliresult(result)
|
||||
assert all(
|
||||
[
|
||||
s in result.output
|
||||
for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
|
||||
]
|
||||
)
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
assert set(
|
||||
[
|
||||
"dlweek",
|
||||
"added",
|
||||
"updated",
|
||||
"topkeywords",
|
||||
"dlmonth",
|
||||
"dlday",
|
||||
"lastkeywords",
|
||||
]
|
||||
) == set(json.loads(result.output).keys())
|
tests/package/test_manager.py (new file, 427 lines)
@ -0,0 +1,427 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import pytest
|
||||
import semantic_version
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.package.exception import (
|
||||
MissingPackageManifestError,
|
||||
UnknownPackageError,
|
||||
)
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.package.pack import PackagePacker
|
||||
|
||||
|
||||
def test_download(isolated_pio_core):
|
||||
url = "https://github.com/platformio/platformio-core/archive/v4.3.4.zip"
|
||||
checksum = "69d59642cb91e64344f2cdc1d3b98c5cd57679b5f6db7accc7707bd4c5d9664a"
|
||||
lm = LibraryPackageManager()
|
||||
archive_path = lm.download(url, checksum, silent=True)
|
||||
assert fs.calculate_file_hashsum("sha256", archive_path) == checksum
|
||||
lm.cleanup_expired_downloads()
|
||||
assert os.path.isfile(archive_path)
|
||||
# test outdated downloads
|
||||
lm.set_download_utime(archive_path, time.time() - lm.DOWNLOAD_CACHE_EXPIRE - 1)
|
||||
lm.cleanup_expired_downloads()
|
||||
assert not os.path.isfile(archive_path)
|
||||
# check that key is deleted from DB
|
||||
with open(lm.get_download_usagedb_path()) as fp:
|
||||
assert os.path.basename(archive_path) not in fp.read()
|
||||
|
||||
|
||||
def test_find_pkg_root(isolated_pio_core, tmpdir_factory):
|
||||
# has manifest
|
||||
pkg_dir = tmpdir_factory.mktemp("package-has-manifest")
|
||||
root_dir = pkg_dir.join("nested").mkdir().join("folder").mkdir()
|
||||
root_dir.join("platform.json").write("")
|
||||
pm = PlatformPackageManager()
|
||||
found_dir = pm.find_pkg_root(str(pkg_dir), spec=None)
|
||||
assert os.path.realpath(str(root_dir)) == os.path.realpath(found_dir)
|
||||
|
||||
# does not have manifest
|
||||
pkg_dir = tmpdir_factory.mktemp("package-does-not-have-manifest")
|
||||
pkg_dir.join("nested").mkdir().join("folder").mkdir().join("readme.txt").write("")
|
||||
pm = PlatformPackageManager()
|
||||
with pytest.raises(MissingPackageManifestError):
|
||||
pm.find_pkg_root(str(pkg_dir), spec=None)
|
||||
|
||||
# library package without manifest, should find source root
|
||||
pkg_dir = tmpdir_factory.mktemp("library-package-without-manifest")
|
||||
root_dir = pkg_dir.join("nested").mkdir().join("folder").mkdir()
|
||||
root_dir.join("src").mkdir().join("main.cpp").write("")
|
||||
root_dir.join("include").mkdir().join("main.h").write("")
|
||||
assert os.path.realpath(str(root_dir)) == os.path.realpath(
|
||||
LibraryPackageManager.find_library_root(str(pkg_dir))
|
||||
)
|
||||
|
||||
# library manager should create "library.json"
|
||||
lm = LibraryPackageManager()
|
||||
spec = PackageSpec("custom-name@1.0.0")
|
||||
pkg_root = lm.find_pkg_root(str(pkg_dir), spec)
|
||||
manifest_path = os.path.join(pkg_root, "library.json")
|
||||
assert os.path.realpath(str(root_dir)) == os.path.realpath(pkg_root)
|
||||
assert os.path.isfile(manifest_path)
|
||||
manifest = lm.load_manifest(pkg_root)
|
||||
assert manifest["name"] == "custom-name"
|
||||
assert "0.0.0" in str(manifest["version"])
|
||||
|
||||
|
||||
def test_build_legacy_spec(isolated_pio_core, tmpdir_factory):
|
||||
storage_dir = tmpdir_factory.mktemp("storage")
|
||||
pm = PlatformPackageManager(str(storage_dir))
|
||||
# test src manifest
|
||||
pkg1_dir = storage_dir.join("pkg-1").mkdir()
|
||||
pkg1_dir.join(".pio").mkdir().join(".piopkgmanager.json").write(
|
||||
"""
|
||||
{
|
||||
"name": "StreamSpy-0.0.1.tar",
|
||||
"url": "https://dl.platformio.org/e8936b7/StreamSpy-0.0.1.tar.gz",
|
||||
"requirements": null
|
||||
}
|
||||
"""
|
||||
)
|
||||
assert pm.build_legacy_spec(str(pkg1_dir)) == PackageSpec(
|
||||
name="StreamSpy-0.0.1.tar",
|
||||
url="https://dl.platformio.org/e8936b7/StreamSpy-0.0.1.tar.gz",
|
||||
)
|
||||
|
||||
# without src manifest
|
||||
pkg2_dir = storage_dir.join("pkg-2").mkdir()
|
||||
pkg2_dir.join("main.cpp").write("")
|
||||
with pytest.raises(MissingPackageManifestError):
|
||||
pm.build_legacy_spec(str(pkg2_dir))
|
||||
|
||||
# with package manifest
|
||||
pkg3_dir = storage_dir.join("pkg-3").mkdir()
|
||||
pkg3_dir.join("platform.json").write('{"name": "pkg3", "version": "1.2.0"}')
|
||||
assert pm.build_legacy_spec(str(pkg3_dir)) == PackageSpec(name="pkg3")
|
||||
|
||||
|
||||
def test_build_metadata(isolated_pio_core, tmpdir_factory):
|
||||
pm = PlatformPackageManager()
|
||||
vcs_revision = "a2ebfd7c0f"
|
||||
pkg_dir = tmpdir_factory.mktemp("package")
|
||||
|
||||
# test package without manifest
|
||||
with pytest.raises(MissingPackageManifestError):
|
||||
pm.load_manifest(str(pkg_dir))
|
||||
with pytest.raises(MissingPackageManifestError):
|
||||
pm.build_metadata(str(pkg_dir), PackageSpec("MyLib"))
|
||||
|
||||
# with manifest
|
||||
pkg_dir.join("platform.json").write(
|
||||
'{"name": "Dev-Platform", "version": "1.2.3-alpha.1"}'
|
||||
)
|
||||
metadata = pm.build_metadata(str(pkg_dir), PackageSpec("owner/platform-name"))
|
||||
assert metadata.name == "Dev-Platform"
|
||||
assert str(metadata.version) == "1.2.3-alpha.1"
|
||||
|
||||
# with vcs
|
||||
metadata = pm.build_metadata(
|
||||
str(pkg_dir), PackageSpec("owner/platform-name"), vcs_revision
|
||||
)
|
||||
assert str(metadata.version) == ("1.2.3-alpha.1+sha." + vcs_revision)
|
||||
assert metadata.version.build[1] == vcs_revision
|
||||
|
||||
|
||||
def test_install_from_url(isolated_pio_core, tmpdir_factory):
|
||||
tmp_dir = tmpdir_factory.mktemp("tmp")
|
||||
storage_dir = tmpdir_factory.mktemp("storage")
|
||||
lm = LibraryPackageManager(str(storage_dir))
|
||||
|
||||
# install from local directory
|
||||
src_dir = tmp_dir.join("local-lib-dir").mkdir()
|
||||
src_dir.join("main.cpp").write("")
|
||||
spec = PackageSpec("file://%s" % src_dir)
|
||||
pkg = lm.install(spec, silent=True)
|
||||
assert os.path.isfile(os.path.join(pkg.path, "main.cpp"))
|
||||
manifest = lm.load_manifest(pkg)
|
||||
assert manifest["name"] == "local-lib-dir"
|
||||
assert manifest["version"].startswith("0.0.0+")
|
||||
assert spec == pkg.metadata.spec
|
||||
|
||||
# install from local archive
|
||||
src_dir = tmp_dir.join("archive-src").mkdir()
|
||||
root_dir = src_dir.mkdir("root")
|
||||
root_dir.mkdir("src").join("main.cpp").write("#include <stdio.h>")
|
||||
root_dir.join("library.json").write(
|
||||
'{"name": "manifest-lib-name", "version": "2.0.0"}'
|
||||
)
|
||||
tarball_path = PackagePacker(str(src_dir)).pack(str(tmp_dir))
|
||||
spec = PackageSpec("file://%s" % tarball_path)
|
||||
pkg = lm.install(spec, silent=True)
|
||||
assert os.path.isfile(os.path.join(pkg.path, "src", "main.cpp"))
|
||||
assert pkg == lm.get_package(spec)
|
||||
assert spec == pkg.metadata.spec
|
||||
|
||||
# install from registry
|
||||
src_dir = tmp_dir.join("registry-1").mkdir()
|
||||
src_dir.join("library.properties").write(
|
||||
"""
|
||||
name = wifilib
|
||||
version = 5.2.7
|
||||
"""
|
||||
)
|
||||
spec = PackageSpec("company/wifilib @ ^5")
|
||||
pkg = lm.install_from_url("file://%s" % src_dir, spec)
|
||||
assert str(pkg.metadata.version) == "5.2.7"
|
||||
|
||||
|
||||
def test_install_from_registry(isolated_pio_core, tmpdir_factory):
|
||||
# Libraries
|
||||
lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
|
||||
# library with dependencies
|
||||
lm.install("AsyncMqttClient-esphome @ 0.8.4", silent=True)
|
||||
assert len(lm.get_installed()) == 3
|
||||
pkg = lm.get_package("AsyncTCP-esphome")
|
||||
assert pkg.metadata.spec.owner == "ottowinter"
|
||||
assert not lm.get_package("non-existing-package")
|
||||
# mbed library
|
||||
assert lm.install("wolfSSL", silent=True)
|
||||
assert len(lm.get_installed()) == 4
|
||||
# case sensitive author name
|
||||
assert lm.install("DallasTemperature", silent=True)
|
||||
assert lm.get_package("OneWire").metadata.version.major >= 2
|
||||
assert len(lm.get_installed()) == 6
|
||||
|
||||
# test conflicted names
|
||||
lm = LibraryPackageManager(str(tmpdir_factory.mktemp("conflicted-storage")))
|
||||
lm.install("4@2.6.1", silent=True)
|
||||
lm.install("5357@2.6.1", silent=True)
|
||||
assert len(lm.get_installed()) == 2
|
||||
|
||||
# Tools
|
||||
tm = ToolPackageManager(str(tmpdir_factory.mktemp("tool-storage")))
|
||||
pkg = tm.install("platformio/tool-stlink @ ~1.10400.0", silent=True)
|
||||
manifest = tm.load_manifest(pkg)
|
||||
assert tm.is_system_compatible(manifest.get("system"))
|
||||
assert util.get_systype() in manifest.get("system", [])
|
||||
|
||||
# Test unknown
|
||||
with pytest.raises(UnknownPackageError):
|
||||
tm.install("unknown-package-tool @ 9.1.1", silent=True)
|
||||
with pytest.raises(UnknownPackageError):
|
||||
tm.install("owner/unknown-package-tool", silent=True)
|
||||
|
||||
|
||||
def test_install_force(isolated_pio_core, tmpdir_factory):
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
    # install #64 ArduinoJson
    pkg = lm.install("64 @ ^5", silent=True)
    assert pkg.metadata.version.major == 5
    # try to install the latest without a version specification
    pkg = lm.install("64", silent=True)
    assert pkg.metadata.version.major == 5
    assert len(lm.get_installed()) == 1
    # re-install the latest
    pkg = lm.install(64, silent=True, force=True)
    assert len(lm.get_installed()) == 1
    assert pkg.metadata.version.major > 5


def test_get_installed(isolated_pio_core, tmpdir_factory):
    storage_dir = tmpdir_factory.mktemp("storage")
    lm = LibraryPackageManager(str(storage_dir))

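    # ".piopm" is the per-package metadata file (name, spec, version) written by the
    # package manager; get_installed() falls back to the manifest when it is missing.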
    # VCS package
    (
        storage_dir.join("pkg-vcs")
        .mkdir()
        .join(".git")
        .mkdir()
        .join(".piopm")
        .write(
            """
{
  "name": "pkg-via-vcs",
  "spec": {
    "id": null,
    "name": "pkg-via-vcs",
    "owner": null,
    "requirements": null,
    "url": "git+https://github.com/username/repo.git"
  },
  "type": "library",
  "version": "0.0.0+sha.1ea4d5e"
}
"""
        )
    )

    # package without metadata file
    (
        storage_dir.join("foo@3.4.5")
        .mkdir()
        .join("library.json")
        .write('{"name": "foo", "version": "3.4.5"}')
    )

    # package with metadata file
    foo_dir = storage_dir.join("foo").mkdir()
    foo_dir.join("library.json").write('{"name": "foo", "version": "3.6.0"}')
    foo_dir.join(".piopm").write(
        """
{
  "name": "foo",
  "spec": {
    "name": "foo",
    "owner": null,
    "requirements": "^3"
  },
  "type": "library",
  "version": "3.6.0"
}
"""
    )

    # invalid package
    storage_dir.join("invalid-package").mkdir().join("package.json").write(
        '{"name": "tool-scons", "version": "4.0.0"}'
    )

    installed = lm.get_installed()
    assert len(installed) == 3
    assert set(["pkg-via-vcs", "foo"]) == set(p.metadata.name for p in installed)
    assert str(lm.get_package("foo").metadata.version) == "3.6.0"


def test_uninstall(isolated_pio_core, tmpdir_factory):
    tmp_dir = tmpdir_factory.mktemp("tmp")
    storage_dir = tmpdir_factory.mktemp("storage")
    lm = LibraryPackageManager(str(storage_dir))

    # foo @ 1.0.0
    pkg_dir = tmp_dir.join("foo").mkdir()
    pkg_dir.join("library.json").write('{"name": "foo", "version": "1.0.0"}')
    foo_1_0_0_pkg = lm.install_from_url("file://%s" % pkg_dir, "foo")
    # foo @ 1.3.0
    pkg_dir = tmp_dir.join("foo-1.3.0").mkdir()
    pkg_dir.join("library.json").write('{"name": "foo", "version": "1.3.0"}')
    lm.install_from_url("file://%s" % pkg_dir, "foo")
    # bar
    pkg_dir = tmp_dir.join("bar").mkdir()
    pkg_dir.join("library.json").write('{"name": "bar", "version": "1.0.0"}')
    bar_pkg = lm.install("file://%s" % pkg_dir, silent=True)

    assert len(lm.get_installed()) == 3
    assert os.path.isdir(os.path.join(str(storage_dir), "foo"))
    assert os.path.isdir(os.path.join(str(storage_dir), "foo@1.0.0"))

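    # Package lookup is case-insensitive; uninstalling the active copy restores the
    # previously detached 1.0.0 from "foo@1.0.0" back into "foo".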
    # check detaching
    assert lm.uninstall("FOO", silent=True)
    assert len(lm.get_installed()) == 2
    assert os.path.isdir(os.path.join(str(storage_dir), "foo"))
    assert not os.path.isdir(os.path.join(str(storage_dir), "foo@1.0.0"))

    # uninstall the rest
    assert lm.uninstall(foo_1_0_0_pkg.path, silent=True)
    assert lm.uninstall(bar_pkg, silent=True)

    assert len(lm.get_installed()) == 0

    # test uninstall dependencies
    assert lm.install("AsyncMqttClient-esphome @ 0.8.4", silent=True)
    assert len(lm.get_installed()) == 3
    assert lm.uninstall("AsyncMqttClient-esphome", silent=True, skip_dependencies=True)
    assert len(lm.get_installed()) == 2

    lm = LibraryPackageManager(str(storage_dir))
    assert lm.install("AsyncMqttClient-esphome @ 0.8.4", silent=True)
    assert lm.uninstall("AsyncMqttClient-esphome", silent=True)
    assert len(lm.get_installed()) == 0


def test_registry(isolated_pio_core):
    lm = LibraryPackageManager()

    # reveal ID
    assert lm.reveal_registry_package_id(PackageSpec(id=13)) == 13
    assert lm.reveal_registry_package_id(PackageSpec(name="OneWire"), silent=True) == 1
    with pytest.raises(UnknownPackageError):
        lm.reveal_registry_package_id(PackageSpec(name="/non-existing-package/"))

    # fetch package data
    assert lm.fetch_registry_package(PackageSpec(id=1))["name"] == "OneWire"
    assert lm.fetch_registry_package(PackageSpec(name="ArduinoJson"))["id"] == 64
    assert (
        lm.fetch_registry_package(
            PackageSpec(id=13, owner="adafruit", name="Renamed library")
        )["name"]
        == "Adafruit GFX Library"
    )
    with pytest.raises(UnknownPackageError):
        lm.fetch_registry_package(
            PackageSpec(owner="unknown<>owner", name="/non-existing-package/")
        )
    with pytest.raises(UnknownPackageError):
        lm.fetch_registry_package(PackageSpec(name="/non-existing-package/"))


def test_update_with_metadata(isolated_pio_core, tmpdir_factory):
    storage_dir = tmpdir_factory.mktemp("storage")
    lm = LibraryPackageManager(str(storage_dir))
    pkg = lm.install("ArduinoJson @ 5.10.1", silent=True)

    # test latest
    outdated = lm.outdated(pkg)
    assert str(outdated.current) == "5.10.1"
    assert outdated.wanted is None
    assert outdated.latest > outdated.current
    assert outdated.latest > semantic_version.Version("5.99.99")

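    # "wanted" is the newest version that still satisfies the given requirement (~5),
    # while "latest" ignores the requirement entirely.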
    # test wanted
    outdated = lm.outdated(pkg, PackageSpec("ArduinoJson@~5"))
    assert str(outdated.current) == "5.10.1"
    assert str(outdated.wanted) == "5.13.4"
    assert outdated.latest > semantic_version.Version("6.16.0")

    # update to the wanted 5.x
    new_pkg = lm.update("ArduinoJson@^5", PackageSpec("ArduinoJson@^5"), silent=True)
    assert str(new_pkg.metadata.version) == "5.13.4"
    # check that old version is removed
    assert len(lm.get_installed()) == 1

    # update to the latest
    lm = LibraryPackageManager(str(storage_dir))
    pkg = lm.update("ArduinoJson", silent=True)
    assert pkg.metadata.version == outdated.latest


def test_update_without_metadata(isolated_pio_core, tmpdir_factory):
    storage_dir = tmpdir_factory.mktemp("storage")
    storage_dir.join("legacy-package").mkdir().join("library.json").write(
        '{"name": "AsyncMqttClient-esphome", "version": "0.8.2"}'
    )
    storage_dir.join("legacy-dep").mkdir().join("library.json").write(
        '{"name": "AsyncTCP-esphome", "version": "1.1.1"}'
    )
    lm = LibraryPackageManager(str(storage_dir))
    pkg = lm.get_package("AsyncMqttClient-esphome")
    outdated = lm.outdated(pkg)
    assert len(lm.get_installed()) == 2
    assert str(pkg.metadata.version) == "0.8.2"
    assert outdated.latest > semantic_version.Version("0.8.2")

    # update
    lm = LibraryPackageManager(str(storage_dir))
    new_pkg = lm.update(pkg, silent=True)
    assert len(lm.get_installed()) == 3
    assert new_pkg.metadata.spec.owner == "ottowinter"
296 tests/package/test_meta.py Normal file
@ -0,0 +1,296 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import jsondiff
import semantic_version

from platformio.package.meta import (
    PackageMetaData,
    PackageOutdatedResult,
    PackageSpec,
    PackageType,
)


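# A result marked as detached is never reported as outdated, regardless of
# allow_incompatible.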
def test_outdated_result():
    result = PackageOutdatedResult(current="1.2.3", latest="2.0.0")
    assert result.is_outdated()
    assert result.is_outdated(allow_incompatible=True)
    result = PackageOutdatedResult(current="1.2.3", latest="2.0.0", wanted="1.5.4")
    assert result.is_outdated()
    assert result.is_outdated(allow_incompatible=True)
    result = PackageOutdatedResult(current="1.2.3", latest="2.0.0", wanted="1.2.3")
    assert not result.is_outdated()
    assert result.is_outdated(allow_incompatible=True)
    result = PackageOutdatedResult(current="1.2.3", latest="2.0.0", detached=True)
    assert not result.is_outdated()
    assert not result.is_outdated(allow_incompatible=True)


def test_spec_owner():
    assert PackageSpec("alice/foo library") == PackageSpec(
        owner="alice", name="foo library"
    )
    spec = PackageSpec(" Bob / BarUpper ")
    assert spec != PackageSpec(owner="BOB", name="BARUPPER")
    assert spec.owner == "Bob"
    assert spec.name == "BarUpper"


def test_spec_id():
    assert PackageSpec(13) == PackageSpec(id=13)
    assert PackageSpec("20") == PackageSpec(id=20)
    spec = PackageSpec("id=199")
    assert spec == PackageSpec(id=199)
    assert isinstance(spec.id, int)


def test_spec_name():
    assert PackageSpec("foo") == PackageSpec(name="foo")
    assert PackageSpec(" bar-24 ") == PackageSpec(name="bar-24")


def test_spec_requirements():
    assert PackageSpec("foo@1.2.3") == PackageSpec(name="foo", requirements="1.2.3")
    assert PackageSpec(
        name="foo", requirements=semantic_version.Version("1.2.3")
    ) == PackageSpec(name="foo", requirements="1.2.3")
    assert PackageSpec("bar @ ^1.2.3") == PackageSpec(name="bar", requirements="^1.2.3")
    assert PackageSpec("13 @ ~2.0") == PackageSpec(id=13, requirements="~2.0")
    assert PackageSpec(
        name="hello", requirements=semantic_version.SimpleSpec("~1.2.3")
    ) == PackageSpec(name="hello", requirements="~1.2.3")
    spec = PackageSpec("id=20 @ !=1.2.3,<2.0")
    assert not spec.external
    assert isinstance(spec.requirements, semantic_version.SimpleSpec)
    assert semantic_version.Version("1.3.0-beta.1") in spec.requirements
    assert spec == PackageSpec(id=20, requirements="!=1.2.3,<2.0")


def test_spec_local_urls():
    assert PackageSpec("file:///tmp/foo.tar.gz") == PackageSpec(
        url="file:///tmp/foo.tar.gz", name="foo"
    )
    assert PackageSpec("customName=file:///tmp/bar.zip") == PackageSpec(
        url="file:///tmp/bar.zip", name="customName"
    )
    assert PackageSpec("file:///tmp/some-lib/") == PackageSpec(
        url="file:///tmp/some-lib/", name="some-lib"
    )
    assert PackageSpec("file:///tmp/foo.tar.gz@~2.3.0-beta.1") == PackageSpec(
        url="file:///tmp/foo.tar.gz", name="foo", requirements="~2.3.0-beta.1"
    )


def test_spec_external_urls():
    assert PackageSpec(
        "https://github.com/platformio/platformio-core/archive/develop.zip"
    ) == PackageSpec(
        url="https://github.com/platformio/platformio-core/archive/develop.zip",
        name="platformio-core",
    )
    assert PackageSpec(
        "https://github.com/platformio/platformio-core/archive/develop.zip?param=value"
        " @ !=2"
    ) == PackageSpec(
        url="https://github.com/platformio/platformio-core/archive/"
        "develop.zip?param=value",
        name="platformio-core",
        requirements="!=2",
    )
    spec = PackageSpec(
        "Custom-Name="
        "https://github.com/platformio/platformio-core/archive/develop.tar.gz@4.4.0"
    )
    assert spec.external
    assert spec.has_custom_name()
    assert spec.name == "Custom-Name"
    assert spec == PackageSpec(
        url="https://github.com/platformio/platformio-core/archive/develop.tar.gz",
        name="Custom-Name",
        requirements="4.4.0",
    )


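# Bare GitHub/GitLab URLs are normalized to "git+" URLs and os.mbed.com URLs to
# "hg+"; SCP-style "user@host:repo" forms are also treated as Git sources.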
def test_spec_vcs_urls():
    assert PackageSpec("https://github.com/platformio/platformio-core") == PackageSpec(
        name="platformio-core", url="git+https://github.com/platformio/platformio-core"
    )
    assert PackageSpec("https://gitlab.com/username/reponame") == PackageSpec(
        name="reponame", url="git+https://gitlab.com/username/reponame"
    )
    assert PackageSpec(
        "wolfSSL=https://os.mbed.com/users/wolfSSL/code/wolfSSL/"
    ) == PackageSpec(
        name="wolfSSL", url="hg+https://os.mbed.com/users/wolfSSL/code/wolfSSL/"
    )
    assert PackageSpec(
        "https://github.com/platformio/platformio-core.git#master"
    ) == PackageSpec(
        name="platformio-core",
        url="git+https://github.com/platformio/platformio-core.git#master",
    )
    assert PackageSpec(
        "core=git+ssh://github.com/platformio/platformio-core.git#v4.4.0@4.4.0"
    ) == PackageSpec(
        name="core",
        url="git+ssh://github.com/platformio/platformio-core.git#v4.4.0",
        requirements="4.4.0",
    )
    assert PackageSpec(
        "username@github.com:platformio/platformio-core.git"
    ) == PackageSpec(
        name="platformio-core",
        url="git+username@github.com:platformio/platformio-core.git",
    )
    assert PackageSpec(
        "pkg=git+git@github.com:platformio/platformio-core.git @ ^1.2.3,!=5"
    ) == PackageSpec(
        name="pkg",
        url="git+git@github.com:platformio/platformio-core.git",
        requirements="^1.2.3,!=5",
    )


def test_spec_as_dict():
    assert not jsondiff.diff(
        PackageSpec("bob/foo@1.2.3").as_dict(),
        {
            "owner": "bob",
            "id": None,
            "name": "foo",
            "requirements": "1.2.3",
            "url": None,
        },
    )
    assert not jsondiff.diff(
        PackageSpec(
            "https://github.com/platformio/platformio-core/archive/develop.zip?param=value"
            " @ !=2"
        ).as_dict(),
        {
            "owner": None,
            "id": None,
            "name": "platformio-core",
            "requirements": "!=2",
            "url": "https://github.com/platformio/platformio-core/archive/develop.zip?param=value",
        },
    )


def test_spec_as_dependency():
    assert PackageSpec("owner/pkgname").as_dependency() == "owner/pkgname"
    assert PackageSpec(owner="owner", name="pkgname").as_dependency() == "owner/pkgname"
    assert PackageSpec("bob/foo @ ^1.2.3").as_dependency() == "bob/foo@^1.2.3"
    assert (
        PackageSpec(
            "https://github.com/o/r/a/develop.zip?param=value @ !=2"
        ).as_dependency()
        == "https://github.com/o/r/a/develop.zip?param=value @ !=2"
    )
    assert (
        PackageSpec(
            "wolfSSL=https://os.mbed.com/users/wolfSSL/code/wolfSSL/"
        ).as_dependency()
        == "wolfSSL=https://os.mbed.com/users/wolfSSL/code/wolfSSL/"
    )


def test_metadata_as_dict():
    metadata = PackageMetaData(PackageType.LIBRARY, "foo", "1.2.3")
    # test setter
    metadata.version = "0.1.2+12345"
    assert metadata.version == semantic_version.Version("0.1.2+12345")
    assert not jsondiff.diff(
        metadata.as_dict(),
        {
            "type": PackageType.LIBRARY,
            "name": "foo",
            "version": "0.1.2+12345",
            "spec": None,
        },
    )

    assert not jsondiff.diff(
        PackageMetaData(
            PackageType.TOOL,
            "toolchain",
            "2.0.5",
            PackageSpec("platformio/toolchain@~2.0.0"),
        ).as_dict(),
        {
            "type": PackageType.TOOL,
            "name": "toolchain",
            "version": "2.0.5",
            "spec": {
                "owner": "platformio",
                "id": None,
                "name": "toolchain",
                "requirements": "~2.0.0",
                "url": None,
            },
        },
    )


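# PackageMetaData round-trips through the ".piopm" JSON file: dump() serializes it
# and load() restores an equal object.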
def test_metadata_dump(tmpdir_factory):
    pkg_dir = tmpdir_factory.mktemp("package")
    metadata = PackageMetaData(
        PackageType.TOOL,
        "toolchain",
        "2.0.5",
        PackageSpec("platformio/toolchain@~2.0.0"),
    )

    dst = pkg_dir.join(".piopm")
    metadata.dump(str(dst))
    assert os.path.isfile(str(dst))
    contents = dst.read()
    assert all(s in contents for s in ("null", '"~2.0.0"'))


def test_metadata_load(tmpdir_factory):
    contents = """
{
  "name": "foo",
  "spec": {
    "name": "foo",
    "owner": "username",
    "requirements": "!=3.4.5"
  },
  "type": "platform",
  "version": "0.1.3"
}
"""
    pkg_dir = tmpdir_factory.mktemp("package")
    dst = pkg_dir.join(".piopm")
    dst.write(contents)
    metadata = PackageMetaData.load(str(dst))
    assert metadata.version == semantic_version.Version("0.1.3")
    assert metadata == PackageMetaData(
        PackageType.PLATFORM,
        "foo",
        "0.1.3",
        spec=PackageSpec(owner="username", name="foo", requirements="!=3.4.5"),
    )

    piopm_path = pkg_dir.join(".piopm")
    metadata = PackageMetaData(
        PackageType.LIBRARY, "mylib", version="1.2.3", spec=PackageSpec("mylib")
    )
    metadata.dump(str(piopm_path))
    restored_metadata = PackageMetaData.load(str(piopm_path))
    assert metadata == restored_metadata
@ -1,119 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.package.spec import PackageSpec


def test_ownername():
    assert PackageSpec("alice/foo library") == PackageSpec(
        ownername="alice", name="foo library"
    )
    assert PackageSpec(" bob / bar ") == PackageSpec(ownername="bob", name="bar")


def test_id():
    assert PackageSpec(13) == PackageSpec(id=13)
    assert PackageSpec("20") == PackageSpec(id=20)
    assert PackageSpec("id=199") == PackageSpec(id=199)


def test_name():
    assert PackageSpec("foo") == PackageSpec(name="foo")
    assert PackageSpec(" bar-24 ") == PackageSpec(name="bar-24")


def test_requirements():
    assert PackageSpec("foo@1.2.3") == PackageSpec(name="foo", requirements="1.2.3")
    assert PackageSpec("bar @ ^1.2.3") == PackageSpec(name="bar", requirements="^1.2.3")
    assert PackageSpec("13 @ ~2.0") == PackageSpec(id=13, requirements="~2.0")
    assert PackageSpec("id=20 @ !=1.2.3,<2.0") == PackageSpec(
        id=20, requirements="!=1.2.3,<2.0"
    )


def test_local_urls():
    assert PackageSpec("file:///tmp/foo.tar.gz") == PackageSpec(
        url="file:///tmp/foo.tar.gz", name="foo"
    )
    assert PackageSpec("customName=file:///tmp/bar.zip") == PackageSpec(
        url="file:///tmp/bar.zip", name="customName"
    )
    assert PackageSpec("file:///tmp/some-lib/") == PackageSpec(
        url="file:///tmp/some-lib/", name="some-lib"
    )
    assert PackageSpec("file:///tmp/foo.tar.gz@~2.3.0-beta.1") == PackageSpec(
        url="file:///tmp/foo.tar.gz", name="foo", requirements="~2.3.0-beta.1"
    )


def test_external_urls():
    assert PackageSpec(
        "https://github.com/platformio/platformio-core/archive/develop.zip"
    ) == PackageSpec(
        url="https://github.com/platformio/platformio-core/archive/develop.zip",
        name="develop",
    )
    assert PackageSpec(
        "https://github.com/platformio/platformio-core/archive/develop.zip?param=value"
        " @ !=2"
    ) == PackageSpec(
        url="https://github.com/platformio/platformio-core/archive/"
        "develop.zip?param=value",
        name="develop",
        requirements="!=2",
    )
    assert PackageSpec(
        "platformio-core="
        "https://github.com/platformio/platformio-core/archive/develop.tar.gz@4.4.0"
    ) == PackageSpec(
        url="https://github.com/platformio/platformio-core/archive/develop.tar.gz",
        name="platformio-core",
        requirements="4.4.0",
    )


def test_vcs_urls():
    assert PackageSpec(
        "https://github.com/platformio/platformio-core.git"
    ) == PackageSpec(
        name="platformio-core", url="https://github.com/platformio/platformio-core.git",
    )
    assert PackageSpec(
        "wolfSSL=https://os.mbed.com/users/wolfSSL/code/wolfSSL/"
    ) == PackageSpec(
        name="wolfSSL", url="https://os.mbed.com/users/wolfSSL/code/wolfSSL/",
    )
    assert PackageSpec(
        "git+https://github.com/platformio/platformio-core.git#master"
    ) == PackageSpec(
        name="platformio-core",
        url="git+https://github.com/platformio/platformio-core.git#master",
    )
    assert PackageSpec(
        "core=git+ssh://github.com/platformio/platformio-core.git#v4.4.0@4.4.0"
    ) == PackageSpec(
        name="core",
        url="git+ssh://github.com/platformio/platformio-core.git#v4.4.0",
        requirements="4.4.0",
    )
    assert PackageSpec("git@github.com:platformio/platformio-core.git") == PackageSpec(
        name="platformio-core", url="git@github.com:platformio/platformio-core.git",
    )
    assert PackageSpec(
        "pkg=git+git@github.com:platformio/platformio-core.git @ ^1.2.3,!=5"
    ) == PackageSpec(
        name="pkg",
        url="git+git@github.com:platformio/platformio-core.git",
        requirements="^1.2.3,!=5",
    )
@ -88,7 +88,9 @@ def test_check_and_update_libraries(clirunner, isolated_pio_core, validate_clire
    validate_cliresult(result)
    assert "There are the new updates for libraries (ArduinoJson)" in result.output
    assert "Please wait while updating libraries" in result.output
    assert re.search(r"Updating ArduinoJson\s+@ 6.12.0\s+\[[\d\.]+\]", result.output)
    assert re.search(
        r"Updating bblanchon/ArduinoJson\s+6\.12\.0\s+\[[\d\.]+\]", result.output
    )

    # check updated version
    result = clirunner.invoke(cli_pio, ["lib", "-g", "list", "--json-output"])