Significantly improve Package Manager // Resolve #913

* Handle dependencies when installing non-registry package/library (VCS, archive, local folder)
Author: Ivan Kravets
Date: 2017-03-08 17:24:58 +02:00
parent 41cea76603
commit 58942c3f38
13 changed files with 657 additions and 433 deletions
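For context, here is a minimal sketch (editor's illustration, not part of the commit) of the installs this change targets, using the LibraryManager API that appears in the diff below. The VCS and archive URLs are taken from this commit's tests; the local-folder path is a hypothetical placeholder, and the equivalent `pio lib -g install <spec>` invocations are exercised in the test changes further down.

from platformio.managers.lib import LibraryManager

lm = LibraryManager()  # global library storage when no package_dir is given
# VCS repository: dependencies declared in the library manifest are now installed too
lm.install("https://github.com/gioblu/PJON.git#6.2")
# downloadable archive
lm.install("http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip")
# local folder (hypothetical path)
lm.install("file:///path/to/MyLocalLibrary")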

View File

@ -38,6 +38,7 @@ PlatformIO 3.0
* Escape project path when Glob matching is used
* Do not overwrite project configuration variables when system environment
variables are set
* Handle dependencies when installing non-registry package/library (VCS, archive, local folder)
* Fixed package installing with VCS branch for Python 2.7.3
(`issue #885 <https://github.com/platformio/platformio-core/issues/885>`_)

View File

@ -15,7 +15,7 @@
# pylint: disable=too-many-branches, too-many-locals
import json
from os.path import join
from os.path import isdir, join
from time import sleep
from urllib import quote
@ -116,22 +116,23 @@ def lib_uninstall(lm, libraries):
@click.pass_obj
def lib_update(lm, libraries, only_check, json_output):
if not libraries:
libraries = []
for manifest in lm.get_installed():
if "@vcs-" in manifest['__pkg_dir']:
libraries.append("%s=%s" % (manifest['name'], manifest['url']))
else:
libraries.append(str(manifest.get("id", manifest['name'])))
libraries = [manifest['__pkg_dir'] for manifest in lm.get_installed()]
if only_check and json_output:
result = []
for library in libraries:
name, requirements, url = lm.parse_pkg_name(library)
latest = lm.outdated(name, requirements, url)
pkg_dir = library if isdir(library) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = lm.parse_pkg_input(library)
pkg_dir = lm.get_package_dir(name, requirements, url)
if not pkg_dir:
continue
latest = lm.outdated(pkg_dir, requirements)
if not latest:
continue
manifest = lm.load_manifest(
lm.get_package_dir(name, requirements, url))
manifest = lm.load_manifest(pkg_dir)
manifest['versionLatest'] = latest
result.append(manifest)
return click.echo(json.dumps(result))
@ -167,6 +168,9 @@ def print_lib_item(item):
click.echo("Authors: %s" % ", ".join(
item.get("authornames",
[a.get("name", "") for a in item.get("authors", [])])))
if "__src_url" in item:
click.secho("Source: %s" % item['__src_url'])
click.echo()
@ -270,8 +274,7 @@ def get_builtin_libs(storage_names=None):
storage_names = storage_names or []
pm = PlatformManager()
for manifest in pm.get_installed():
p = PlatformFactory.newPlatform(
pm.get_manifest_path(manifest['__pkg_dir']))
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
for storage in p.get_lib_storages():
if storage_names and storage['name'] not in storage_names:
continue
@ -308,7 +311,7 @@ def lib_builtin(storage, json_output):
@click.option("--json-output", is_flag=True)
def lib_show(library, json_output):
lm = LibraryManager()
name, requirements, _ = lm.parse_pkg_name(library)
name, requirements, _ = lm.parse_pkg_input(library)
lib_id = lm.get_pkg_id_by_name(
name, requirements, silent=json_output, interactive=not json_output)
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")

View File

@ -13,7 +13,7 @@
# limitations under the License.
import json
from os.path import dirname, isfile, join
from os.path import dirname, isdir
import click
@ -94,10 +94,12 @@ def _get_installed_platform_data(platform,
# del data['version']
# return data
data['__pkg_dir'] = dirname(p.manifest_path)
# if VCS cloned platform
if not isfile(join(data['__pkg_dir'], "platform.json")):
data['__pkg_dir'] = dirname(data['__pkg_dir'])
# overwrite VCS version and add extra fields
manifest = PlatformManager().load_manifest(dirname(p.manifest_path))
assert manifest
for key in manifest:
if key == "version" or key.startswith("__"):
data[key] = manifest[key]
if with_boards:
data['boards'] = [c.get_brief_data() for c in p.get_boards().values()]
@ -214,7 +216,7 @@ def platform_list(json_output):
for manifest in pm.get_installed():
platforms.append(
_get_installed_platform_data(
pm.get_manifest_path(manifest['__pkg_dir']),
manifest['__pkg_dir'],
with_boards=False,
expose_packages=False))
if json_output:
@ -336,22 +338,25 @@ def platform_update(platforms, only_packages, only_check, json_output):
pm = PlatformManager()
if not platforms:
platforms = []
for manifest in pm.get_installed():
if "@vcs-" in manifest['__pkg_dir']:
platforms.append("%s=%s" % (manifest['name'], manifest['url']))
else:
platforms.append(manifest['name'])
platforms = [manifest['__pkg_dir'] for manifest in pm.get_installed()]
if only_check and json_output:
result = []
for platform in platforms:
name, requirements, url = pm.parse_pkg_name(platform)
latest = pm.outdated(name, requirements, url)
pkg_dir = platform if isdir(platform) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = pm.parse_pkg_input(platform)
pkg_dir = pm.get_package_dir(name, requirements, url)
if not pkg_dir:
continue
latest = pm.outdated(pkg_dir, requirements)
if not latest:
continue
data = _get_installed_platform_data(
name, with_boards=False, expose_packages=False)
data['versionLatest'] = latest or "Unknown"
pkg_dir, with_boards=False, expose_packages=False)
data['versionLatest'] = latest
result.append(data)
return click.echo(json.dumps(result))
else:

View File

@ -163,8 +163,7 @@ def after_upgrade(ctx):
# update development platforms
pm = PlatformManager()
for manifest in pm.get_installed():
# pm.update(manifest['name'], "^" + manifest['version'])
pm.update(manifest['name'])
pm.update(manifest['__pkg_dir'])
# update PlatformIO Plus tool if installed
pioplus_update()
@ -262,7 +261,7 @@ def check_internal_updates(ctx, what):
outdated_items = []
for manifest in pm.get_installed():
if manifest['name'] not in outdated_items and \
pm.outdated(manifest['name']):
pm.outdated(manifest['__pkg_dir']):
outdated_items.append(manifest['name'])
if not outdated_items:

View File

@ -15,10 +15,8 @@
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
import json
import os
import re
from glob import glob
from hashlib import md5
from os.path import isdir, join
import arrow
@ -61,8 +59,8 @@ class LibraryManager(BasePkgManager):
return None
def load_manifest(self, path):
manifest = BasePkgManager.load_manifest(self, path)
def load_manifest(self, pkg_dir):
manifest = BasePkgManager.load_manifest(self, pkg_dir)
if not manifest:
return manifest
@ -76,6 +74,9 @@ class LibraryManager(BasePkgManager):
manifest['authors'] = [{"name": manifest['author']}]
del manifest['author']
if "authors" in manifest and not isinstance(manifest['authors'], list):
manifest['authors'] = [manifest['authors']]
if "keywords" not in manifest:
keywords = []
for keyword in re.split(r"[\s/]+",
@ -123,31 +124,6 @@ class LibraryManager(BasePkgManager):
return manifest
def check_pkg_structure(self, pkg_dir):
try:
return BasePkgManager.check_pkg_structure(self, pkg_dir)
except exception.MissingPackageManifest:
# we will generate manifest automatically
# if library doesn't contain any
pass
manifest = {
"name": "Library_" + md5(pkg_dir).hexdigest()[:5],
"version": "0.0.0"
}
for root, dirs, files in os.walk(pkg_dir):
if len(dirs) == 1 and not files:
manifest['name'] = dirs[0]
continue
if dirs or files:
pkg_dir = root
break
with open(join(pkg_dir, self.manifest_names[0]), "w") as fp:
json.dump(manifest, fp)
return pkg_dir
@staticmethod
def normalize_dependencies(dependencies):
if not dependencies:
@ -239,7 +215,7 @@ class LibraryManager(BasePkgManager):
}, silent, interactive)['id'])
def _install_from_piorepo(self, name, requirements):
assert name.startswith("id=")
assert name.startswith("id="), name
version = self.get_latest_repo_version(name, requirements)
if not version:
raise exception.UndefinedPackageVersion(requirements or "latest",
@ -260,28 +236,23 @@ class LibraryManager(BasePkgManager):
silent=False,
trigger_event=True,
interactive=False):
already_installed = False
_name, _requirements, _url = self.parse_pkg_name(name, requirements)
try:
_name, _requirements, _url = self.parse_pkg_input(name,
requirements)
if not _url:
_name = "id=%d" % self.get_pkg_id_by_name(
name = "id=%d" % self.get_pkg_id_by_name(
_name,
_requirements,
silent=silent,
interactive=interactive)
already_installed = self.get_package(_name, _requirements, _url)
pkg_dir = BasePkgManager.install(
self, _name
if not _url else name, _requirements, silent, trigger_event)
requirements = _requirements
pkg_dir = BasePkgManager.install(self, name, requirements, silent,
trigger_event)
except exception.InternetIsOffline as e:
if not silent:
click.secho(str(e), fg="yellow")
return
if already_installed:
return
manifest = self.load_manifest(pkg_dir)
if "dependencies" not in manifest:
return pkg_dir

View File

@ -17,7 +17,7 @@ import hashlib
import json
import os
import shutil
from os.path import basename, dirname, getsize, isdir, isfile, islink, join
from os.path import basename, getsize, isdir, isfile, islink, join
from tempfile import mkdtemp
import click
@ -127,16 +127,36 @@ class PkgRepoMixin(object):
class PkgInstallerMixin(object):
VCS_MANIFEST_NAME = ".piopkgmanager.json"
SRC_MANIFEST_NAME = ".piopkgmanager.json"
FILE_CACHE_VALID = "1m" # 1 month
FILE_CACHE_MAX_SIZE = 1024 * 1024
_INSTALLED_CACHE = {}
MEMORY_CACHE = {}
def reset_cache(self):
if self.package_dir in PkgInstallerMixin._INSTALLED_CACHE:
del PkgInstallerMixin._INSTALLED_CACHE[self.package_dir]
@staticmethod
def cache_get(key, default=None):
return PkgInstallerMixin.MEMORY_CACHE.get(key, default)
@staticmethod
def cache_set(key, value):
PkgInstallerMixin.MEMORY_CACHE[key] = value
@staticmethod
def cache_reset():
PkgInstallerMixin.MEMORY_CACHE = {}
def read_dirs(self, src_dir):
cache_key = "read_dirs-%s" % src_dir
result = self.cache_get(cache_key)
if result:
return result
result = [
join(src_dir, name) for name in sorted(os.listdir(src_dir))
if isdir(join(src_dir, name))
]
self.cache_set(cache_key, result)
return result
def download(self, url, dest_dir, sha1=None):
cache_key_fname = app.ContentCache.key_from_args(url, "fname")
@ -171,26 +191,23 @@ class PkgInstallerMixin(object):
return fu.start()
@staticmethod
def generate_install_dirname(manifest):
def get_install_dirname(manifest):
name = manifest['name']
if "id" in manifest:
name += "_ID%d" % manifest['id']
return name
def get_vcs_manifest_path(self, pkg_dir):
def get_src_manifest_path(self, pkg_dir):
for item in os.listdir(pkg_dir):
if not isdir(join(pkg_dir, item)):
continue
if isfile(join(pkg_dir, item, self.VCS_MANIFEST_NAME)):
return join(pkg_dir, item, self.VCS_MANIFEST_NAME)
if isfile(join(pkg_dir, item, self.SRC_MANIFEST_NAME)):
return join(pkg_dir, item, self.SRC_MANIFEST_NAME)
return None
def get_manifest_path(self, pkg_dir):
if not isdir(pkg_dir):
return None
manifest_path = self.get_vcs_manifest_path(pkg_dir)
if manifest_path:
return manifest_path
for name in self.manifest_names:
manifest_path = join(pkg_dir, name)
if isfile(manifest_path):
@ -198,73 +215,104 @@ class PkgInstallerMixin(object):
return None
def manifest_exists(self, pkg_dir):
return self.get_manifest_path(pkg_dir) is not None
return self.get_manifest_path(pkg_dir) or \
self.get_src_manifest_path(pkg_dir)
def load_manifest(self, path): # pylint: disable=too-many-branches
assert path
pkg_dir = path
if isdir(path):
path = self.get_manifest_path(path)
if not path:
return None
else:
pkg_dir = dirname(pkg_dir)
def load_manifest(self, pkg_dir):
cache_key = "load_manifest-%s" % pkg_dir
result = self.cache_get(cache_key)
if result:
return result
is_vcs_pkg = False
if isfile(path) and path.endswith(self.VCS_MANIFEST_NAME):
is_vcs_pkg = True
pkg_dir = dirname(dirname(path))
manifest_path = self.get_manifest_path(pkg_dir)
if not manifest_path:
return None
# return from cache
if self.package_dir in PkgInstallerMixin._INSTALLED_CACHE:
for manifest in PkgInstallerMixin._INSTALLED_CACHE[
self.package_dir]:
if not is_vcs_pkg and manifest['__pkg_dir'] == pkg_dir:
return manifest
# if the package is non-registry (VCS or archive)
src_manifest_path = self.get_src_manifest_path(pkg_dir)
src_manifest = None
if src_manifest_path:
src_manifest = util.load_json(src_manifest_path)
manifest = {}
if path.endswith(".json"):
manifest = util.load_json(path)
elif path.endswith(".properties"):
with codecs.open(path, encoding="utf-8") as fp:
if manifest_path.endswith(".json"):
manifest = util.load_json(manifest_path)
elif manifest_path.endswith(".properties"):
with codecs.open(manifest_path, encoding="utf-8") as fp:
for line in fp.readlines():
if "=" not in line:
continue
key, value = line.split("=", 1)
manifest[key.strip()] = value.strip()
else:
if src_manifest:
if "name" not in manifest:
manifest['name'] = basename(pkg_dir)
if "version" not in manifest:
manifest['version'] = "0.0.0"
manifest['name'] = src_manifest['name']
if "version" in src_manifest:
manifest['version'] = src_manifest['version']
manifest['__src_url'] = src_manifest['url']
if "name" not in manifest:
manifest['name'] = basename(pkg_dir)
if "version" not in manifest:
manifest['version'] = "0.0.0"
manifest['__pkg_dir'] = pkg_dir
self.cache_set(cache_key, manifest)
return manifest
def get_installed(self):
if self.package_dir in PkgInstallerMixin._INSTALLED_CACHE:
return PkgInstallerMixin._INSTALLED_CACHE[self.package_dir]
items = []
for p in sorted(os.listdir(self.package_dir)):
pkg_dir = join(self.package_dir, p)
if not isdir(pkg_dir):
continue
for pkg_dir in self.read_dirs(self.package_dir):
manifest = self.load_manifest(pkg_dir)
if not manifest:
continue
assert "name" in manifest
items.append(manifest)
PkgInstallerMixin._INSTALLED_CACHE[self.package_dir] = items
return items
def check_pkg_structure(self, pkg_dir):
if self.manifest_exists(pkg_dir):
return pkg_dir
def get_package(self, name, requirements=None, url=None):
pkg_id = int(name[3:]) if name.startswith("id=") else 0
best = None
for manifest in self.get_installed():
if url:
if manifest.get("__src_url") != url:
continue
elif pkg_id and manifest.get("id") != pkg_id:
continue
elif not pkg_id and manifest['name'] != name:
continue
for root, _, _ in os.walk(pkg_dir):
# strict version or VCS HASH
if requirements and requirements == manifest['version']:
return manifest
try:
if requirements and not semantic_version.Spec(
requirements).match(
semantic_version.Version(
manifest['version'], partial=True)):
continue
elif not best or (semantic_version.Version(
manifest['version'], partial=True) >
semantic_version.Version(
best['version'], partial=True)):
best = manifest
except ValueError:
pass
return best
def get_package_dir(self, name, requirements=None, url=None):
manifest = self.get_package(name, requirements, url)
return manifest.get("__pkg_dir") if manifest else None
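# Editor's note (illustrative, not part of the diff): the selection above relies on
# the same `semantic_version` calls used throughout this file -- an exact match on
# the version string (or VCS hash) wins immediately, otherwise the highest installed
# version satisfying the spec is kept. Roughly:
#
#     import semantic_version
#     installed = ["1.0.0", "1.2.0", "2.0.0", "2.1.0"]
#     spec = semantic_version.Spec("^1")
#     best = max((v for v in installed
#                 if spec.match(semantic_version.Version(v, partial=True))),
#                key=semantic_version.Version)
#     # best == "1.2.0", mirroring test_get_package below: ("id=1", "^1") -> 1.2.0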
def find_pkg_root(self, src_dir):
if self.manifest_exists(src_dir):
return src_dir
for root, _, _ in os.walk(src_dir):
if self.manifest_exists(root):
return root
raise exception.MissingPackageManifest(", ".join(self.manifest_names))
def _install_from_piorepo(self, name, requirements):
@ -291,18 +339,25 @@ class PkgInstallerMixin(object):
util.get_systype())
return pkg_dir
def _install_from_url(self, name, url, requirements=None, sha1=None):
def _install_from_url(self,
name,
url,
requirements=None,
sha1=None,
track=False):
pkg_dir = None
tmp_dir = mkdtemp("-package", "installing-", self.package_dir)
src_manifest_dir = None
src_manifest = {"name": name, "url": url, "requirements": requirements}
try:
if url.startswith("file://"):
url = url[7:]
if isfile(url):
self.unpack(url, tmp_dir)
_url = url[7:]
if isfile(_url):
self.unpack(_url, tmp_dir)
else:
util.rmtree_(tmp_dir)
shutil.copytree(url, tmp_dir)
shutil.copytree(_url, tmp_dir)
elif url.startswith(("http://", "https://")):
dlpath = self.download(url, tmp_dir, sha1)
assert isfile(dlpath)
@ -311,29 +366,52 @@ class PkgInstallerMixin(object):
else:
vcs = VCSClientFactory.newClient(tmp_dir, url)
assert vcs.export()
with open(join(vcs.storage_dir, self.VCS_MANIFEST_NAME),
"w") as fp:
json.dump({
"name": name,
"version": vcs.get_current_revision(),
"url": url,
"requirements": requirements
}, fp)
src_manifest_dir = vcs.storage_dir
src_manifest['version'] = vcs.get_current_revision()
pkg_dir = self.find_pkg_root(tmp_dir)
# write source data to a special manifest
if track:
if not src_manifest_dir:
src_manifest_dir = join(pkg_dir, ".pio")
self._update_src_manifest(src_manifest, src_manifest_dir)
pkg_dir = self.check_pkg_structure(tmp_dir)
pkg_dir = self._install_from_tmp_dir(pkg_dir, requirements)
finally:
if isdir(tmp_dir):
util.rmtree_(tmp_dir)
return pkg_dir
def _install_from_tmp_dir(self, tmp_dir, requirements=None):
tmp_manifest_path = self.get_manifest_path(tmp_dir)
tmp_manifest = self.load_manifest(tmp_manifest_path)
def _update_src_manifest(self, data, src_dir):
if not isdir(src_dir):
os.makedirs(src_dir)
src_manifest_path = join(src_dir, self.SRC_MANIFEST_NAME)
_data = data
if isfile(src_manifest_path):
_data.update(util.load_json(src_manifest_path))
with open(src_manifest_path, "w") as fp:
json.dump(_data, fp)
def _install_from_tmp_dir( # pylint: disable=too-many-branches
self, tmp_dir, requirements=None):
tmp_manifest = self.load_manifest(tmp_dir)
assert set(["name", "version"]) <= set(tmp_manifest.keys())
name = self.generate_install_dirname(tmp_manifest)
pkg_dir = join(self.package_dir, name)
pkg_dirname = self.get_install_dirname(tmp_manifest)
pkg_dir = join(self.package_dir, pkg_dirname)
cur_manifest = self.load_manifest(pkg_dir)
tmp_semver = None
cur_semver = None
try:
tmp_semver = semantic_version.Version(
tmp_manifest['version'], partial=True)
if cur_manifest:
cur_semver = semantic_version.Version(
cur_manifest['version'], partial=True)
except ValueError:
pass
# package should satisfy requirements
if requirements:
@ -341,45 +419,51 @@ class PkgInstallerMixin(object):
"Package version %s doesn't satisfy requirements %s" % (
tmp_manifest['version'], requirements))
try:
reqspec = semantic_version.Spec(requirements)
tmp_version = semantic_version.Version(
tmp_manifest['version'], partial=True)
assert tmp_version in reqspec, mismatch_error
except ValueError:
assert tmp_semver and tmp_semver in semantic_version.Spec(
requirements), mismatch_error
except (AssertionError, ValueError):
assert tmp_manifest['version'] == requirements, mismatch_error
if self.manifest_exists(pkg_dir):
cur_manifest_path = self.get_manifest_path(pkg_dir)
cur_manifest = self.load_manifest(cur_manifest_path)
if tmp_manifest_path.endswith(self.VCS_MANIFEST_NAME):
if cur_manifest.get("url") != tmp_manifest['url']:
pkg_dir = join(self.package_dir, "%s@vcs-%s" % (
name, hashlib.md5(tmp_manifest['url']).hexdigest()))
# check if package already exists
if cur_manifest:
# 0-overwrite, 1-rename, 2-fix to a version
action = 0
if "__src_url" in cur_manifest:
if cur_manifest['__src_url'] != tmp_manifest.get("__src_url"):
action = 1
elif "__src_url" in tmp_manifest:
action = 2
else:
try:
tmp_version = semantic_version.Version(
tmp_manifest['version'], partial=True)
cur_version = semantic_version.Version(
cur_manifest['version'], partial=True)
if tmp_semver and (not cur_semver or tmp_semver > cur_semver):
action = 1
elif tmp_semver and cur_semver and tmp_semver != cur_semver:
action = 2
# if current package version < new package, backup it
if tmp_version > cur_version:
os.rename(pkg_dir,
join(self.package_dir, "%s@%s" %
(name, cur_manifest['version'])))
elif tmp_version < cur_version:
pkg_dir = join(self.package_dir, "%s@%s" %
(name, tmp_manifest['version']))
except ValueError:
pkg_dir = join(self.package_dir,
"%s@%s" % (name, tmp_manifest['version']))
# rename
if action == 1:
target_dirname = "%s@%s" % (pkg_dirname,
cur_manifest['version'])
if "__src_url" in cur_manifest:
target_dirname = "%s@src-%s" % (
pkg_dirname,
hashlib.md5(cur_manifest['__src_url']).hexdigest())
os.rename(pkg_dir, join(self.package_dir, target_dirname))
# fix to a version
elif action == 2:
target_dirname = "%s@%s" % (pkg_dirname,
tmp_manifest['version'])
if "__src_url" in tmp_manifest:
target_dirname = "%s@src-%s" % (
pkg_dirname,
hashlib.md5(tmp_manifest['__src_url']).hexdigest())
pkg_dir = join(self.package_dir, target_dirname)
# remove previous/not-satisfied package
if isdir(pkg_dir):
util.rmtree_(pkg_dir)
os.rename(tmp_dir, pkg_dir)
assert isdir(pkg_dir)
self.cache_reset()
return pkg_dir
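# Editor's note -- an illustrative walk-through (not part of the diff) of the
# rename / "fix to a version" handling above, with directory names taken from
# this commit's tests:
#
#   install "ArduinoJson@5.6.7"  -> lib/ArduinoJson_ID64              (5.6.7)
#   install "ArduinoJson@~5.7.0" -> lib/ArduinoJson_ID64              (newer 5.7.x keeps the plain name)
#                                   lib/ArduinoJson_ID64@5.6.7        (previous copy renamed, action 1)
#   install the same library from a different VCS/archive source
#                                -> lib/PJON                          (latest source install)
#                                   lib/PJON@src-<md5 of the other source URL>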
@ -400,14 +484,20 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
click.echo("%s: %s" % (self.__class__.__name__, message), nl=nl)
@staticmethod
def parse_pkg_name( # pylint: disable=too-many-branches
def parse_pkg_input( # pylint: disable=too-many-branches
text, requirements=None):
text = str(text)
url_marker = "://"
if not any([
requirements, "@" not in text, text.startswith("git@"),
url_marker in text
]):
# git@github.com:user/package.git
url_marker = text[:4]
if url_marker not in ("git@", "git+") or ":" not in text:
url_marker = "://"
req_conditions = [
not requirements, "@" in text,
(url_marker != "git@" and "://git@" not in text) or
text.count("@") > 1
]
if all(req_conditions):
text, requirements = text.rsplit("@", 1)
if text.isdigit():
text = "id=" + text
@ -423,22 +513,18 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
url.startswith("http") and
(url.split("#", 1)[0] if "#" in url else url).endswith(".git")
]
if any(git_conditions):
url = "git+" + url
# Handle Developer Mbed URL
# (https://developer.mbed.org/users/user/code/package/)
elif url.startswith("https://developer.mbed.org"):
if url.startswith("https://developer.mbed.org"):
url = "hg+" + url
# git@github.com:user/package.git
if url.startswith("git@"):
url_marker = "git@"
if any([s in url for s in ("\\", "/")]) and url_marker not in url:
if isfile(url) or isdir(url):
url = "file://" + url
elif url.count("/") == 1 and not url.startswith("git@"):
elif url.count("/") == 1 and "git" not in url_marker:
url = "git+https://github.com/" + url
# determine name
@ -448,55 +534,13 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
_url = _url[:-1]
name = basename(_url)
if "." in name and not name.startswith("."):
name = name.split(".", 1)[0]
name = name.rsplit(".", 1)[0]
if url_marker not in url:
url = None
return (name or text, requirements, url)
def get_package(self, name, requirements=None, url=None):
pkg_id = int(name[3:]) if name.startswith("id=") else 0
best = None
reqspec = None
if requirements:
try:
reqspec = semantic_version.Spec(requirements)
except ValueError:
pass
for manifest in self.get_installed():
if pkg_id and manifest.get("id") != pkg_id:
continue
elif not pkg_id and manifest['name'] != name:
continue
elif not reqspec and (requirements or url):
conds = [
requirements == manifest['version'], url and
url in manifest.get("url", "")
]
if not best or any(conds):
best = manifest
continue
try:
if reqspec and not reqspec.match(
semantic_version.Version(
manifest['version'], partial=True)):
continue
elif not best or (semantic_version.Version(
manifest['version'], partial=True) >
semantic_version.Version(
best['version'], partial=True)):
best = manifest
except ValueError:
pass
return best
def get_package_dir(self, name, requirements=None, url=None):
package = self.get_package(name, requirements, url)
return package.get("__pkg_dir") if package else None
def outdated(self, name, requirements=None, url=None):
def outdated(self, pkg_dir, requirements=None):
"""
Has 3 different results:
`None` - unknown package, VCS is fixed to commit
@ -504,27 +548,26 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
`String` - a found latest version
"""
latest = None
package_dir = self.get_package_dir(name, requirements, url)
if not package_dir or ("@" in package_dir and
"@vcs-" not in package_dir):
manifest = self.load_manifest(pkg_dir)
# skip a package fixed to a specific version
if "@" in pkg_dir and "__src_url" not in manifest:
return None
is_vcs_pkg = False
manifest_path = self.get_vcs_manifest_path(package_dir)
if manifest_path:
is_vcs_pkg = True
manifest = self.load_manifest(manifest_path)
else:
manifest = self.load_manifest(package_dir)
if is_vcs_pkg:
vcs = VCSClientFactory.newClient(
package_dir, manifest['url'], silent=True)
if "__src_url" in manifest:
try:
vcs = VCSClientFactory.newClient(
pkg_dir, manifest['__src_url'], silent=True)
except (AttributeError, exception.PlatformioException):
return None
if not vcs.can_be_updated:
return None
latest = vcs.get_latest_revision()
else:
try:
latest = self.get_latest_repo_version(
name, requirements, silent=True)
"id=%d" % manifest['id']
if "id" in manifest else manifest['name'],
requirements,
silent=True)
except (exception.PlatformioException, ValueError):
return None
if not latest:
@ -543,7 +586,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
silent=False,
trigger_event=True,
interactive=False): # pylint: disable=unused-argument
name, requirements, url = self.parse_pkg_name(name, requirements)
name, requirements, url = self.parse_pkg_input(name, requirements)
package_dir = self.get_package_dir(name, requirements, url)
if not package_dir or not silent:
@ -560,15 +603,16 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
return package_dir
if url:
pkg_dir = self._install_from_url(name, url, requirements)
pkg_dir = self._install_from_url(
name, url, requirements, track=True)
else:
pkg_dir = self._install_from_piorepo(name, requirements)
if not pkg_dir or not self.manifest_exists(pkg_dir):
raise exception.PackageInstallError(name, requirements or "*",
util.get_systype())
self.reset_cache()
manifest = self.load_manifest(pkg_dir)
assert manifest
if trigger_event:
telemetry.on_event(
@ -576,42 +620,45 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
action="Install",
label=manifest['name'])
click.secho(
"{name} @ {version} has been successfully installed!".format(
**manifest),
fg="green")
if not silent:
click.secho(
"{name} @ {version} has been successfully installed!".format(
**manifest),
fg="green")
return pkg_dir
def uninstall(self, name, requirements=None, trigger_event=True):
name, requirements, url = self.parse_pkg_name(name, requirements)
package_dir = self.get_package_dir(name, requirements, url)
if not package_dir:
click.secho(
"%s @ %s is not installed" % (name, requirements or "*"),
fg="yellow")
return
def uninstall(self, package, requirements=None, trigger_event=True):
if isdir(package):
pkg_dir = package
else:
name, requirements, url = self.parse_pkg_input(package,
requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
manifest = self.load_manifest(package_dir)
if not pkg_dir:
raise exception.UnknownPackage("%s @ %s" %
(package, requirements or "*"))
manifest = self.load_manifest(pkg_dir)
click.echo(
"Uninstalling %s @ %s: \t" % (click.style(
manifest['name'], fg="cyan"), manifest['version']),
nl=False)
if isdir(package_dir):
if islink(package_dir):
os.unlink(package_dir)
else:
util.rmtree_(package_dir)
self.reset_cache()
if islink(pkg_dir):
os.unlink(pkg_dir)
else:
util.rmtree_(pkg_dir)
self.cache_reset()
# unfix package with the same name
package_dir = self.get_package_dir(manifest['name'])
if package_dir and "@" in package_dir:
os.rename(package_dir,
join(self.package_dir,
self.generate_install_dirname(manifest)))
self.reset_cache()
pkg_dir = self.get_package_dir(manifest['name'])
if pkg_dir and "@" in pkg_dir:
os.rename(
pkg_dir,
join(self.package_dir, self.get_install_dirname(manifest)))
self.cache_reset()
click.echo("[%s]" % click.style("OK", fg="green"))
@ -624,25 +671,23 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
def update( # pylint: disable=too-many-return-statements
self,
name,
package,
requirements=None,
only_check=False):
name, requirements, url = self.parse_pkg_name(name, requirements)
package_dir = self.get_package_dir(name, requirements, url)
if not package_dir:
click.secho(
"%s @ %s is not installed" % (name, requirements or "*"),
fg="yellow")
return
is_vcs_pkg = False
if self.get_vcs_manifest_path(package_dir):
is_vcs_pkg = True
manifest_path = self.get_vcs_manifest_path(package_dir)
if isdir(package):
pkg_dir = package
else:
manifest_path = self.get_manifest_path(package_dir)
name, requirements, url = self.parse_pkg_input(package,
requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
if not pkg_dir:
raise exception.UnknownPackage("%s @ %s" %
(package, requirements or "*"))
manifest = self.load_manifest(pkg_dir)
name = manifest['name']
manifest = self.load_manifest(manifest_path)
click.echo(
"{} {:<40} @ {:<15}".format(
"Checking" if only_check else "Updating",
@ -651,7 +696,8 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
if not util.internet_on():
click.echo("[%s]" % (click.style("Off-line", fg="yellow")))
return
latest = self.outdated(name, requirements, url)
latest = self.outdated(pkg_dir, requirements)
if latest:
click.echo("[%s]" % (click.style(latest, fg="red")))
elif latest is False:
@ -659,26 +705,18 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
else:
click.echo("[%s]" % (click.style("Skip", fg="yellow")))
if only_check or latest is False or (not is_vcs_pkg and not latest):
if only_check or not latest:
return
if is_vcs_pkg:
vcs = VCSClientFactory.newClient(package_dir, manifest['url'])
if not vcs.can_be_updated:
click.secho(
"Skip update because repository is fixed "
"to %s revision" % manifest['version'],
fg="yellow")
return
if "__src_url" in manifest:
vcs = VCSClientFactory.newClient(pkg_dir, manifest['__src_url'])
assert vcs.update()
with open(manifest_path, "w") as fp:
manifest['version'] = vcs.get_current_revision()
json.dump(manifest, fp)
self._update_src_manifest(
dict(version=vcs.get_current_revision()), vcs.storage_dir)
else:
self.uninstall(name, manifest['version'], trigger_event=False)
self.uninstall(pkg_dir, trigger_event=False)
self.install(name, latest, trigger_event=False)
self.reset_cache()
telemetry.on_event(
category=self.__class__.__name__,
action="Update",

View File

@ -63,7 +63,7 @@ class PlatformManager(BasePkgManager):
trigger_event=True,
**_): # pylint: disable=too-many-arguments
platform_dir = BasePkgManager.install(self, name, requirements)
p = PlatformFactory.newPlatform(self.get_manifest_path(platform_dir))
p = PlatformFactory.newPlatform(platform_dir)
# @Hook: when 'update' operation (trigger_event is False),
# don't cleanup packages or install them
@ -75,10 +75,16 @@ class PlatformManager(BasePkgManager):
self.cleanup_packages(p.packages.keys())
return True
def uninstall(self, name, requirements=None, trigger_event=True):
name, requirements, _ = self.parse_pkg_name(name, requirements)
p = PlatformFactory.newPlatform(name, requirements)
BasePkgManager.uninstall(self, name, requirements)
def uninstall(self, package, requirements=None, trigger_event=True):
if isdir(package):
pkg_dir = package
else:
name, requirements, url = self.parse_pkg_input(package,
requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
p = PlatformFactory.newPlatform(pkg_dir)
BasePkgManager.uninstall(self, pkg_dir, requirements)
# @Hook: when 'update' operation (trigger_event is False),
# don't cleanup packages or install them
@ -90,18 +96,23 @@ class PlatformManager(BasePkgManager):
def update( # pylint: disable=arguments-differ
self,
name,
package,
requirements=None,
only_packages=False,
only_check=False):
name, requirements, _ = self.parse_pkg_name(name, requirements)
only_check=False,
only_packages=False):
if isdir(package):
pkg_dir = package
else:
name, requirements, url = self.parse_pkg_input(package,
requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
p = PlatformFactory.newPlatform(name, requirements)
p = PlatformFactory.newPlatform(pkg_dir)
pkgs_before = pkgs_after = p.get_installed_packages().keys()
if not only_packages:
BasePkgManager.update(self, name, requirements, only_check)
p = PlatformFactory.newPlatform(name, requirements)
BasePkgManager.update(self, pkg_dir, requirements, only_check)
p = PlatformFactory.newPlatform(pkg_dir)
pkgs_after = p.get_installed_packages().keys()
p.update_packages(only_check)
@ -115,11 +126,10 @@ class PlatformManager(BasePkgManager):
return True
def cleanup_packages(self, names):
self.reset_cache()
self.cache_reset()
deppkgs = {}
for manifest in PlatformManager().get_installed():
p = PlatformFactory.newPlatform(manifest['name'],
manifest['version'])
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
for pkgname, pkgmanifest in p.get_installed_packages().items():
if pkgname not in deppkgs:
deppkgs[pkgname] = set()
@ -131,17 +141,15 @@ class PlatformManager(BasePkgManager):
continue
if (manifest['name'] not in deppkgs or
manifest['version'] not in deppkgs[manifest['name']]):
pm.uninstall(
manifest['name'], manifest['version'], trigger_event=False)
pm.uninstall(manifest['__pkg_dir'], trigger_event=False)
self.reset_cache()
self.cache_reset()
return True
def get_installed_boards(self):
boards = []
for manifest in self.get_installed():
p = PlatformFactory.newPlatform(
self.get_manifest_path(manifest['__pkg_dir']))
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
for config in p.get_boards().values():
board = config.get_brief_data()
if board not in boards:
@ -183,7 +191,10 @@ class PlatformFactory(object):
@classmethod
def newPlatform(cls, name, requirements=None):
platform_dir = None
if name.endswith("platform.json") and isfile(name):
if isdir(name):
platform_dir = name
name = PlatformManager().load_manifest(platform_dir)['name']
elif name.endswith("platform.json") and isfile(name):
platform_dir = dirname(name)
name = util.load_json(name)['name']
else:
@ -249,8 +260,8 @@ class PlatformPackagesMixin(object):
if self.is_valid_requirements(version):
package = self.pm.get_package(name, version)
else:
package = self.pm.get_package(*self._parse_pkg_name(name,
version))
package = self.pm.get_package(*self._parse_pkg_input(name,
version))
if package:
items[name] = package
return items
@ -267,46 +278,47 @@ class PlatformPackagesMixin(object):
self.pm.update("%s=%s" % (name, version), requirements,
only_check)
def are_outdated_packages(self):
latest = None
for name in self.get_installed_packages():
version = self.packages[name].get("version", "")
if self.is_valid_requirements(version):
latest = self.pm.outdated(name, version)
else:
requirements = None
if "@" in version:
version, requirements = version.rsplit("@", 1)
latest = self.pm.outdated(name, requirements, version)
if latest or latest is None:
return True
return False
# def are_outdated_packages(self):
# latest = None
# for name in self.get_installed_packages():
# version = self.packages[name].get("version", "")
# if self.is_valid_requirements(version):
# latest = self.pm.outdated(name, version)
# else:
# requirements = None
# if "@" in version:
# version, requirements = version.rsplit("@", 1)
# latest = self.pm.outdated(name, requirements, version)
# if latest or latest is None:
# return True
# return False
def get_package_dir(self, name):
version = self.packages[name].get("version", "")
if self.is_valid_requirements(version):
return self.pm.get_package_dir(name, version)
else:
return self.pm.get_package_dir(*self._parse_pkg_name(name,
version))
return self.pm.get_package_dir(*self._parse_pkg_input(name,
version))
def get_package_version(self, name):
version = self.packages[name].get("version", "")
if self.is_valid_requirements(version):
package = self.pm.get_package(name, version)
else:
package = self.pm.get_package(*self._parse_pkg_name(name, version))
package = self.pm.get_package(*self._parse_pkg_input(name,
version))
return package['version'] if package else None
@staticmethod
def is_valid_requirements(requirements):
return requirements and "://" not in requirements
def _parse_pkg_name(self, name, version):
def _parse_pkg_input(self, name, version):
requirements = None
if "@" in version:
version, requirements = version.rsplit("@", 1)
return self.pm.parse_pkg_name("%s=%s" % (name, version), requirements)
return self.pm.parse_pkg_input("%s=%s" % (name, version), requirements)
class PlatformRunMixin(object):

View File

@ -141,7 +141,12 @@ class memoized(object):
def __get__(self, obj, objtype):
'''Support instance methods.'''
return functools.partial(self.__call__, obj)
fn = functools.partial(self.__call__, obj)
fn.reset = self._reset
return fn
def _reset(self):
self.cache = {}
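# Editor's note (illustrative sketch, not part of the diff): with the hook added
# above, a memoized instance method now exposes .reset() to drop the cache:
#
#     class ApiClient(object):
#         @memoized
#         def manifest(self):
#             return expensive_fetch()   # hypothetical helper
#
#     client = ApiClient()
#     client.manifest()        # computed once, then served from the cache
#     client.manifest.reset()  # clears the cache; the next call recomputes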
def singleton(cls):

View File

@ -16,7 +16,7 @@ import json
import re
from os.path import basename
from platformio import util
from platformio import exception, util
from platformio.commands.init import cli as cmd_init
from platformio.commands.lib import cli as cmd_lib
@ -37,15 +37,35 @@ def test_search(clirunner, validate_cliresult):
def test_global_install_registry(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cmd_lib, [
"-g", "install", "58", "OneWire",
"-g", "install", "58", "547@2.2.4", "DallasTemperature",
"http://dl.platformio.org/libraries/archives/3/5174.tar.gz",
"ArduinoJson@5.6.7", "ArduinoJson@~5.7.0"
"ArduinoJson@5.6.7", "ArduinoJson@~5.7.0", "1089@fee16e880b"
])
validate_cliresult(result)
# check lib with duplicate URL
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://dl.platformio.org/libraries/archives/3/5174.tar.gz"
])
validate_cliresult(result)
assert "is already installed" in result.output
# check lib with duplicate ID
result = clirunner.invoke(cmd_lib, ["-g", "install", "305"])
validate_cliresult(result)
assert "is already installed" in result.output
# install unknown library
result = clirunner.invoke(cmd_lib, ["-g", "install", "Unknown"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.LibNotFound)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"DHT22_ID58", "ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7",
"OneWire_ID1", "ESPAsyncTCP_ID305"
"ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54",
"DHT22_ID58", "ESPAsyncTCP_ID305", "NeoPixelBus_ID547", "OneWire_ID1",
"IRremoteESP8266_ID1089"
]
assert set(items1) == set(items2)
@ -55,11 +75,29 @@ def test_global_install_archive(clirunner, validate_cliresult,
result = clirunner.invoke(cmd_lib, [
"-g", "install", "https://github.com/adafruit/Adafruit-ST7735-Library/"
"archive/master.zip",
"http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2"
])
validate_cliresult(result)
# incorrect requirements
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@1.2.3"
])
assert result.exit_code != 0
# check lib with duplicate URL
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip"
])
validate_cliresult(result)
assert "is already installed" in result.output
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = ["Adafruit ST7735 Library", "RadioHead"]
items2 = ["Adafruit ST7735 Library", "RadioHead-1.62"]
assert set(items1) >= set(items2)
@ -71,14 +109,20 @@ def test_global_install_repository(clirunner, validate_cliresult,
"-g",
"install",
"https://github.com/gioblu/PJON.git#3.0",
"https://github.com/gioblu/PJON.git#6.2",
"https://github.com/bblanchon/ArduinoJson.git",
"https://gitlab.com/ivankravets/rs485-nodeproto.git",
# "https://developer.mbed.org/users/simon/code/TextLCD/",
"knolleary/pubsubclient"
])
validate_cliresult(result)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = ["PJON", "ESPAsyncTCP", "PubSubClient"]
assert set(items2) & set(items1)
items2 = [
"PJON", "PJON@src-79de467ebe19de18287becff0a1fb42d",
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", "rs485-nodeproto",
"PubSubClient"
]
assert set(items1) >= set(items2)
def test_global_lib_list(clirunner, validate_cliresult, isolated_pio_home):
@ -89,13 +133,15 @@ def test_global_lib_list(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
assert all([
n in result.output
for n in ("PJON", "git+https://github.com/knolleary/pubsubclient")
for n in ("PJON", "git+https://github.com/knolleary/pubsubclient",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip"
)
])
items1 = [i['name'] for i in json.loads(result.output)]
items2 = [
"OneWire", "DHT22", "PJON", "ESPAsyncTCP", "ArduinoJson",
"pubsubclient", "rs485-nodeproto", "Adafruit ST7735 Library",
"RadioHead"
"PubSubClient", "rs485-nodeproto", "Adafruit ST7735 Library",
"RadioHead-1.62", "DallasTemperature", "NeoPixelBus", "IRremoteESP8266"
]
assert set(items1) == set(items2)
@ -106,31 +152,71 @@ def test_global_lib_update_check(clirunner, validate_cliresult,
cmd_lib, ["-g", "update", "--only-check", "--json-output"])
validate_cliresult(result)
output = json.loads(result.output)
assert set(["ArduinoJson", "ESPAsyncTCP", "RadioHead"]) == set(
assert set(["ArduinoJson", "IRremoteESP8266", "NeoPixelBus"]) == set(
[l['name'] for l in output])
def test_global_lib_update(clirunner, validate_cliresult, isolated_pio_home):
# update library using package directory
result = clirunner.invoke(
cmd_lib,
["-g", "update", "NeoPixelBus", "--only-check", "--json-output"])
validate_cliresult(result)
outdated = json.loads(result.output)
assert len(outdated) == 1
assert "__pkg_dir" in outdated[0]
result = clirunner.invoke(cmd_lib,
["-g", "update", outdated[0]['__pkg_dir']])
validate_cliresult(result)
assert "Uninstalling NeoPixelBus @ 2.2.4" in result.output
# update the remaining libraries
result = clirunner.invoke(cmd_lib, ["-g", "update"])
validate_cliresult(result)
assert "[Up-to-date]" in result.output
validate_cliresult(result)
assert result.output.count("[Skip]") == 5
assert result.output.count("[Up-to-date]") == 9
assert "Uninstalling ArduinoJson @ 5.7.3" in result.output
assert "Uninstalling IRremoteESP8266 @ fee16e880b" in result.output
# update unknown library
result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.UnknownPackage)
def test_global_lib_uninstall(clirunner, validate_cliresult,
isolated_pio_home):
# uninstall using package directory
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
validate_cliresult(result)
items = json.loads(result.output)
result = clirunner.invoke(cmd_lib,
["-g", "uninstall", items[0]['__pkg_dir']])
validate_cliresult(result)
assert "Uninstalling Adafruit ST7735 Library" in result.output
# uninstall the remaining libraries
result = clirunner.invoke(cmd_lib, [
"-g", "uninstall", "1", "ArduinoJson@!=5.6.7", "TextLCD",
"Adafruit ST7735 Library"
"-g", "uninstall", "1", "ArduinoJson@!=5.6.7",
"https://github.com/bblanchon/ArduinoJson.git", "IRremoteESP8266@>=0.2"
])
validate_cliresult(result)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"DHT22_ID58", "ArduinoJson_ID64", "ESPAsyncTCP_ID305",
"pubsubclient", "PJON", "rs485-nodeproto", "RadioHead_ID124"
"ArduinoJson", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54",
"DHT22_ID58", "ESPAsyncTCP_ID305", "NeoPixelBus_ID547", "PJON",
"PJON@src-79de467ebe19de18287becff0a1fb42d", "PubSubClient",
"RadioHead-1.62", "rs485-nodeproto"
]
assert set(items1) == set(items2)
# uninstall unknown library
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", "Unknown"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.UnknownPackage)
def test_lib_show(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cmd_lib, ["show", "64"])
@ -142,31 +228,24 @@ def test_lib_show(clirunner, validate_cliresult, isolated_pio_home):
assert "OneWire" in result.output
def test_project_lib_complex(clirunner, validate_cliresult, tmpdir):
with tmpdir.as_cwd():
# init
result = clirunner.invoke(cmd_init)
validate_cliresult(result)
def test_lib_builtin(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cmd_lib, ["builtin"])
validate_cliresult(result)
result = clirunner.invoke(cmd_lib, ["builtin", "--json-output"])
validate_cliresult(result)
# install
result = clirunner.invoke(cmd_lib, ["install", "54", "ArduinoJson"])
validate_cliresult(result)
items1 = [
d.basename
for d in tmpdir.join(basename(util.get_projectlibdeps_dir()))
.listdir()
]
items2 = ["DallasTemperature_ID54", "OneWire_ID1", "ArduinoJson_ID64"]
assert set(items1) == set(items2)
# list
result = clirunner.invoke(cmd_lib, ["list", "--json-output"])
validate_cliresult(result)
items1 = [i['name'] for i in json.loads(result.output)]
items2 = ["DallasTemperature", "OneWire", "ArduinoJson"]
assert set(items1) == set(items2)
def test_lib_stats(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cmd_lib, ["stats"])
validate_cliresult(result)
assert all([
s in result.output
for s in ("UPDATED", "ago", "http://platformio.org/lib/show")
])
# update
result = clirunner.invoke(cmd_lib, ["update"])
validate_cliresult(result)
assert "[Up-to-date]" in result.output
result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
validate_cliresult(result)
assert set([
"dlweek", "added", "updated", "topkeywords", "dlmonth", "dlday",
"lastkeywords"
]) == set(json.loads(result.output).keys())

View File

@ -156,7 +156,7 @@ def test_check_platform_updates(clirunner, validate_cliresult,
manifest['version'] = "0.0.0"
manifest_path.write(json.dumps(manifest))
# reset cached manifests
PlatformManager().reset_cache()
PlatformManager().cache_reset()
# reset check time
interval = int(app.get_setting("check_platforms_interval")) * 3600 * 24

View File

@ -12,70 +12,118 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from os.path import join
from platformio import util
from platformio.managers.package import BasePkgManager
from platformio.managers.package import PackageManager
def test_pkg_name_parser():
def test_pkg_input_parser():
items = [
["PkgName", ("PkgName", None, None)],
[("PkgName", "!=1.2.3,<2.0"), ("PkgName", "!=1.2.3,<2.0", None)],
["PkgName@1.2.3", ("PkgName", "1.2.3", None)],
[("PkgName@1.2.3", "1.2.5"), ("PkgName@1.2.3", "1.2.5", None)],
["id:13", ("id:13", None, None)],
["id:13@~1.2.3", ("id:13", "~1.2.3", None)], [
["id:13@~1.2.3", ("id:13", "~1.2.3", None)],
[
util.get_home_dir(),
(".platformio", None, "file://" + util.get_home_dir())
], [
],
[
"LocalName=" + util.get_home_dir(),
("LocalName", None, "file://" + util.get_home_dir())
], [
],
[
"LocalName=%s@>2.3.0" % util.get_home_dir(),
("LocalName", ">2.3.0", "file://" + util.get_home_dir())
],
[
"https://github.com/user/package.git",
("package", None, "git+https://github.com/user/package.git")
], [
"https://gitlab.com/user/package.git",
("package", None, "git+https://gitlab.com/user/package.git")
], [
],
[
"MyPackage=https://gitlab.com/user/package.git",
("MyPackage", None, "git+https://gitlab.com/user/package.git")
],
[
"MyPackage=https://gitlab.com/user/package.git@3.2.1,!=2",
("MyPackage", "3.2.1,!=2",
"git+https://gitlab.com/user/package.git")
],
[
"https://somedomain.com/path/LibraryName-1.2.3.zip",
("LibraryName-1.2.3", None,
"https://somedomain.com/path/LibraryName-1.2.3.zip")
],
[
"https://github.com/user/package/archive/branch.zip",
("branch", None,
"https://github.com/user/package/archive/branch.zip")
], [
],
[
"https://github.com/user/package/archive/branch.zip@~1.2.3",
("branch", "~1.2.3",
"https://github.com/user/package/archive/branch.zip")
],
[
"https://github.com/user/package/archive/branch.tar.gz",
("branch", None,
("branch.tar", None,
"https://github.com/user/package/archive/branch.tar.gz")
], [
],
[
"https://github.com/user/package/archive/branch.tar.gz@!=5",
("branch.tar", "!=5",
"https://github.com/user/package/archive/branch.tar.gz")
],
[
"https://developer.mbed.org/users/user/code/package/",
("package", None,
"hg+https://developer.mbed.org/users/user/code/package/")
], [
],
[
"https://github.com/user/package#v1.2.3",
("package", None, "git+https://github.com/user/package#v1.2.3")
], [
],
[
"https://github.com/user/package.git#branch",
("package", None, "git+https://github.com/user/package.git#branch")
], [
],
[
"PkgName=https://github.com/user/package.git#a13d344fg56",
("PkgName", None,
"git+https://github.com/user/package.git#a13d344fg56")
], [
],
[
"user/package",
("package", None, "git+https://github.com/user/package")
],
[
"PkgName=user/package",
("PkgName", None, "git+https://github.com/user/package")
], [
],
[
"PkgName=user/package#master",
("PkgName", None, "git+https://github.com/user/package#master")
], [
],
[
"git+https://github.com/user/package",
("package", None, "git+https://github.com/user/package")
], [
],
[
"hg+https://example.com/user/package",
("package", None, "hg+https://example.com/user/package")
], [
],
[
"git@github.com:user/package.git",
("package", None, "git@github.com:user/package.git")
], [
],
[
"git@github.com:user/package.git#v1.2.0",
("package", None, "git@github.com:user/package.git#v1.2.0")
], [
],
[
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0",
("package", None,
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0")
@ -83,6 +131,72 @@ def test_pkg_name_parser():
]
for params, result in items:
if isinstance(params, tuple):
assert BasePkgManager.parse_pkg_name(*params) == result
assert PackageManager.parse_pkg_input(*params) == result
else:
assert BasePkgManager.parse_pkg_name(params) == result
assert PackageManager.parse_pkg_input(params) == result
def test_install_packages(isolated_pio_home, tmpdir):
packages = [
dict(id=1, name="name_1", version="shasum"),
dict(id=1, name="name_1", version="2.0.0"),
dict(id=1, name="name_1", version="2.1.0"),
dict(id=1, name="name_1", version="1.2.0"),
dict(id=1, name="name_1", version="1.0.0"),
dict(name="name_2", version="1.0.0"),
dict(name="name_2", version="2.0.0",
__src_url="git+https://github.com"),
dict(name="name_2", version="3.0.0",
__src_url="git+https://github2.com"),
dict(name="name_2", version="4.0.0",
__src_url="git+https://github2.com")
]
pm = PackageManager(join(util.get_home_dir(), "packages"))
for package in packages:
tmp_dir = tmpdir.mkdir("tmp-package")
tmp_dir.join("package.json").write(json.dumps(package))
pm._install_from_url(package['name'], "file://%s" % str(tmp_dir))
tmp_dir.remove(rec=1)
assert len(pm.get_installed()) == len(packages) - 1
pkg_dirnames = [
'name_1_ID1', 'name_1_ID1@1.0.0', 'name_1_ID1@1.2.0',
'name_1_ID1@2.0.0', 'name_1_ID1@shasum', 'name_2',
'name_2@src-177cbce1f0705580d17790fda1cc2ef5',
'name_2@src-f863b537ab00f4c7b5011fc44b120e1f'
]
assert set([p.basename for p in isolated_pio_home.join(
"packages").listdir()]) == set(pkg_dirnames)
def test_get_package(isolated_pio_home):
tests = [
[("unknown", ), None],
[("1", ), None],
[("id=1", "shasum"), dict(id=1, name="name_1", version="shasum")],
[("id=1", "*"), dict(id=1, name="name_1", version="2.1.0")],
[("id=1", "^1"), dict(id=1, name="name_1", version="1.2.0")],
[("id=1", "^1"), dict(id=1, name="name_1", version="1.2.0")],
[("name_1", "<2"), dict(id=1, name="name_1", version="1.2.0")],
[("name_1", ">2"), None],
[("name_1", "2-0-0"), dict(id=1, name="name_1", version="2.1.0")],
[("name_1", "2-0-0"), dict(id=1, name="name_1", version="2.1.0")],
[("name_2", ), dict(name="name_2", version="4.0.0")],
[("url_has_higher_priority", None, "git+https://github.com"),
dict(name="name_2", version="2.0.0",
__src_url="git+https://github.com")],
[("name_2", None, "git+https://github.com"),
dict(name="name_2", version="2.0.0",
__src_url="git+https://github.com")],
]
pm = PackageManager(join(util.get_home_dir(), "packages"))
for test in tests:
manifest = pm.get_package(*test[0])
if test[1] is None:
assert manifest is None, test
continue
for key, value in test[1].items():
assert manifest[key] == value, test

View File

@ -16,19 +16,6 @@ import pytest
import requests
def pytest_generate_tests(metafunc):
if "package_data" not in metafunc.fixturenames:
return
pkgs_manifest = requests.get(
"https://dl.bintray.com/platformio/dl-packages/manifest.json").json()
assert isinstance(pkgs_manifest, dict)
packages = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
packages.append(item)
metafunc.parametrize("package_data", packages)
def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
@ -36,13 +23,22 @@ def validate_response(req):
"application/octet-stream")
def test_package(package_data):
assert package_data['url'].endswith(".tar.gz")
def test_packages():
pkgs_manifest = requests.get(
"https://dl.bintray.com/platformio/dl-packages/manifest.json").json()
assert isinstance(pkgs_manifest, dict)
items = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
items.append(item)
r = requests.head(package_data['url'], allow_redirects=True)
validate_response(r)
for item in items:
assert item['url'].endswith(".tar.gz"), item
if "X-Checksum-Sha1" not in r.headers:
return pytest.skip("X-Checksum-Sha1 is not provided")
r = requests.head(item['url'], allow_redirects=True)
validate_response(r)
assert package_data['sha1'] == r.headers.get("X-Checksum-Sha1")
if "X-Checksum-Sha1" not in r.headers:
return pytest.skip("X-Checksum-Sha1 is not provided")
assert item['sha1'] == r.headers.get("X-Checksum-Sha1"), item

View File

@ -24,6 +24,7 @@ deps =
yapf
pylint
pytest
show
commands = python --version
[testenv:docs]