2017-06-05 16:02:39 +03:00
|
|
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
2015-11-18 17:16:17 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2017-01-15 00:12:41 +02:00
|
|
|
import codecs
|
2017-01-24 22:07:45 +02:00
|
|
|
import hashlib
|
2016-07-18 01:38:35 +03:00
|
|
|
import json
|
2016-05-26 19:43:36 +03:00
|
|
|
import os
|
2017-06-24 16:07:40 +03:00
|
|
|
import re
|
2016-12-05 18:51:25 +02:00
|
|
|
import shutil
|
2018-06-21 21:33:56 +03:00
|
|
|
from os.path import abspath, basename, getsize, isdir, isfile, islink, join
|
2016-05-31 00:22:25 +03:00
|
|
|
from tempfile import mkdtemp
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2014-12-03 20:16:50 +02:00
|
|
|
import click
|
2015-05-28 13:44:10 +03:00
|
|
|
import requests
|
2016-05-26 19:43:36 +03:00
|
|
|
import semantic_version
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2017-01-25 15:33:40 +02:00
|
|
|
from platformio import __version__, app, exception, telemetry, util
|
2014-06-07 13:34:31 +03:00
|
|
|
from platformio.downloader import FileDownloader
|
2018-07-14 22:10:56 +03:00
|
|
|
from platformio.lockfile import LockFile
|
2014-06-07 13:34:31 +03:00
|
|
|
from platformio.unpacker import FileUnpacker
|
2016-05-31 00:22:25 +03:00
|
|
|
from platformio.vcsclient import VCSClientFactory
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2017-09-06 20:57:56 +03:00
|
|
|
# pylint: disable=too-many-arguments, too-many-return-statements
|
2016-08-26 11:46:59 +03:00
|
|
|
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2016-07-18 01:38:35 +03:00
|
|
|
class PackageRepoIterator(object):
    """Iterate over package repositories, yielding the manifest entry for
    ``package`` from each repository that knows about it.

    A repository is either an already-loaded manifest ``dict`` or a URL
    whose JSON manifest is fetched once and memoized process-wide in
    ``_MANIFEST_CACHE``.
    """

    # Process-wide cache of fetched repository manifests, keyed by URL
    _MANIFEST_CACHE = {}

    def __init__(self, package, repositories):
        assert isinstance(repositories, list)
        self.package = package
        self.repositories = iter(repositories)

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def next(self):
        """Return the manifest entry for the package from the next
        repository that contains it.

        Implemented iteratively (the original recursed once per
        repository, which could hit the recursion limit on long
        repository lists where the package is missing).

        :raises StopIteration: when all repositories are exhausted
        """
        while True:
            repo = next(self.repositories)  # raises StopIteration when done
            if isinstance(repo, dict):
                manifest = repo
            elif repo in PackageRepoIterator._MANIFEST_CACHE:
                manifest = PackageRepoIterator._MANIFEST_CACHE[repo]
            else:
                manifest = self._fetch_manifest(repo)
                PackageRepoIterator._MANIFEST_CACHE[repo] = manifest
            if self.package in manifest:
                return manifest[self.package]

    @staticmethod
    def _fetch_manifest(url):
        """Best-effort fetch of a remote repository manifest.

        Returns an empty dict on any request/decoding failure so that
        the iteration simply moves on to the next repository. Catches
        ``Exception`` (not a bare ``except``) so KeyboardInterrupt and
        SystemExit still propagate.
        """
        manifest = {}
        r = None
        try:
            r = requests.get(url, headers=util.get_request_defheaders())
            r.raise_for_status()
            manifest = r.json()
        except Exception:  # pylint: disable=broad-except
            pass
        finally:
            if r:
                r.close()
        return manifest
|
2016-05-26 22:33:17 +03:00
|
|
|
|
|
|
|
|
2016-07-18 01:38:35 +03:00
|
|
|
class PkgRepoMixin(object):
    """Repository-querying helpers shared by the package managers."""

    # Current PlatformIO Core version as a SemVer object (used to check
    # "engines.platformio" compatibility declared by packages)
    PIO_VERSION = semantic_version.Version(util.pepver_to_semver(__version__))

    @staticmethod
    def is_system_compatible(valid_systems):
        """Return True when the current system type is allowed by
        *valid_systems* (None/"all"/"*" mean "any system")."""
        if valid_systems in (None, "all", "*"):
            return True
        if not isinstance(valid_systems, list):
            valid_systems = [valid_systems]
        return util.get_systype() in valid_systems

    def max_satisfying_repo_version(self, versions, requirements=None):
        """Pick from *versions* the highest item that is compatible with
        the current system, the running PIO Core and the optional
        *requirements* SemVer spec. Returns None when nothing matches."""
        reqspec = None
        if requirements:
            try:
                reqspec = self.parse_semver_spec(
                    requirements, raise_exception=True)
            except ValueError:
                pass

        best = None
        for candidate in versions:
            if not self.is_system_compatible(candidate.get("system")):
                continue
            if "platformio" in candidate.get("engines", {}):
                # skip packages requiring an incompatible PIO Core
                if PkgRepoMixin.PIO_VERSION not in self.parse_semver_spec(
                        candidate['engines']['platformio'],
                        raise_exception=True):
                    continue
            candidate_ver = semantic_version.Version(candidate['version'])
            if reqspec and candidate_ver not in reqspec:
                continue
            if not best or semantic_version.Version(
                    best['version']) < candidate_ver:
                best = candidate
        return best

    def get_latest_repo_version(  # pylint: disable=unused-argument
            self,
            name,
            requirements,
            silent=False):
        """Return the newest satisfying version string across all
        configured repositories, or None."""
        newest = None
        for versions in PackageRepoIterator(name, self.repositories):
            pkgdata = self.max_satisfying_repo_version(versions, requirements)
            if not pkgdata:
                continue
            if not newest or semantic_version.compare(pkgdata['version'],
                                                      newest) == 1:
                newest = pkgdata['version']
        return newest

    def get_all_repo_versions(self, name):
        """Return every known version of *name* as sorted, de-duplicated
        version strings."""
        found = set()
        for versions in PackageRepoIterator(name, self.repositories):
            found.update(
                semantic_version.Version(v['version']) for v in versions)
        return [str(v) for v in sorted(found)]
|
2017-03-03 23:29:17 +02:00
|
|
|
|
2016-05-29 17:59:08 +03:00
|
|
|
|
2016-07-18 01:38:35 +03:00
|
|
|
class PkgInstallerMixin(object):
    """Download/unpack/manifest helpers shared by the package managers."""

    # File that records where a package came from (URL, VCS revision);
    # written inside the installed package when source tracking is on
    SRC_MANIFEST_NAME = ".piopkgmanager.json"
    # Prefix of temporary folders used while a package is being installed
    TMP_FOLDER_PREFIX = "_tmp_installing-"

    # How long downloaded archives stay valid in the content cache
    FILE_CACHE_VALID = "1m"  # 1 month
    # Archives bigger than this (bytes) are never cached
    FILE_CACHE_MAX_SIZE = 1024 * 1024

    # Process-wide in-memory memoization cache shared by all instances
    MEMORY_CACHE = {}
|
2017-01-30 01:04:06 +02:00
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
@staticmethod
|
|
|
|
def cache_get(key, default=None):
|
|
|
|
return PkgInstallerMixin.MEMORY_CACHE.get(key, default)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def cache_set(key, value):
|
|
|
|
PkgInstallerMixin.MEMORY_CACHE[key] = value
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def cache_reset():
|
|
|
|
PkgInstallerMixin.MEMORY_CACHE = {}
|
|
|
|
|
|
|
|
def read_dirs(self, src_dir):
|
|
|
|
cache_key = "read_dirs-%s" % src_dir
|
|
|
|
result = self.cache_get(cache_key)
|
|
|
|
if result:
|
|
|
|
return result
|
|
|
|
result = [
|
|
|
|
join(src_dir, name) for name in sorted(os.listdir(src_dir))
|
|
|
|
if isdir(join(src_dir, name))
|
|
|
|
]
|
|
|
|
self.cache_set(cache_key, result)
|
|
|
|
return result
|
2017-01-30 01:04:06 +02:00
|
|
|
|
|
|
|
    def download(self, url, dest_dir, sha1=None):
        """Download *url* into *dest_dir*, using the content cache when
        available.

        :param url: remote file location
        :param dest_dir: destination folder for the downloaded file
        :param sha1: optional checksum, verified after download
        :return: path to the downloaded (or cache-restored) file
        """
        cache_key_fname = app.ContentCache.key_from_args(url, "fname")
        cache_key_data = app.ContentCache.key_from_args(url, "data")
        if self.FILE_CACHE_VALID:
            # serve a previously downloaded copy from the content cache
            with app.ContentCache() as cc:
                fname = cc.get(cache_key_fname)
                cache_path = cc.get_cache_path(cache_key_data)
                if fname and isfile(cache_path):
                    dst_path = join(dest_dir, fname)
                    shutil.copy(cache_path, dst_path)
                    return dst_path

        with_progress = not app.is_disabled_progressbar()
        try:
            fd = FileDownloader(url, dest_dir)
            fd.start(with_progress=with_progress)
        except IOError as e:
            raise_error = not with_progress
            if with_progress:
                # retry once without the progress bar: some consoles
                # cannot render it and raise IOError
                try:
                    fd = FileDownloader(url, dest_dir)
                    fd.start(with_progress=False)
                except IOError:
                    raise_error = True
            if raise_error:
                click.secho(
                    "Error: Please read http://bit.ly/package-manager-ioerror",
                    fg="red",
                    err=True)
                raise e

        if sha1:
            fd.verify(sha1)
        dst_path = fd.get_filepath()
        # skip caching when disabled or when the file is too large
        if not self.FILE_CACHE_VALID or getsize(
                dst_path) > PkgInstallerMixin.FILE_CACHE_MAX_SIZE:
            return dst_path

        # store the fresh download in the content cache for later reuse
        with app.ContentCache() as cc:
            cc.set(cache_key_fname, basename(dst_path), self.FILE_CACHE_VALID)
            cc.set(cache_key_data, "DUMMY", self.FILE_CACHE_VALID)
            shutil.copy(dst_path, cc.get_cache_path(cache_key_data))
        return dst_path
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def unpack(source_path, dest_dir):
|
2018-04-05 22:10:28 -07:00
|
|
|
with_progress = not app.is_disabled_progressbar()
|
|
|
|
try:
|
|
|
|
with FileUnpacker(source_path) as fu:
|
|
|
|
return fu.unpack(dest_dir, with_progress=with_progress)
|
|
|
|
except IOError as e:
|
|
|
|
if not with_progress:
|
|
|
|
raise e
|
|
|
|
with FileUnpacker(source_path) as fu:
|
|
|
|
return fu.unpack(dest_dir, with_progress=False)
|
2017-01-30 01:04:06 +02:00
|
|
|
|
2017-12-27 19:37:26 +02:00
|
|
|
@staticmethod
|
|
|
|
def parse_semver_spec(value, raise_exception=False):
|
|
|
|
try:
|
2018-07-03 14:55:48 +03:00
|
|
|
# Workaround for ^ issue and pre-releases
|
|
|
|
# https://github.com/rbarrois/python-semanticversion/issues/61
|
|
|
|
requirements = []
|
|
|
|
for item in str(value).split(","):
|
|
|
|
item = item.strip()
|
|
|
|
if not item:
|
|
|
|
continue
|
|
|
|
if item.startswith("^"):
|
|
|
|
major = semantic_version.Version.coerce(item[1:]).major
|
|
|
|
requirements.append(">=%s" % major)
|
|
|
|
requirements.append("<%s" % (int(major) + 1))
|
|
|
|
else:
|
|
|
|
requirements.append(item)
|
|
|
|
return semantic_version.Spec(*requirements)
|
2017-12-27 19:37:26 +02:00
|
|
|
except ValueError as e:
|
|
|
|
if raise_exception:
|
|
|
|
raise e
|
|
|
|
return None
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def parse_semver_version(value, raise_exception=False):
|
|
|
|
try:
|
|
|
|
try:
|
|
|
|
return semantic_version.Version(value)
|
|
|
|
except ValueError:
|
2018-01-09 21:56:21 +02:00
|
|
|
if "." not in str(value) and not str(value).isdigit():
|
|
|
|
raise ValueError("Invalid SemVer version %s" % value)
|
2017-12-27 19:37:26 +02:00
|
|
|
return semantic_version.Version.coerce(value)
|
|
|
|
except ValueError as e:
|
|
|
|
if raise_exception:
|
|
|
|
raise e
|
|
|
|
return None
|
|
|
|
|
2018-10-19 16:37:15 +03:00
|
|
|
@staticmethod
|
|
|
|
def parse_pkg_uri( # pylint: disable=too-many-branches
|
|
|
|
text, requirements=None):
|
|
|
|
text = str(text)
|
|
|
|
name, url = None, None
|
|
|
|
|
|
|
|
# Parse requirements
|
|
|
|
req_conditions = [
|
|
|
|
"@" in text, not requirements, ":" not in text
|
|
|
|
or text.rfind("/") < text.rfind("@")
|
|
|
|
]
|
|
|
|
if all(req_conditions):
|
|
|
|
text, requirements = text.rsplit("@", 1)
|
|
|
|
|
|
|
|
# Handle PIO Library Registry ID
|
|
|
|
if text.isdigit():
|
|
|
|
text = "id=" + text
|
|
|
|
# Parse custom name
|
|
|
|
elif "=" in text and not text.startswith("id="):
|
|
|
|
name, text = text.split("=", 1)
|
|
|
|
|
|
|
|
# Parse URL
|
|
|
|
# if valid URL with scheme vcs+protocol://
|
|
|
|
if "+" in text and text.find("+") < text.find("://"):
|
|
|
|
url = text
|
|
|
|
elif "/" in text or "\\" in text:
|
|
|
|
git_conditions = [
|
|
|
|
# Handle GitHub URL (https://github.com/user/package)
|
|
|
|
text.startswith("https://github.com/") and not text.endswith(
|
|
|
|
(".zip", ".tar.gz")),
|
|
|
|
(text.split("#", 1)[0]
|
|
|
|
if "#" in text else text).endswith(".git")
|
|
|
|
]
|
|
|
|
hg_conditions = [
|
|
|
|
# Handle Developer Mbed URL
|
|
|
|
# (https://developer.mbed.org/users/user/code/package/)
|
|
|
|
# (https://os.mbed.com/users/user/code/package/)
|
|
|
|
text.startswith("https://developer.mbed.org"),
|
|
|
|
text.startswith("https://os.mbed.com")
|
|
|
|
]
|
|
|
|
if any(git_conditions):
|
|
|
|
url = "git+" + text
|
|
|
|
elif any(hg_conditions):
|
|
|
|
url = "hg+" + text
|
|
|
|
elif "://" not in text and (isfile(text) or isdir(text)):
|
|
|
|
url = "file://" + text
|
|
|
|
elif "://" in text:
|
|
|
|
url = text
|
|
|
|
# Handle short version of GitHub URL
|
|
|
|
elif text.count("/") == 1:
|
|
|
|
url = "git+https://github.com/" + text
|
|
|
|
|
|
|
|
# Parse name from URL
|
|
|
|
if url and not name:
|
|
|
|
_url = url.split("#", 1)[0] if "#" in url else url
|
|
|
|
if _url.endswith(("\\", "/")):
|
|
|
|
_url = _url[:-1]
|
|
|
|
name = basename(_url)
|
|
|
|
if "." in name and not name.startswith("."):
|
|
|
|
name = name.rsplit(".", 1)[0]
|
|
|
|
|
|
|
|
return (name or text, requirements, url)
|
|
|
|
|
2017-06-24 16:07:40 +03:00
|
|
|
@staticmethod
|
|
|
|
def get_install_dirname(manifest):
|
|
|
|
name = re.sub(r"[^\da-z\_\-\. ]", "_", manifest['name'], flags=re.I)
|
2017-03-05 00:14:05 +02:00
|
|
|
if "id" in manifest:
|
|
|
|
name += "_ID%d" % manifest['id']
|
2017-11-02 23:14:32 +02:00
|
|
|
return str(name)
|
2017-03-05 00:14:05 +02:00
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
def get_src_manifest_path(self, pkg_dir):
|
2017-04-15 20:32:11 +03:00
|
|
|
if not isdir(pkg_dir):
|
|
|
|
return None
|
2016-07-18 01:38:35 +03:00
|
|
|
for item in os.listdir(pkg_dir):
|
|
|
|
if not isdir(join(pkg_dir, item)):
|
2016-05-26 19:43:36 +03:00
|
|
|
continue
|
2017-03-08 17:24:58 +02:00
|
|
|
if isfile(join(pkg_dir, item, self.SRC_MANIFEST_NAME)):
|
|
|
|
return join(pkg_dir, item, self.SRC_MANIFEST_NAME)
|
2016-05-26 19:43:36 +03:00
|
|
|
return None
|
|
|
|
|
2016-08-05 17:02:39 +03:00
|
|
|
def get_manifest_path(self, pkg_dir):
|
|
|
|
if not isdir(pkg_dir):
|
|
|
|
return None
|
2017-01-15 00:12:41 +02:00
|
|
|
for name in self.manifest_names:
|
|
|
|
manifest_path = join(pkg_dir, name)
|
|
|
|
if isfile(manifest_path):
|
|
|
|
return manifest_path
|
|
|
|
return None
|
2016-08-05 17:02:39 +03:00
|
|
|
|
2016-07-18 01:38:35 +03:00
|
|
|
def manifest_exists(self, pkg_dir):
|
2017-03-08 17:24:58 +02:00
|
|
|
return self.get_manifest_path(pkg_dir) or \
|
|
|
|
self.get_src_manifest_path(pkg_dir)
|
|
|
|
|
|
|
|
    def load_manifest(self, pkg_dir):
        """Load (and memoize) the manifest of the package in *pkg_dir*.

        Combines the regular manifest (JSON or ".properties") with the
        source-tracking manifest when present, and guarantees the
        "name", "version" and "__pkg_dir" keys in the result.

        :return: manifest dict, or None when *pkg_dir* has no manifest
        """
        cache_key = "load_manifest-%s" % pkg_dir
        result = self.cache_get(cache_key)
        if result:
            return result

        manifest = {}
        src_manifest = None
        manifest_path = self.get_manifest_path(pkg_dir)
        src_manifest_path = self.get_src_manifest_path(pkg_dir)
        if src_manifest_path:
            src_manifest = util.load_json(src_manifest_path)

        if not manifest_path and not src_manifest_path:
            return None

        if manifest_path and manifest_path.endswith(".json"):
            manifest = util.load_json(manifest_path)
        elif manifest_path and manifest_path.endswith(".properties"):
            # java-properties style "key = value" lines
            with codecs.open(manifest_path, encoding="utf-8") as fp:
                for line in fp.readlines():
                    if "=" not in line:
                        continue
                    key, value = line.split("=", 1)
                    manifest[key.strip()] = value.strip()

        if src_manifest:
            # source manifest wins for the version and the custom name
            if "version" in src_manifest:
                manifest['version'] = src_manifest['version']
            manifest['__src_url'] = src_manifest['url']
            # handle a custom package name
            autogen_name = self.parse_pkg_uri(manifest['__src_url'])[0]
            if "name" not in manifest or autogen_name != src_manifest['name']:
                manifest['name'] = src_manifest['name']

        # fall back to a folder-derived name and a placeholder version
        if "name" not in manifest:
            manifest['name'] = basename(pkg_dir)
        if "version" not in manifest:
            manifest['version'] = "0.0.0"

        manifest['__pkg_dir'] = util.path_to_unicode(pkg_dir)
        self.cache_set(cache_key, manifest)
        return manifest
|
2016-05-26 22:33:17 +03:00
|
|
|
|
2017-01-30 01:04:06 +02:00
|
|
|
def get_installed(self):
|
|
|
|
items = []
|
2017-03-08 17:24:58 +02:00
|
|
|
for pkg_dir in self.read_dirs(self.package_dir):
|
2018-01-10 15:23:56 +02:00
|
|
|
if self.TMP_FOLDER_PREFIX in pkg_dir:
|
|
|
|
continue
|
2017-01-30 01:04:06 +02:00
|
|
|
manifest = self.load_manifest(pkg_dir)
|
|
|
|
if not manifest:
|
|
|
|
continue
|
|
|
|
assert "name" in manifest
|
|
|
|
items.append(manifest)
|
|
|
|
return items
|
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
    def get_package(self, name, requirements=None, url=None):
        """Find the best installed package matching the query.

        A candidate must match by source *url* when given, otherwise by
        registry ID ("id=N" spec) or by *name*, and be compatible with
        the current system. An exact *requirements* == version match
        (strict version or VCS hash) wins immediately; otherwise the
        highest SemVer version satisfying *requirements* is returned.

        :return: manifest dict of the best match, or None
        """
        pkg_id = int(name[3:]) if name.startswith("id=") else 0
        best = None
        for manifest in self.get_installed():
            if url:
                if manifest.get("__src_url") != url:
                    continue
            elif pkg_id and manifest.get("id") != pkg_id:
                continue
            elif not pkg_id and manifest['name'] != name:
                continue
            elif not PkgRepoMixin.is_system_compatible(manifest.get("system")):
                continue

            # strict version or VCS HASH
            if requirements and requirements == manifest['version']:
                return manifest

            try:
                if requirements and not self.parse_semver_spec(
                        requirements, raise_exception=True).match(
                            self.parse_semver_version(
                                manifest['version'], raise_exception=True)):
                    continue
                elif not best or (self.parse_semver_version(
                        manifest['version'], raise_exception=True) >
                                  self.parse_semver_version(
                                      best['version'], raise_exception=True)):
                    best = manifest
            except ValueError:
                # non-SemVer version that didn't match exactly: skip it
                pass

        return best
|
|
|
|
|
|
|
|
def get_package_dir(self, name, requirements=None, url=None):
|
|
|
|
manifest = self.get_package(name, requirements, url)
|
2017-06-01 19:53:05 +03:00
|
|
|
return manifest.get("__pkg_dir") if manifest and isdir(
|
|
|
|
manifest.get("__pkg_dir")) else None
|
2016-08-01 17:05:48 +03:00
|
|
|
|
2018-06-21 21:33:56 +03:00
|
|
|
def get_package_by_dir(self, pkg_dir):
|
|
|
|
for manifest in self.get_installed():
|
|
|
|
if manifest['__pkg_dir'] == util.path_to_unicode(abspath(pkg_dir)):
|
|
|
|
return manifest
|
|
|
|
return None
|
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
def find_pkg_root(self, src_dir):
|
|
|
|
if self.manifest_exists(src_dir):
|
|
|
|
return src_dir
|
|
|
|
for root, _, _ in os.walk(src_dir):
|
2016-09-04 00:35:47 +03:00
|
|
|
if self.manifest_exists(root):
|
|
|
|
return root
|
2017-01-15 00:12:41 +02:00
|
|
|
raise exception.MissingPackageManifest(", ".join(self.manifest_names))
|
2016-08-01 17:05:48 +03:00
|
|
|
|
2016-05-26 19:43:36 +03:00
|
|
|
    def _install_from_piorepo(self, name, requirements):
        """Install *name* from the PlatformIO repositories, falling back
        across mirrors on download errors.

        :raises exception.UnknownPackage: no repository knows the package
        :raises exception.UndefinedPackageVersion: no version satisfies
            *requirements* for the current system
        :return: installed package directory (or None)
        """
        pkg_dir = None
        pkgdata = None
        versions = None
        for versions in PackageRepoIterator(name, self.repositories):
            pkgdata = self.max_satisfying_repo_version(versions, requirements)
            if not pkgdata:
                continue
            try:
                pkg_dir = self._install_from_url(name, pkgdata['url'],
                                                 requirements,
                                                 pkgdata.get("sha1"))
                break
            except Exception as e:  # pylint: disable=broad-except
                # best-effort: report and try the next mirror
                click.secho("Warning! Package Mirror: %s" % e, fg="yellow")
                click.secho("Looking for another mirror...", fg="yellow")

        if versions is None:
            # no repository yielded data; distinguish "offline" first
            util.internet_on(raise_exception=True)
            raise exception.UnknownPackage(name)
        elif not pkgdata:
            raise exception.UndefinedPackageVersion(requirements or "latest",
                                                    util.get_systype())
        return pkg_dir
|
2016-05-26 22:33:17 +03:00
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
    def _install_from_url(self,
                          name,
                          url,
                          requirements=None,
                          sha1=None,
                          track=False):
        """Fetch a package from *url* (archive, local path or VCS) into a
        temporary folder, then hand it to ``_install_from_tmp_dir``.

        :param sha1: optional checksum for HTTP downloads
        :param track: when True, record source URL/revision in the
            source-tracking manifest so the package can be updated later
        :return: final package directory
        """
        tmp_dir = mkdtemp("-package", self.TMP_FOLDER_PREFIX, self.package_dir)
        src_manifest_dir = None
        src_manifest = {"name": name, "url": url, "requirements": requirements}

        try:
            if url.startswith("file://"):
                _url = url[7:]
                if isfile(_url):
                    self.unpack(_url, tmp_dir)
                else:
                    # local directory: replace tmp_dir with a copy of it
                    util.rmtree_(tmp_dir)
                    shutil.copytree(_url, tmp_dir)
            elif url.startswith(("http://", "https://")):
                dlpath = self.download(url, tmp_dir, sha1)
                assert isfile(dlpath)
                self.unpack(dlpath, tmp_dir)
                os.remove(dlpath)
            else:
                # VCS checkout (git+..., hg+..., svn+... schemes)
                vcs = VCSClientFactory.newClient(tmp_dir, url)
                assert vcs.export()
                src_manifest_dir = vcs.storage_dir
                src_manifest['version'] = vcs.get_current_revision()

            _tmp_dir = tmp_dir
            if not src_manifest_dir:
                # archives may wrap the package in extra folders
                _tmp_dir = self.find_pkg_root(tmp_dir)
                src_manifest_dir = join(_tmp_dir, ".pio")

            # write source data to a special manifest
            if track:
                self._update_src_manifest(src_manifest, src_manifest_dir)

            return self._install_from_tmp_dir(_tmp_dir, requirements)
        finally:
            # always clean up the temporary folder
            if isdir(tmp_dir):
                util.rmtree_(tmp_dir)
        return None
|
2016-05-26 22:33:17 +03:00
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
def _update_src_manifest(self, data, src_dir):
|
|
|
|
if not isdir(src_dir):
|
|
|
|
os.makedirs(src_dir)
|
|
|
|
src_manifest_path = join(src_dir, self.SRC_MANIFEST_NAME)
|
2017-03-11 23:28:55 +02:00
|
|
|
_data = {}
|
2017-03-08 17:24:58 +02:00
|
|
|
if isfile(src_manifest_path):
|
2017-03-11 23:28:55 +02:00
|
|
|
_data = util.load_json(src_manifest_path)
|
|
|
|
_data.update(data)
|
2017-03-08 17:24:58 +02:00
|
|
|
with open(src_manifest_path, "w") as fp:
|
|
|
|
json.dump(_data, fp)
|
|
|
|
|
|
|
|
def _install_from_tmp_dir( # pylint: disable=too-many-branches
|
|
|
|
self, tmp_dir, requirements=None):
|
|
|
|
tmp_manifest = self.load_manifest(tmp_dir)
|
2017-01-24 22:07:45 +02:00
|
|
|
assert set(["name", "version"]) <= set(tmp_manifest.keys())
|
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
pkg_dirname = self.get_install_dirname(tmp_manifest)
|
|
|
|
pkg_dir = join(self.package_dir, pkg_dirname)
|
|
|
|
cur_manifest = self.load_manifest(pkg_dir)
|
|
|
|
|
2017-12-27 19:37:26 +02:00
|
|
|
tmp_semver = self.parse_semver_version(tmp_manifest['version'])
|
2017-03-08 17:24:58 +02:00
|
|
|
cur_semver = None
|
2017-12-27 19:37:26 +02:00
|
|
|
if cur_manifest:
|
|
|
|
cur_semver = self.parse_semver_version(cur_manifest['version'])
|
2016-05-26 22:33:17 +03:00
|
|
|
|
2016-05-31 00:22:25 +03:00
|
|
|
# package should satisfy requirements
|
|
|
|
if requirements:
|
2016-08-01 17:05:48 +03:00
|
|
|
mismatch_error = (
|
2017-06-05 16:05:05 +03:00
|
|
|
"Package version %s doesn't satisfy requirements %s" %
|
|
|
|
(tmp_manifest['version'], requirements))
|
2016-08-01 17:05:48 +03:00
|
|
|
try:
|
2018-07-03 14:55:48 +03:00
|
|
|
assert tmp_semver and tmp_semver in self.parse_semver_spec(
|
|
|
|
requirements, raise_exception=True), mismatch_error
|
2017-03-08 17:24:58 +02:00
|
|
|
except (AssertionError, ValueError):
|
2017-01-24 22:07:45 +02:00
|
|
|
assert tmp_manifest['version'] == requirements, mismatch_error
|
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
# check if package already exists
|
|
|
|
if cur_manifest:
|
|
|
|
# 0-overwrite, 1-rename, 2-fix to a version
|
|
|
|
action = 0
|
|
|
|
if "__src_url" in cur_manifest:
|
|
|
|
if cur_manifest['__src_url'] != tmp_manifest.get("__src_url"):
|
|
|
|
action = 1
|
|
|
|
elif "__src_url" in tmp_manifest:
|
|
|
|
action = 2
|
2017-01-24 22:07:45 +02:00
|
|
|
else:
|
2017-03-08 17:24:58 +02:00
|
|
|
if tmp_semver and (not cur_semver or tmp_semver > cur_semver):
|
|
|
|
action = 1
|
|
|
|
elif tmp_semver and cur_semver and tmp_semver != cur_semver:
|
|
|
|
action = 2
|
|
|
|
|
|
|
|
# rename
|
|
|
|
if action == 1:
|
|
|
|
target_dirname = "%s@%s" % (pkg_dirname,
|
|
|
|
cur_manifest['version'])
|
|
|
|
if "__src_url" in cur_manifest:
|
|
|
|
target_dirname = "%s@src-%s" % (
|
2018-06-08 21:37:57 +03:00
|
|
|
pkg_dirname, hashlib.md5(
|
|
|
|
cur_manifest['__src_url']).hexdigest())
|
2018-04-20 21:27:52 +03:00
|
|
|
shutil.move(pkg_dir, join(self.package_dir, target_dirname))
|
2017-03-08 17:24:58 +02:00
|
|
|
# fix to a version
|
|
|
|
elif action == 2:
|
|
|
|
target_dirname = "%s@%s" % (pkg_dirname,
|
|
|
|
tmp_manifest['version'])
|
|
|
|
if "__src_url" in tmp_manifest:
|
|
|
|
target_dirname = "%s@src-%s" % (
|
2018-06-08 21:37:57 +03:00
|
|
|
pkg_dirname, hashlib.md5(
|
|
|
|
tmp_manifest['__src_url']).hexdigest())
|
2017-03-08 17:24:58 +02:00
|
|
|
pkg_dir = join(self.package_dir, target_dirname)
|
2016-05-31 00:22:25 +03:00
|
|
|
|
|
|
|
# remove previous/not-satisfied package
|
|
|
|
if isdir(pkg_dir):
|
2016-08-05 18:43:20 +03:00
|
|
|
util.rmtree_(pkg_dir)
|
2018-04-20 21:27:52 +03:00
|
|
|
shutil.move(tmp_dir, pkg_dir)
|
2016-05-31 00:22:25 +03:00
|
|
|
assert isdir(pkg_dir)
|
2017-03-08 17:24:58 +02:00
|
|
|
self.cache_reset()
|
2016-05-31 00:22:25 +03:00
|
|
|
return pkg_dir
|
2016-05-26 19:43:36 +03:00
|
|
|
|
2016-07-18 01:38:35 +03:00
|
|
|
|
|
|
|
class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
    """Base class for the concrete package managers."""

    # Handle circle dependencies: records "name-req-url" keys of packages
    # already being installed so install() can short-circuit on repeats
    INSTALL_HISTORY = None
|
|
|
|
|
2016-07-18 01:38:35 +03:00
|
|
|
def __init__(self, package_dir, repositories=None):
|
|
|
|
self.repositories = repositories
|
|
|
|
self.package_dir = package_dir
|
|
|
|
if not isdir(self.package_dir):
|
|
|
|
os.makedirs(self.package_dir)
|
|
|
|
assert isdir(self.package_dir)
|
|
|
|
|
|
|
|
    @property
    def manifest_names(self):
        """Ordered manifest file names recognized by this manager;
        abstract — must be provided by subclasses."""
        raise NotImplementedError()
|
|
|
|
|
|
|
|
def print_message(self, message, nl=True):
|
|
|
|
click.echo("%s: %s" % (self.__class__.__name__, message), nl=nl)
|
|
|
|
|
2017-03-08 17:24:58 +02:00
|
|
|
    def outdated(self, pkg_dir, requirements=None):
        """
        Check whether the package in *pkg_dir* has a newer version.

        Has 3 different results:
        `None` - unknown package, VCS is detached to commit
        `False` - package is up-to-date
        `String` - a found latest version
        """
        if not isdir(pkg_dir):
            return None
        latest = None
        manifest = self.load_manifest(pkg_dir)
        # skip detached package to a specific version
        if "@" in pkg_dir and "__src_url" not in manifest and not requirements:
            return None

        if "__src_url" in manifest:
            # VCS-sourced package: ask the VCS client for the latest rev
            try:
                vcs = VCSClientFactory.newClient(
                    pkg_dir, manifest['__src_url'], silent=True)
            except (AttributeError, exception.PlatformioException):
                return None
            if not vcs.can_be_updated:
                return None
            latest = vcs.get_latest_revision()
        else:
            # registry-sourced package: query repositories
            try:
                latest = self.get_latest_repo_version(
                    "id=%d" % manifest['id']
                    if "id" in manifest else manifest['name'],
                    requirements,
                    silent=True)
            except (exception.PlatformioException, ValueError):
                return None

        if not latest:
            return None

        up_to_date = False
        try:
            # SemVer comparison only makes sense for registry packages;
            # VCS revisions fall back to plain string equality below
            assert "__src_url" not in manifest
            up_to_date = (self.parse_semver_version(
                manifest['version'], raise_exception=True) >=
                          self.parse_semver_version(
                              latest, raise_exception=True))
        except (AssertionError, ValueError):
            up_to_date = latest == manifest['version']

        return False if up_to_date else latest
|
2016-08-02 19:10:29 +03:00
|
|
|
|
2016-08-21 00:31:58 +03:00
|
|
|
    def install(self,
                name,
                requirements=None,
                silent=False,
                after_update=False,
                force=False):
        """Install a package given its spec.

        :param name: package name, "name@req", registry ID or URL
        :param requirements: version requirements (SemVer spec)
        :param silent: suppress informational output
        :param after_update: internal flag; skips telemetry when the
            install happens as part of an update
        :param force: reinstall even when already installed
        :return: installed package directory (or None)
        """
        pkg_dir = None
        # interprocess lock
        with LockFile(self.package_dir):
            self.cache_reset()

            name, requirements, url = self.parse_pkg_uri(name, requirements)
            package_dir = self.get_package_dir(name, requirements, url)

            # avoid circle dependencies
            if not self.INSTALL_HISTORY:
                self.INSTALL_HISTORY = []
            history_key = "%s-%s-%s" % (name, requirements or "", url or "")
            if history_key in self.INSTALL_HISTORY:
                return package_dir
            self.INSTALL_HISTORY.append(history_key)

            if package_dir and force:
                self.uninstall(package_dir)
                package_dir = None

            if not package_dir or not silent:
                msg = "Installing " + click.style(name, fg="cyan")
                if requirements:
                    msg += " @ " + requirements
                self.print_message(msg)
            if package_dir:
                # already installed and not forced: nothing to do
                if not silent:
                    click.secho(
                        "{name} @ {version} is already installed".format(
                            **self.load_manifest(package_dir)),
                        fg="yellow")
                return package_dir

            if url:
                pkg_dir = self._install_from_url(
                    name, url, requirements, track=True)
            else:
                pkg_dir = self._install_from_piorepo(name, requirements)

            if not pkg_dir or not self.manifest_exists(pkg_dir):
                raise exception.PackageInstallError(name, requirements or "*",
                                                    util.get_systype())

            manifest = self.load_manifest(pkg_dir)
            assert manifest

            if not after_update:
                telemetry.on_event(
                    category=self.__class__.__name__,
                    action="Install",
                    label=manifest['name'])

            if not silent:
                click.secho(
                    "{name} @ {version} has been successfully installed!".
                    format(**manifest),
                    fg="green")

        return pkg_dir
|
|
|
|
|
2018-03-16 13:33:59 +02:00
|
|
|
def uninstall(self, package, requirements=None, after_update=False):
    """Remove an installed package.

    :param package: a package spec (name with optional requirements/URL)
        or a path to an already installed package directory
    :param requirements: optional version requirements; ignored when
        ``package`` is a directory path
    :param after_update: when True, skip the "Uninstall" telemetry event
        because the removal is one half of an update cycle
    :return: True on success
    :raises exception.UnknownPackage: if no matching package is installed
    """
    # interprocess lock
    with LockFile(self.package_dir):
        self.cache_reset()
        if isdir(package) and self.get_package_by_dir(package):
            pkg_dir = package
        else:
            name, requirements, url = self.parse_pkg_uri(
                package, requirements)
            pkg_dir = self.get_package_dir(name, requirements, url)

        if not pkg_dir:
            raise exception.UnknownPackage(
                "%s @ %s" % (package, requirements or "*"))

        manifest = self.load_manifest(pkg_dir)
        click.echo(
            "Uninstalling %s @ %s: \t" % (click.style(
                manifest['name'], fg="cyan"), manifest['version']),
            nl=False)

        # a symlinked package points at external sources; remove only
        # the link, never the linked directory itself
        if islink(pkg_dir):
            os.unlink(pkg_dir)
        else:
            util.rmtree_(pkg_dir)
        self.cache_reset()

        # unfix package with the same name: if another installed copy
        # is version-pinned (its dirname carries an "@" marker), move it
        # back to the canonical install dirname.
        # FIX: check only the directory NAME for "@"; the previous
        # `"@" in pkg_dir` test matched the whole path and could
        # false-positive when a parent directory contains "@"
        pkg_dir = self.get_package_dir(manifest['name'])
        if pkg_dir and "@" in basename(pkg_dir):
            shutil.move(
                pkg_dir,
                join(self.package_dir, self.get_install_dirname(manifest)))
            self.cache_reset()

        click.echo("[%s]" % click.style("OK", fg="green"))

        if not after_update:
            telemetry.on_event(
                category=self.__class__.__name__,
                action="Uninstall",
                label=manifest['name'])

        return True
|
2016-05-26 19:43:36 +03:00
|
|
|
|
2017-09-06 20:57:56 +03:00
|
|
|
def update(self, package, requirements=None, only_check=False):
    """Check for (and optionally apply) a newer version of a package.

    :param package: a package spec or a path to an installed package dir
    :param requirements: optional version requirements for the check
    :param only_check: report the outdated state without updating
    :return: None when off-line, True otherwise
    :raises exception.UnknownPackage: if the package is not installed
    """
    # Resolve the installed package directory from either a path or a spec
    if isdir(package) and self.get_package_by_dir(package):
        pkg_dir = package
    else:
        pkg_dir = self.get_package_dir(*self.parse_pkg_uri(package))
    if not pkg_dir:
        raise exception.UnknownPackage(
            "%s @ %s" % (package, requirements or "*"))

    manifest = self.load_manifest(pkg_dir)
    name = manifest['name']

    action = "Checking" if only_check else "Updating"
    click.echo(
        "{} {:<40} @ {:<15}".format(action,
                                    click.style(manifest['name'], fg="cyan"),
                                    manifest['version']),
        nl=False)

    # without connectivity we can neither check nor fetch anything
    if not util.internet_on():
        click.echo("[%s]" % (click.style("Off-line", fg="yellow")))
        return None

    # outdated() -> version string (newer exists), False (current),
    # or None (detached / not trackable)
    latest = self.outdated(pkg_dir, requirements)
    if latest is False:
        status, color = "Up-to-date", "green"
    elif latest:
        status, color = latest, "red"
    else:
        status, color = "Detached", "yellow"
    click.echo("[%s]" % (click.style(status, fg=color)))

    if only_check or not latest:
        return True

    if "__src_url" in manifest:
        # VCS-sourced package: pull new revision in place
        vcs = VCSClientFactory.newClient(pkg_dir, manifest['__src_url'])
        assert vcs.update()
        self._update_src_manifest(
            dict(version=vcs.get_current_revision()), vcs.storage_dir)
    else:
        # registry package: replace old install with the latest version
        self.uninstall(pkg_dir, after_update=True)
        self.install(name, latest, after_update=True)

    telemetry.on_event(
        category=self.__class__.__name__,
        action="Update",
        label=manifest['name'])

    return True
|
2014-11-29 22:48:15 +02:00
|
|
|
|
2016-05-26 19:43:36 +03:00
|
|
|
|
2016-07-11 13:27:30 +03:00
|
|
|
class PackageManager(BasePkgManager):
|
|
|
|
|
2016-12-05 18:51:25 +02:00
|
|
|
FILE_CACHE_VALID = None # disable package caching
|
|
|
|
|
2016-07-11 13:27:30 +03:00
|
|
|
@property
def manifest_names(self):
    """File names that may hold a package manifest."""
    names = ["package.json"]
    return names
|