2017-06-05 16:02:39 +03:00
|
|
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
2015-11-18 17:16:17 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2014-05-18 23:38:59 +03:00
|
|
|
|
2020-08-14 16:48:12 +03:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2022-03-28 18:18:51 +03:00
|
|
|
import functools
|
2014-12-27 23:59:20 +02:00
|
|
|
import json
|
2019-08-17 20:55:16 +03:00
|
|
|
import math
|
2015-02-15 23:48:04 +02:00
|
|
|
import os
|
2016-10-13 00:42:16 +03:00
|
|
|
import platform
|
2015-03-09 12:27:54 +02:00
|
|
|
import re
|
2021-05-19 19:43:41 +03:00
|
|
|
import shutil
|
2018-01-16 00:57:06 +02:00
|
|
|
import time
|
2022-03-28 18:18:51 +03:00
|
|
|
from datetime import datetime
|
2015-10-13 18:24:40 +01:00
|
|
|
from glob import glob
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2016-09-01 16:05:02 +03:00
|
|
|
import click
|
2021-04-20 20:28:49 +03:00
|
|
|
import zeroconf
|
2016-09-01 16:05:02 +03:00
|
|
|
|
2021-04-20 20:28:49 +03:00
|
|
|
from platformio import __version__, exception, proc
|
2021-03-19 00:21:44 +02:00
|
|
|
from platformio.compat import IS_MACOS, IS_WINDOWS
|
2020-08-26 15:40:03 +03:00
|
|
|
from platformio.fs import cd, load_json # pylint: disable=unused-import
|
2019-08-12 19:44:37 +03:00
|
|
|
from platformio.proc import exec_command # pylint: disable=unused-import
|
2015-05-25 23:26:35 +03:00
|
|
|
|
|
|
|
|
2015-04-23 14:11:30 +01:00
|
|
|
class memoized(object):
    """Cache a function's return values, optionally expiring entries.

    ``expire`` is either a number of milliseconds (int or digit string)
    or a string with a unit suffix: "s"econds, "m"inutes, "h"ours or
    "d"ays (e.g. ``"10m"``).  Zero disables expiration.  The decorated
    function gains a ``reset()`` attribute that empties the cache.
    """

    def __init__(self, expire=0):
        expire = str(expire)
        if expire.isdigit():
            # a bare number is interpreted as milliseconds
            expire = "%ss" % int(int(expire) / 1000)
        unit_seconds = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert expire.endswith(tuple(unit_seconds))
        self.expire = int(unit_seconds[expire[-1]] * int(expire[:-1]))
        self.cache = {}

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Key on the repr of all arguments (args must be repr-stable).
            key = str(args) + str(kwargs)
            entry = self.cache.get(key)
            expired = entry is not None and (
                self.expire > 0 and entry[0] < time.time() - self.expire
            )
            if entry is None or expired:
                entry = (time.time(), func(*args, **kwargs))
                self.cache[key] = entry
            return entry[1]

        wrapper.reset = self._reset
        return wrapper

    def _reset(self):
        # Clears every cached entry for this decorator instance.
        self.cache.clear()
|
2018-10-24 01:19:39 +03:00
|
|
|
|
2015-04-23 14:11:30 +01:00
|
|
|
|
2017-08-15 22:57:20 +03:00
|
|
|
class throttle(object):
    """Decorator enforcing a minimum delay between calls.

    ``threshhold`` is the minimal interval in milliseconds; when the
    wrapped function is invoked sooner, the call sleeps for the
    remaining time before proceeding.
    """

    def __init__(self, threshhold):
        self.threshhold = threshhold  # minimal call interval, milliseconds
        self.last = 0

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            elapsed_ms = int(round((time.time() - self.last) * 1000))
            remaining_ms = self.threshhold - elapsed_ms
            if remaining_ms > 0:
                # wait out the rest of the interval
                time.sleep(remaining_ms * 0.001)
            self.last = time.time()
            return func(*args, **kwargs)

        return wrapper
|
|
|
|
|
|
|
|
|
2015-09-03 19:04:09 +03:00
|
|
|
def singleton(cls):
    """From PEP-318 http://www.python.org/dev/peps/pep-0318/#examples"""
    instances = {}

    def get_instance(*args, **kwargs):
        # Lazily build the single instance on the first call; later calls
        # ignore their arguments and return the cached object.
        try:
            return instances[cls]
        except KeyError:
            instances[cls] = cls(*args, **kwargs)
            return instances[cls]

    return get_instance
|
|
|
|
|
|
|
|
|
2014-07-31 16:20:31 +03:00
|
|
|
def get_systype():
    """Return a host identifier in the form ``<system>_<arch>``.

    On Windows x86 machines the architecture token is normalized to
    "amd64"/"x86"; when no machine architecture is reported only the
    system name is returned.
    """
    system = platform.system().lower()
    arch = platform.machine().lower()
    if system == "windows" and "x86" in arch:
        arch = "amd64" if "64" in arch else "x86"
    return f"{system}_{arch}" if arch else system
|
2014-06-12 23:29:47 +03:00
|
|
|
|
|
|
|
|
2017-12-18 21:31:49 +02:00
|
|
|
def get_serial_ports(filter_hwid=False):
    """List serial ports reported by PySerial.

    :param filter_hwid: when True, keep only ports exposing a
        ``VID:PID`` hardware identifier
    :return: list of dicts with "port", "description" and "hwid" keys
    :raises exception.GetSerialPortsError: when PySerial is unavailable
    """
    try:
        # pylint: disable=import-outside-toplevel
        from serial.tools.list_ports import comports
    except ImportError:
        raise exception.GetSerialPortsError(os.name)

    result = [
        {"port": port, "description": description, "hwid": hwid}
        for port, description, hwid in comports()
        if port and (not filter_hwid or "VID:PID" in hwid)
    ]

    if filter_hwid:
        return result

    # fix for PySerial: fall back to raw device globbing on macOS when
    # comports() reports nothing
    if not result and IS_MACOS:
        result = [
            {"port": port, "description": "n/a", "hwid": "n/a"}
            for port in glob("/dev/tty.*")
        ]
    return result
|
2015-03-09 12:27:54 +02:00
|
|
|
|
|
|
|
|
2018-01-10 02:06:05 +02:00
|
|
|
# Backward compatibility for PIO Core <3.5: old callers may still import
# the legacy name, which is an alias of get_serial_ports.
get_serialports = get_serial_ports
|
|
|
|
|
|
|
|
|
2017-12-18 21:31:49 +02:00
|
|
|
def get_logical_devices():
    """Enumerate mounted logical drives/volumes of the host.

    :return: list of dicts with "path" and "name" keys ("name" may be
        ``None`` on Windows when no volume label is available)
    """
    if IS_WINDOWS:
        try:
            out = proc.exec_command(
                ["wmic", "logicaldisk", "get", "name,VolumeName"]
            ).get("out", "")
            drive_re = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
            items = []
            for line in out.split("\n"):
                match = drive_re.match(line.strip())
                if not match:
                    continue
                items.append({"path": match.group(1) + "\\", "name": match.group(2)})
            return items
        except WindowsError:  # pylint: disable=undefined-variable
            pass
        # WMI failed -> try "fsutil" instead
        out = proc.exec_command(["fsutil", "fsinfo", "drives"]).get("out", "")
        return [
            {"path": drive, "name": None} for drive in re.findall(r"[A-Z]:\\", out)
        ]

    # POSIX: parse mount points from "df" output
    out = proc.exec_command(["df"]).get("out")
    mount_re = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
    items = []
    for line in out.split("\n"):
        match = mount_re.match(line.strip())
        if not match:
            continue
        items.append({"path": match.group(1), "name": os.path.basename(match.group(1))})
    return items
|
|
|
|
|
|
|
|
|
|
|
|
def get_mdns_services():
    """Discover mDNS/Bonjour services visible on the local network.

    Browses for roughly three seconds on all network interfaces and
    returns a list of dicts with "type", "name", "ip", "port" and
    "properties" keys.
    """

    class mDNSListener(object):
        """Context-manager that records services announced while open."""

        def __init__(self):
            self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
            self._found_types = []
            self._found_services = []

        def __enter__(self):
            # Browse a few well-known service types plus the meta-service
            # that enumerates every advertised type.
            zeroconf.ServiceBrowser(
                self._zc,
                [
                    "_http._tcp.local.",
                    "_hap._tcp.local.",
                    "_services._dns-sd._udp.local.",
                ],
                self,
            )
            return self

        def __exit__(self, etype, value, traceback):
            self._zc.close()

        def add_service(self, zc, type_, name):
            # Skip malformed or undecodable service names.
            try:
                assert zeroconf.service_type_name(name)
                assert str(name)
            except (AssertionError, UnicodeError, zeroconf.BadTypeInNameException):
                return
            if name not in self._found_types:
                # Newly seen type: start browsing it for concrete services.
                self._found_types.append(name)
                zeroconf.ServiceBrowser(self._zc, name, self)
            if type_ in self._found_types:
                s = zc.get_service_info(type_, name)
                if s:
                    self._found_services.append(s)

        def remove_service(self, zc, type_, name):
            # Required by the zeroconf listener interface; removals ignored.
            pass

        def update_service(self, zc, type_, name):
            # Required by the zeroconf listener interface; updates ignored.
            pass

        def get_services(self):
            return self._found_services

    items = []
    with mDNSListener() as mdns:
        # Give the browser time to collect announcements.
        time.sleep(3)
        for service in mdns.get_services():
            properties = None
            if service.properties:
                try:
                    # Decode byte values and verify the result is
                    # JSON-serializable; otherwise drop the properties.
                    properties = {
                        k.decode("utf8"): v.decode("utf8")
                        if isinstance(v, bytes)
                        else v
                        for k, v in service.properties.items()
                    }
                    json.dumps(properties)
                except UnicodeDecodeError:
                    properties = None

            items.append(
                {
                    "type": service.type,
                    "name": service.name,
                    "ip": ", ".join(service.parsed_addresses()),
                    "port": service.port,
                    "properties": properties,
                }
            )
    return items
|
2014-09-04 18:58:12 +03:00
|
|
|
|
|
|
|
|
2020-08-22 17:48:49 +03:00
|
|
|
def pioversion_to_intstr():
    """Legacy for framework-zephyr/scripts/platformio/platformio-build-pre.py"""
    match = re.match(r"^([\d\.]+)", __version__)
    assert match
    # Keep at most the major.minor.patch components as integers.
    numbers = match.group(1).split(".")[:3]
    return list(map(int, numbers))
|
2020-08-16 20:21:30 +03:00
|
|
|
|
|
|
|
|
2018-01-13 19:44:05 +02:00
|
|
|
def items_to_list(items):
    """Normalize *items* to a list.

    Lists pass through unchanged; strings are split on commas with
    whitespace stripped and empty fragments dropped.
    """
    if isinstance(items, list):
        return items
    stripped = (fragment.strip() for fragment in items.split(","))
    return [fragment for fragment in stripped if fragment]
|
2018-01-13 19:44:05 +02:00
|
|
|
|
|
|
|
|
|
|
|
def items_in_list(needle, haystack):
    """Check whether any item of *needle* occurs in *haystack*.

    Both arguments may be lists or comma-separated strings.  A "*"
    wildcard on either side matches everything.  Returns a truthy value
    (``True`` or the non-empty intersection set) on a match and a falsy
    empty set otherwise.
    """
    needle_items = items_to_list(needle)
    haystack_items = items_to_list(haystack)
    wildcard = "*" in needle_items or "*" in haystack_items
    if wildcard:
        return True
    return set(needle_items) & set(haystack_items)
|
|
|
|
|
|
|
|
|
2022-03-28 18:18:51 +03:00
|
|
|
def parse_datetime(datestr):
    """Parse a datetime string into a ``datetime`` object.

    The UTC form ``%Y-%m-%dT%H:%M:%SZ`` is handled explicitly; any other
    value is parsed as an ISO-8601 string.

    :param datestr: datetime string to parse
    :return: naive ``datetime`` (the explicit branch drops the "Z" marker)
    :raises ValueError: when *datestr* is not a recognized format
    """
    if "T" in datestr and "Z" in datestr:
        return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
    # BUGFIX: the previous fallback `datetime.strptime(datestr)` omitted the
    # mandatory format argument and always raised TypeError; parse ISO-8601
    # instead so dates like "2022-03-28 18:18:51" are accepted.
    return datetime.fromisoformat(datestr)
|
2018-01-16 00:57:06 +02:00
|
|
|
|
|
|
|
|
2018-05-25 21:18:08 +03:00
|
|
|
def merge_dicts(d1, d2, path=None):
    """Recursively merge *d2* into *d1* in place and return *d1*.

    Keys whose values are dicts on both sides are merged key by key;
    any other value from *d2* overwrites the one in *d1*.  *path*
    accumulates the key trail for recursive calls.
    """
    if path is None:
        path = []
    for key, value in d2.items():
        both_are_dicts = (
            key in d1 and isinstance(d1[key], dict) and isinstance(value, dict)
        )
        if both_are_dicts:
            merge_dicts(d1[key], value, path + [str(key)])
        else:
            d1[key] = value
    return d1
|
|
|
|
|
|
|
|
|
2019-08-17 20:55:16 +03:00
|
|
|
def print_labeled_bar(label, is_error=False, fg=None):
    """Print *label* centered inside a terminal-wide bar of "=" chars.

    :param label: text to show (may contain click styling)
    :param is_error: write to stderr instead of stdout when True
    :param fg: foreground color name passed to ``click.secho``
    """
    terminal_width, _ = shutil.get_terminal_size()
    # Measure the visible width, ignoring ANSI styling sequences.
    label_width = len(click.unstyle(label))
    fill = "=" * int((terminal_width - label_width - 2) / 2)
    click.secho(" ".join([fill, label, fill]), fg=fg, err=is_error)
|
2019-08-17 20:55:16 +03:00
|
|
|
|
|
|
|
|
2019-08-20 16:49:18 +03:00
|
|
|
def humanize_duration_time(duration):
    """Format a duration in seconds as ``HH:MM:SS.mmm``.

    ``None`` is passed through unchanged so callers can forward
    "no measurement" values.
    """
    if duration is None:
        return duration
    remainder = duration * 1000  # work in milliseconds
    tokens = []
    for unit_ms in (3600000, 60000, 1000, 1):
        whole = math.floor(remainder / unit_ms)
        # The final (millisecond) token rounds instead of truncating.
        tokens.append(int(round(remainder) if unit_ms == 1 else whole))
        remainder -= whole * unit_ms
    return "{:02d}:{:02d}:{:02d}.{:03d}".format(*tokens)
|