tools: bugfix - broken export script when working with the esp-idf file

+ bugfix IDF_PATH detection in sh and dash shells.
+ created classes that represent the idf-env.json file
This commit is contained in:
Marek Fiala
2022-03-19 21:37:16 +01:00
parent 45c1d1cba2
commit 0bf264a948
2 changed files with 340 additions and 155 deletions

export.sh

@@ -36,6 +36,14 @@ __script_dir(){
echo "$script_dir" echo "$script_dir"
} }
__is_dir_esp_idf(){
if [ ! -f "$1/tools/idf.py" ] || [ ! -f "$1/tools/idf_tools.py" ]
then
# The echo command here is not used for printing to the terminal, but as a non-empty return value from the function.
echo "THIS DIRECTORY IS NOT ESP-IDF"
fi
}
__main() {
# The file doesn't have executable permissions, so this shouldn't really happen.
# Doing this in case someone tries to chmod +x it and execute...
@@ -58,22 +66,28 @@ __main() {
elif [ -n "${ZSH_VERSION-}" ]
then
self_path="${(%):-%x}"
else
echo "Could not detect IDF_PATH. Please set it before sourcing this script:"
echo " export IDF_PATH=(add path here)"
return 1
fi
script_dir=$(__script_dir)
# Since sh or dash shells can't detect script_dir correctly, check if script_dir looks like an IDF directory
is_script_dir_esp_idf=$(__is_dir_esp_idf ${script_dir})
if [ -z "${IDF_PATH}" ]
then
# IDF_PATH not set in the environment.
if [ -n "${is_script_dir_esp_idf}" ]
then
echo "Could not detect IDF_PATH. Please set it before sourcing this script:"
echo " export IDF_PATH=(add path here)"
return 1
fi
export IDF_PATH="${script_dir}"
echo "Setting IDF_PATH to '${IDF_PATH}'"
else
# IDF_PATH came from the environment, check if the path is valid
if [ ! "${IDF_PATH}" = "${script_dir}" ] # Set IDF_PATH to script_dir, if script_dir looks like an IDF directory
if [ ! "${IDF_PATH}" = "${script_dir}" ] && [ -z "${is_script_dir_esp_idf}" ]
then
# Changing IDF_PATH is important when there are 2 ESP-IDF versions in different directories.
# Sourcing this script without the change would cause the wrong export script to be sourced.
@@ -81,7 +95,8 @@ __main() {
export IDF_PATH="${script_dir}"
fi
# Check if this path looks like an IDF directory
if [ ! -f "${IDF_PATH}/tools/idf.py" ] || [ ! -f "${IDF_PATH}/tools/idf_tools.py" ]
is_idf_path_esp_idf=$(__is_dir_esp_idf ${IDF_PATH})
if [ -n "${is_idf_path_esp_idf}" ]
then
echo "IDF_PATH is set to '${IDF_PATH}', but it doesn't look like an ESP-IDF directory."
echo "If you have set IDF_PATH manually, check if the path is correct."
@@ -175,12 +190,15 @@ __cleanup() {
unset SOURCE_BASH
unset WARNING_MSG
unset uninstall
unset is_idf_path_esp_idf
unset is_script_dir_esp_idf
unset __realpath
unset __main
unset __verbose
unset __enable_autocomplete
unset __cleanup
unset __is_dir_esp_idf
# Not unsetting IDF_PYTHON_ENV_PATH, it can be used by IDF build system
# to check whether we are using a private Python environment

tools/idf_tools.py

@@ -46,6 +46,7 @@ import sys
import tarfile
import time
from collections import OrderedDict, namedtuple
from json import JSONEncoder
from ssl import SSLContext # noqa: F401
from tarfile import TarFile # noqa: F401
from zipfile import ZipFile
@@ -937,6 +938,222 @@ class IDFTool(object):
return tool_json
class IDFEnvEncoder(JSONEncoder):
"""
IDFEnvEncoder is used to encode the IDFEnv, IDFRecord and SelectedIDFRecord classes to JSON in a readable format, not as (__main__.IDFRecord object at '0x7fcxx').
Additionally, the leading underscore of private properties is removed during processing.
"""
def default(self, obj): # type: ignore
return {k.lstrip('_'): v for k, v in vars(obj).items()}
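For context, a minimal usage sketch of the encoder (the Dummy class and values below are made up for illustration, not part of this change): json.dumps picks up IDFEnvEncoder through the cls argument, and the leading underscore of private attributes is dropped in the output.
    # Illustrative sketch only: encode an arbitrary object with IDFEnvEncoder.
    class Dummy:
        def __init__(self) -> None:
            self.version = '5.0'
            self._targets = ['esp32']   # private attribute, serialized as "targets"

    print(json.dumps(Dummy(), cls=IDFEnvEncoder, indent=4))
    # prints: {"version": "5.0", "targets": ["esp32"]}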
class IDFRecord:
"""
IDFRecord represents one record of an ESP-IDF installed on the system.
Contains:
* version - actual version of ESP-IDF (example '5.0')
* path - absolute path to the ESP-IDF
* features - features used with this ESP-IDF (default ['core'])
* targets - ESP chips for which the needed toolchains are installed (example ['esp32', 'esp32s2'])
- Default value is [], since the user has not defined any targets yet
"""
def __init__(self) -> None:
self.version = '' # type: str
self.path = '' # type: str
self._features = ['core'] # type: list[str]
self._targets = [] # type: list[str]
def __iter__(self): # type: ignore
yield from {
'version': self.version,
'path': self.path,
'features': self._features,
'targets': self._targets
}.items()
def __str__(self) -> str:
return json.dumps(dict(self), ensure_ascii=False, indent=4) # type: ignore
def __repr__(self) -> str:
return self.__str__()
@property
def features(self) -> List[str]:
return self._features
def extend_features(self, features: List[str]) -> None:
# Features can be only updated, but always maintain existing features.
self._features = list(set(features + self._features))
@property
def targets(self) -> List[str]:
return self._targets
def extend_targets(self, targets: List[str]) -> None:
# Targets can be only updated, but always maintain existing targets.
self._targets = list(set(targets + self._targets))
@classmethod
def get_active_idf_record(cls): # type: () -> IDFRecord
idf_record_obj = cls()
idf_record_obj.version = get_idf_version()
idf_record_obj.path = global_idf_path or ''
return idf_record_obj
@classmethod
def get_idf_record_from_dict(cls, record_dict): # type: (Dict[str, Any]) -> IDFRecord
idf_record_obj = cls()
try:
idf_record_obj.version = record_dict['version']
idf_record_obj.path = record_dict['path']
except KeyError:
# If any of these key attributes, which cannot be replaced with default values, is missing, raise ValueError
raise ValueError('Inconsistent record')
idf_record_obj.extend_features(record_dict.get('features', []))
idf_record_obj.extend_targets(record_dict.get('targets', []))
unset = record_dict.get('unset')
# Records with unset are type SelectedIDFRecord
if unset:
return SelectedIDFRecord(idf_record_obj, unset)
return idf_record_obj
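As a usage sketch of the class above (all values are illustrative), a record can be rebuilt from a dictionary parsed out of idf-env.json and then extended without losing what was already there:
    # Illustrative only: build a record from a dict and extend it.
    record = IDFRecord.get_idf_record_from_dict({
        'version': '5.0',
        'path': '/home/user/esp/esp-idf',
        'targets': ['esp32'],
    })
    record.extend_targets(['esp32s2'])   # existing targets are kept
    print(sorted(record.targets))        # ['esp32', 'esp32s2']
    print(record.features)               # ['core'] stays as the default feature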
class SelectedIDFRecord(IDFRecord):
"""
SelectedIDFRecord extends IDFRecord by unset attribute
* unset - global variables that need to be removed from the environment when the active esp-idf environment is being deactivated
"""
# The parent IDFRecord constructor is not called because it creates an instance with default values,
# whereas the SelectedIDFRecord constructor is called only to extend an existing IDFRecord instance.
def __init__(self, idf_record_obj: IDFRecord, unset: Dict[str, Any]):
self.version = idf_record_obj.version
self.path = idf_record_obj.path
self._targets = idf_record_obj.targets
self._features = idf_record_obj.features
self.unset = unset
def __iter__(self): # type: ignore
yield from {
'version': self.version,
'path': self.path,
'features': self._features,
'targets': self._targets,
'unset': self.unset
}.items()
def __str__(self) -> str:
return json.dumps(dict(self), ensure_ascii=False, indent=4) # type: ignore
def __repr__(self) -> str:
return self.__str__()
# When there is no need to store the unset attr with the IDF record, cast it back from SelectedIDFRecord to IDFRecord
def cast_to_idf_record(self) -> IDFRecord:
idf_record_obj = IDFRecord()
idf_record_obj.version = self.version
idf_record_obj.path = self.path
idf_record_obj._targets = self._targets
idf_record_obj._features = self._features
return idf_record_obj
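A short sketch of the intended round trip (values are illustrative): the active record gets wrapped together with its unset variables while an environment is exported, and is cast back to a plain record once it is no longer the selected one:
    # Illustrative only: wrap a record with 'unset' data and cast it back.
    record = IDFRecord()
    record.version, record.path = '5.0', '/home/user/esp/esp-idf'
    selected = SelectedIDFRecord(record, {'PATH': ['/home/user/.espressif/tools/bin']})
    plain = selected.cast_to_idf_record()    # drops the 'unset' payload again
    assert not hasattr(plain, 'unset')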
class IDFEnv:
"""
IDFEnv represents the ESP-IDF environments installed on the system. All information is saved to and loaded from IDF_ENV_FILE
Contains:
* idf_selected_id - ID of the selected ESP-IDF from idf_installed. The ID is a combination of the ESP-IDF absolute path and version
* idf_installed - all ESP-IDF environments installed on the system
* idf_previous_id - ID of the ESP-IDF which was active before switching to idf_selected_id
"""
def __init__(self) -> None:
active_idf_id = active_repo_id()
self.idf_selected_id = active_idf_id # type: str
self.idf_installed = {active_idf_id: IDFRecord.get_active_idf_record()} # type: Dict[str, IDFRecord]
self.idf_previous_id = '' # type: str
def __iter__(self): # type: ignore
yield from {
'idfSelectedId': self.idf_selected_id,
'idfInstalled': self.idf_installed,
'idfPreviousId': self.idf_previous_id
}.items()
def __str__(self) -> str:
return json.dumps(dict(self), cls=IDFEnvEncoder, ensure_ascii=False, indent=4) # type: ignore
def __repr__(self) -> str:
return self.__str__()
def save(self) -> None:
try:
if global_idf_tools_path: # mypy fix for Optional[str] in the next call
# the directory doesn't exist if this is run on a clean system the first time
mkdir_p(global_idf_tools_path)
with open(os.path.join(global_idf_tools_path or '', IDF_ENV_FILE), 'w') as w:
json.dump(dict(self), w, cls=IDFEnvEncoder, ensure_ascii=False, indent=4) # type: ignore
except (IOError, OSError):
fatal('File {} is not accessible to write. '.format(os.path.join(global_idf_tools_path or '', IDF_ENV_FILE)))
raise SystemExit(1)
def get_active_idf_record(self) -> IDFRecord:
return self.idf_installed[active_repo_id()]
def get_selected_idf_record(self) -> IDFRecord:
return self.idf_installed[self.idf_selected_id]
def get_previous_idf_record(self) -> Union[IDFRecord, str]:
if self.idf_previous_id != '':
return self.idf_installed[self.idf_previous_id]
return ''
def idf_installed_update(self, idf_name: str, idf_value: IDFRecord) -> None:
self.idf_installed[idf_name] = idf_value
@classmethod
def get_idf_env(cls): # type: () -> IDFEnv
# The IDFEnv class is used to process IDF_ENV_FILE. The constructor is therefore called only in this method, which loads the file and checks its contents
idf_env_obj = cls()
try:
idf_env_file_path = os.path.join(global_idf_tools_path or '', IDF_ENV_FILE)
with open(idf_env_file_path, 'r') as idf_env_file:
idf_env_json = json.load(idf_env_file)
try:
idf_installed = idf_env_json['idfInstalled']
except KeyError:
# If no ESP-IDF record is found in loaded file, do not update and keep default value from constructor
pass
else:
# Load and verify ESP-IDF records found in IDF_ENV_FILE
idf_installed.pop('sha', None)
idf_installed_verified = {} # type: dict[str, IDFRecord]
for idf in idf_installed:
try:
idf_installed_verified[idf] = IDFRecord.get_idf_record_from_dict(idf_installed[idf])
except ValueError as err:
warn('{} "{}" found in {}, removing this record.' .format(err, idf, idf_env_file_path))
# Combine the loaded ESP-IDF records with the one from the constructor, to be sure that there is an active ESP-IDF record in idf_installed
# If the active record is already in idf_installed, it is not overwritten
idf_env_obj.idf_installed = dict(idf_env_obj.idf_installed, **idf_installed_verified)
for file_var_name, class_var_name in [('idfSelectedId', 'idf_selected_id'), ('idfPreviousId', 'idf_previous_id')]:
idf_env_value = idf_env_json.get(file_var_name)
# Update the variable only if it meets the given conditions, otherwise keep default value from constructor
if idf_env_value in idf_env_obj.idf_installed and idf_env_value != 'sha':
idf_env_obj.__setattr__(class_var_name, idf_env_value)
except (IOError, OSError, ValueError):
# If IDF_ENV_FILE is missing, empty or not readable, use the default values from the constructor
pass
return idf_env_obj
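To make the resulting file shape concrete, here is a sketch of what IDFEnv.save() would write to idf-env.json for a single installed copy; the paths, the version and the exact ID format are assumptions for illustration (the ID is described above as a combination of the absolute path and the version):
    # Typical call sequence; the commented JSON is an illustrative result, not a captured file.
    idf_env_obj = IDFEnv.get_idf_env()                       # load and validate idf-env.json, if present
    idf_env_obj.get_active_idf_record().extend_targets(['esp32'])
    idf_env_obj.save()
    # idf-env.json could then look roughly like:
    # {
    #     "idfSelectedId": "/home/user/esp/esp-idf-v5.0",
    #     "idfInstalled": {
    #         "/home/user/esp/esp-idf-v5.0": {
    #             "version": "5.0",
    #             "path": "/home/user/esp/esp-idf",
    #             "features": ["core"],
    #             "targets": ["esp32"]
    #         }
    #     },
    #     "idfPreviousId": ""
    # }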
def load_tools_info(): # type: () -> dict[str, IDFTool]
"""
Load tools metadata from tools.json, return a dictionary: tool name - tool info
@@ -1045,64 +1262,9 @@ def get_python_env_path() -> Tuple[str, str, str, str]:
return idf_python_env_path, idf_python_export_path, virtualenv_python, idf_version
def get_idf_env() -> Any:
def add_and_check_targets(idf_env_obj, targets_str): # type: (IDFEnv, str) -> list[str]
active_repo_init = {
'version': get_idf_version(),
'path': global_idf_path,
'features': [],
'targets': []
} # type: dict[str, Any]
active_idf = active_repo_id()
try:
idf_env_file_path = os.path.join(global_idf_tools_path or '', IDF_ENV_FILE)
with open(idf_env_file_path, 'r') as idf_env_file:
idf_env_json = json.load(idf_env_file)
if active_idf not in idf_env_json['idfInstalled']:
idf_env_json['idfInstalled'][active_idf] = active_repo_init
return idf_env_json
except (IOError, OSError):
return {
'idfSelectedId': active_idf,
'idfPreviousId': '',
'idfInstalled':
{
active_idf: active_repo_init
}
}
def save_idf_env(idf_env_json): # type: (dict[str, Any]) -> None
try:
if global_idf_tools_path: # mypy fix for Optional[str] in the next call
# the directory doesn't exist if this is run on a clean system the first time
mkdir_p(global_idf_tools_path)
with open(os.path.join(global_idf_tools_path or '', IDF_ENV_FILE), 'w') as w:
json.dump(idf_env_json, w, indent=4)
except (IOError, OSError):
fatal('File {} is not accessible to write. '.format(os.path.join(global_idf_tools_path or '', IDF_ENV_FILE)))
raise SystemExit(1)
def update_targets_and_features(idf_env_json, targets_to_update, features_to_update):
# type: (dict[str, Any], Optional[list[str]], Optional[list[str]]) -> tuple[dict[str, Any], list[str], list[str]]
targets, features = get_requested_targets_and_features(idf_env_json)
targets = list(set(targets + targets_to_update)) if targets_to_update else []
features = list(set(features + features_to_update)) if features_to_update else []
update_with = []
if targets:
update_with += [('targets', targets)]
if features:
update_with += [('features', features)]
idf_env_json['idfInstalled'][active_repo_id()].update(update_with)
return idf_env_json, targets, features
def add_and_save_targets(idf_env_json, targets_str): # type: (dict[str, Any], str) -> list[str]
""" """
Define targets from targets_str, check that the target names are valid and save them to idf_env_json. Define targets from targets_str, check that the target names are valid and add them to idf_env_obj
""" """
targets_from_tools_json = get_all_targets_from_tools_json() targets_from_tools_json = get_all_targets_from_tools_json()
invalid_targets = [] invalid_targets = []
@@ -1114,37 +1276,25 @@ def add_and_save_targets(idf_env_json, targets_str): # type: (dict[str, Any], s
if invalid_targets:
warn('Targets: "{}" are not supported. Only allowed options are: {}.'.format(', '.join(invalid_targets), ', '.join(targets_from_tools_json)))
raise SystemExit(1)
# removing duplicates
idf_env_obj.get_active_idf_record().extend_targets(targets)
targets = list(set(targets))
idf_env_json, targets, _ = update_targets_and_features(idf_env_json, targets, None)
else:
idf_env_json, targets, _ = update_targets_and_features(idf_env_json, targets_from_tools_json, None)
idf_env_obj.get_active_idf_record().extend_targets(targets_from_tools_json)
save_idf_env(idf_env_json)
return idf_env_obj.get_active_idf_record().targets
return targets
def feature_to_requirements_path(feature): # type: (str) -> str
return os.path.join(global_idf_path or '', 'tools', 'requirements', 'requirements.{}.txt'.format(feature))
def add_and_save_features(idf_env_json, features_str): # type: (dict[str, Any], str) -> list[str]
def add_and_check_features(idf_env_obj, features_str): # type: (IDFEnv, str) -> list[str]
_, features = get_requested_targets_and_features(idf_env_json)
new_features = []
for new_feature_candidate in features_str.split(','):
if os.path.isfile(feature_to_requirements_path(new_feature_candidate)):
features += [new_feature_candidate]
new_features += [new_feature_candidate]
features = list(set(features + ['core'])) # remove duplicates
idf_env_obj.get_active_idf_record().extend_features(new_features)
idf_env_json, _, features = update_targets_and_features(idf_env_json, None, features)
return idf_env_obj.get_active_idf_record().features
save_idf_env(idf_env_json)
return features
def get_requested_targets_and_features(idf_env_json): # type: (dict[str, Any]) -> tuple[list[str], list[str]]
active_idf = active_repo_id()
targets = idf_env_json['idfInstalled'][active_idf].get('targets', [])
features = idf_env_json['idfInstalled'][active_idf].get('features', [])
return targets, features
def get_all_targets_from_tools_json(): # type: () -> list[str]
@@ -1160,8 +1310,8 @@ def get_all_targets_from_tools_json(): # type: () -> list[str]
return sorted(targets_from_tools_json)
def filter_tools_info(tools_info): # type: (OrderedDict[str, IDFTool]) -> OrderedDict[str,IDFTool]
def filter_tools_info(idf_env_obj, tools_info): # type: (IDFEnv, OrderedDict[str, IDFTool]) -> OrderedDict[str,IDFTool]
targets, _ = get_requested_targets_and_features(get_idf_env())
targets = idf_env_obj.get_active_idf_record().targets
if not targets:
return tools_info
else:
@@ -1171,51 +1321,53 @@ def filter_tools_info(tools_info): # type: (OrderedDict[str, IDFTool]) -> Order
return OrderedDict(filtered_tools_spec)
def add_and_save_unset(idf_env_json, export_dict): # type: (dict[str, Any], dict[str, Any]) -> dict[str, Any]
def add_unset(idf_env_obj, new_unset_vars, args): # type: (IDFEnv, dict[str, Any], list[str]) -> None
"""
Save global variables that need to be removed when the active esp-idf environment is deactivated.
Add global variables that need to be removed when the active esp-idf environment is deactivated.
"""
if export_dict.get('PATH'):
if 'PATH' in new_unset_vars:
export_dict['PATH'] = export_dict['PATH'].split(':')[:-1] # PATH is stored as list of sub-paths without '$PATH'
new_unset_vars['PATH'] = new_unset_vars['PATH'].split(':')[:-1] # PATH is stored as list of sub-paths without '$PATH'
active_idf = active_repo_id()
if active_idf != idf_env_json['idfSelectedId']:
idf_env_json['idfPreviousId'] = idf_env_json['idfSelectedId']
idf_env_json['idfSelectedId'] = active_idf
idf_env_json['idfInstalled'][active_idf]['unset'] = export_dict
previous_idf = idf_env_json['idfPreviousId']
new_unset_vars['PATH'] = new_unset_vars.get('PATH', [])
if previous_idf:
args_add_paths_extras = vars(args).get('add_paths_extras') # remove mypy error with args
idf_env_json['idfInstalled'][previous_idf].pop('unset', None)
new_unset_vars['PATH'] = new_unset_vars['PATH'] + args_add_paths_extras.split(':') if args_add_paths_extras else new_unset_vars['PATH']
save_idf_env(idf_env_json)
return idf_env_json
selected_idf = idf_env_obj.get_selected_idf_record()
# Detect whether new variables are being added to the active ESP-IDF environment, or whether a new terminal without an active ESP-IDF environment is exporting.
if 'IDF_PYTHON_ENV_PATH' in os.environ:
# Adding new variables to SelectedIDFRecord (ESP-IDF env already activated)
if not isinstance(selected_idf, SelectedIDFRecord):
# Versions without the feature of switching between ESP-IDF versions (version <= 4.4) don't have a SelectedIDFRecord -> set a new one
idf_env_obj.idf_installed_update(idf_env_obj.idf_selected_id, SelectedIDFRecord(selected_idf, new_unset_vars))
else:
# SelectedIDFRecord detected -> update
exported_unset_vars = selected_idf.unset
new_unset_vars['PATH'] = list(set(new_unset_vars['PATH'] + exported_unset_vars.get('PATH', []))) # remove duplicates
selected_idf.unset = dict(exported_unset_vars, **new_unset_vars) # merge two dicts
idf_env_obj.idf_installed_update(idf_env_obj.idf_selected_id, selected_idf)
else:
# Resetting new SelectedIDFRecord (new ESP-IDF env is being activated)
idf_env_obj.idf_installed_update(idf_env_obj.idf_selected_id, SelectedIDFRecord(selected_idf, new_unset_vars))
previous_idf = idf_env_obj.get_previous_idf_record()
# If new ESP-IDF environment was activated, the previous one can't be SelectedIDFRecord anymore
if isinstance(previous_idf, SelectedIDFRecord):
idf_env_obj.idf_installed_update(idf_env_obj.idf_previous_id, previous_idf.cast_to_idf_record())
return
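For orientation, a rough sketch of how the recorded unset data is meant to be consumed on deactivation (the real code in deactivate_statement emits shell statements instead of touching os.environ directly; this simplified version only illustrates the idea, and the example values are made up):
    # Simplified illustration of applying a stored 'unset' dict.
    def apply_unset_sketch(unset):  # e.g. {'PATH': ['/home/user/.espressif/tools/bin'], 'OPENOCD_SCRIPTS': '...'}
        env_path = os.getenv('PATH')
        if env_path and 'PATH' in unset:
            # drop the sub-paths that were added when the environment was exported
            os.environ['PATH'] = ':'.join(p for p in env_path.split(':') if p not in unset['PATH'])
        for var in unset:
            if var != 'PATH':
                os.environ.pop(var, None)   # remove the remaining exported variables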
def deactivate_statement(args): # type: (list[str]) -> None
def deactivate_statement(idf_env_obj, args): # type: (IDFEnv, list[str]) -> None
"""
Deactivate statement is a sequence of commands that removes some global variables from the environment,
so the environment gets back to the state it was in before the export.{sh/fish} script was called.
"""
idf_env_json = get_idf_env()
selected_idf = idf_env_obj.get_selected_idf_record()
# Handling idf-env version without feature Switching between ESP-IDF versions (version <= 4.4)
if not isinstance(selected_idf, SelectedIDFRecord):
if 'sha' in idf_env_json['idfInstalled']:
try:
idf_env_json['idfInstalled'].pop('sha')
if idf_env_json['idfPreviousId'] == 'sha':
idf_env_json['idfPreviousId'] = ''
if idf_env_json['idfSelectedId'] == 'sha':
idf_env_json['idfSelectedId'] = active_repo_id()
return
finally:
save_idf_env(idf_env_json)
unset = {}
selected_idf = idf_env_json['idfSelectedId']
if 'unset' not in idf_env_json['idfInstalled'].get(selected_idf, None):
warn('No IDF variables to unset found. Deactivation of previous esp-idf version was unsuccessful.')
return
unset = selected_idf.unset
unset = idf_env_json['idfInstalled'][selected_idf]['unset']
env_path = os.getenv('PATH') # type: Optional[str]
if env_path:
cleared_env_path = ':'.join([k for k in env_path.split(':') if k not in unset['PATH']])
@@ -1241,9 +1393,15 @@ def get_unset_format_and_separator(args): # type: (list[str]) -> Tuple[str, str
return {EXPORT_SHELL: ('unset {}', ';'), EXPORT_KEY_VALUE: ('{}', '\n')}[args.format] # type: ignore
def different_idf_ver_detected() -> bool:
def different_idf_detected() -> bool:
# If an IDF global variable is found, test whether it belongs to a different ESP-IDF version
if 'IDF_TOOLS_EXPORT_CMD' in os.environ:
if global_idf_path != os.path.dirname(os.environ['IDF_TOOLS_EXPORT_CMD']):
return True
# No previous ESP-IDF export detected, nothing to be unset
if not os.getenv('IDF_PYTHON_ENV_PATH') and not os.getenv('OPENOCD_SCRIPTS') and not os.getenv('ESP_IDF_VERSION'):
if all(s not in os.environ for s in ['IDF_PYTHON_ENV_PATH', 'OPENOCD_SCRIPTS', 'ESP_IDF_VERSION']):
return False
# User is exporting the same version as is in env
@@ -1283,7 +1441,7 @@ def action_list(args): # type: ignore
def action_check(args): # type: ignore
tools_info = load_tools_info()
tools_info = filter_tools_info(tools_info)
tools_info = filter_tools_info(IDFEnv.get_idf_env(), tools_info)
not_found_list = []
info('Checking for installed tools...')
for name, tool in tools_info.items():
@@ -1309,13 +1467,15 @@ def action_check(args): # type: ignore
def action_export(args): # type: ignore
idf_env_obj = IDFEnv.get_idf_env()
if args.unset:
if different_idf_ver_detected():
if different_idf_detected():
deactivate_statement(args)
deactivate_statement(idf_env_obj, args)
idf_env_obj.save()
return
tools_info = load_tools_info()
tools_info = filter_tools_info(tools_info)
tools_info = filter_tools_info(idf_env_obj, tools_info)
all_tools_found = True
export_vars = {}
paths_to_export = []
@@ -1397,8 +1557,9 @@ def action_export(args): # type: ignore
if idf_python_export_path not in current_path:
paths_to_export.append(idf_python_export_path)
if not os.getenv('ESP_IDF_VERSION'):
idf_version = get_idf_version()
export_vars['ESP_IDF_VERSION'] = get_idf_version()
if os.getenv('ESP_IDF_VERSION') != idf_version:
export_vars['ESP_IDF_VERSION'] = idf_version
idf_tools_dir = os.path.join(global_idf_path, 'tools')
idf_tools_dir = to_shell_specific_paths([idf_tools_dir])[0]
@@ -1419,16 +1580,16 @@ def action_export(args): # type: ignore
export_statements = export_sep.join([export_format.format(k, v) for k, v in export_vars.items()])
active_idf_id = active_repo_id()
if idf_env_obj.idf_selected_id != active_idf_id:
idf_env_obj.idf_previous_id = idf_env_obj.idf_selected_id
idf_env_obj.idf_selected_id = active_idf_id
if export_statements:
print(export_statements)
idf_env_json = add_and_save_unset(get_idf_env(), export_vars)
add_unset(idf_env_obj, export_vars, args)
if args.add_paths_extras:
unset_dict = idf_env_json['idfInstalled'][idf_env_json['idfSelectedId']]['unset']
idf_env_obj.save()
if 'PATH' not in unset_dict:
unset_dict['PATH'] = args.add_paths_extras.split(':')
else:
unset_dict['PATH'] += args.add_paths_extras.split(':')
save_idf_env(idf_env_json)
if not all_tools_found:
raise SystemExit(1)
@@ -1534,9 +1695,11 @@ def get_tools_spec_and_platform_info(selected_platform, targets, tools_spec,
def action_download(args): # type: ignore
tools_spec = args.tools
targets = [] # type: list[str]
# Installing only single tools, no targets are specified.
# Downloading tools required for defined ESP_targets
if 'required' in tools_spec:
targets = add_and_save_targets(get_idf_env(), args.targets)
idf_env_obj = IDFEnv.get_idf_env()
targets = add_and_check_targets(idf_env_obj, args.targets)
idf_env_obj.save()
tools_spec, tools_info_for_platform = get_tools_spec_and_platform_info(args.platform, targets, args.tools)
@@ -1571,26 +1734,28 @@ def action_install(args): # type: ignore
tools_spec = args.tools # type: ignore
targets = [] # type: list[str]
info('Current system platform: {}'.format(CURRENT_PLATFORM))
# Installing only single tools, no targets are specified.
# No single tool '<tool_name>@<version>' was defined, install whole toolchains
if 'required' in tools_spec:
if 'required' in tools_spec or 'all' in tools_spec:
targets = add_and_save_targets(get_idf_env(), args.targets)
idf_env_obj = IDFEnv.get_idf_env()
targets = add_and_check_targets(idf_env_obj, args.targets)
idf_env_obj.save()
info('Selected targets are: {}'.format(', '.join(targets)))
if not tools_spec or 'required' in tools_spec:
# Installing tools for defined ESP_targets
# Installing tools for all ESP_targets required by the operating system.
if 'required' in tools_spec:
tools_spec = [k for k, v in tools_info.items() if v.get_install_type() == IDFTool.INSTALL_ALWAYS]
# Filtering tools user defined list of ESP_targets
# If only some ESP_targets are defined, filter tools for those
if 'all' not in targets:
if len(get_all_targets_from_tools_json()) != len(targets):
def is_tool_selected(tool): # type: (IDFTool) -> bool
supported_targets = tool.get_supported_targets()
return (any(item in targets for item in supported_targets) or supported_targets == ['all'])
tools_spec = [k for k in tools_spec if is_tool_selected(tools_info[k])]
info('Installing tools: {}'.format(', '.join(tools_spec)))
# Installing tools for all ESP_targets (MacOS, Windows, Linux)
# Installing all available tools for all operating systems (MacOS, Windows, Linux)
elif 'all' in tools_spec:
else:
tools_spec = [k for k, v in tools_info.items() if v.get_install_type() != IDFTool.INSTALL_NEVER]
info('Installing tools: {}'.format(', '.join(tools_spec)))
for tool_spec in tools_spec:
if '@' not in tool_spec:
@@ -1640,7 +1805,9 @@ def get_wheels_dir(): # type: () -> Optional[str]
def get_requirements(new_features): # type: (str) -> list[str]
features = add_and_save_features(get_idf_env(), new_features)
idf_env_obj = IDFEnv.get_idf_env()
features = add_and_check_features(idf_env_obj, new_features)
idf_env_obj.save()
return [feature_to_requirements_path(feature) for feature in features]
@@ -1890,7 +2057,7 @@ def action_uninstall(args): # type: (Any) -> None
return (supported_targets == ['all'] or any(item in targets for item in supported_targets))
tools_info = load_tools_info()
targets, _ = get_requested_targets_and_features(get_idf_env())
targets = IDFEnv.get_idf_env().get_active_idf_record().targets
tools_path = os.path.join(global_idf_tools_path or '', 'tools')
dist_path = os.path.join(global_idf_tools_path or '', 'dist')
used_tools = [k for k, v in tools_info.items() if (v.get_install_type() == IDFTool.INSTALL_ALWAYS and is_tool_selected(tools_info[k]))]