forked from platformio/platformio-core
Specify encoding for "open()" functions
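Python 3's open() falls back to locale.getpreferredencoding(False) whenever no encoding argument is given, so the same file can decode as UTF-8 on Linux and macOS but as cp1252 on a typical Windows install. This commit removes that platform dependency by pinning encoding="utf8" (and naming the mode keyword) on every text-mode open() in the tree. A minimal sketch of the failure mode being avoided; the file name is illustrative, not from the codebase:

import locale

# The implicit default this commit stops relying on:
print(locale.getpreferredencoding(False))  # "UTF-8" on Linux, often "cp1252" on Windows

# With an explicit encoding, the bytes on disk are identical on every platform...
with open("platformio.ini", mode="w", encoding="utf8") as fp:
    fp.write("[env:µcontroller]\n")

# ...and reading with the same explicit encoding round-trips exactly, whereas a
# bare open("platformio.ini") on a cp1252 system would decode the two UTF-8
# bytes of "µ" as the mojibake "Âµ".
with open("platformio.ini", encoding="utf8") as fp:
    assert "µcontroller" in fp.read()

The hunks below apply that one mechanical change throughout, reflowing calls onto multiple lines where the added keywords would overflow the formatter's line length.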
@@ -114,7 +114,7 @@ class State(object):
     def __exit__(self, type_, value, traceback):
         if self.modified:
             try:
-                with open(self.path, "w") as fp:
+                with open(self.path, mode="w", encoding="utf8") as fp:
                     fp.write(json.dumps(self._storage))
             except IOError:
                 raise exception.HomeDirPermissionsError(get_project_core_dir())

@@ -226,7 +226,11 @@ if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS):
     projenv = env
     data = projenv.DumpIDEData(env)
     # dump to file for the further reading by project.helpers.load_project_ide_data
-    with open(projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")), "w") as fp:
+    with open(
+        projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
+        mode="w",
+        encoding="utf8",
+    ) as fp:
         json.dump(data, fp)
     click.echo("\n%s\n" % json.dumps(data))  # pylint: disable=undefined-variable
     env.Exit(0)

@@ -152,7 +152,7 @@ def WriteCompilationDb(target, source, env):
         item["file"] = os.path.abspath(item["file"])
         entries.append(item)

-    with open(str(target[0]), "w") as target_file:
+    with open(str(target[0]), mode="w", encoding="utf8") as target_file:
         json.dump(
             entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
         )

@@ -86,7 +86,9 @@ class LibBuilderFactory(object):
                     fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
                 ):
                     continue
-                with io.open(os.path.join(root, fname), errors="ignore") as fp:
+                with io.open(
+                    os.path.join(root, fname), encoding="utf8", errors="ignore"
+                ) as fp:
                     content = fp.read()
                     if not content:
                         continue

@@ -671,7 +673,7 @@ class MbedLibBuilder(LibBuilderBase):

     def _mbed_conf_append_macros(self, mbed_config_path, macros):
         lines = []
-        with open(mbed_config_path) as fp:
+        with open(mbed_config_path, encoding="utf8") as fp:
             for line in fp.readlines():
                 line = line.strip()
                 if line == "#endif":

@@ -690,7 +692,7 @@ class MbedLibBuilder(LibBuilderBase):
                 if len(tokens) < 2 or tokens[1] not in macros:
                     lines.append(line)
         lines.append("")
-        with open(mbed_config_path, "w") as fp:
+        with open(mbed_config_path, mode="w", encoding="utf8") as fp:
             fp.write("\n".join(lines))


@@ -65,7 +65,7 @@ def _file_long_data(env, data):
     )
     if os.path.isfile(tmp_file):
         return tmp_file
-    with open(tmp_file, "w") as fp:
+    with open(tmp_file, mode="w", encoding="utf8") as fp:
         fp.write(data)
     return tmp_file

@@ -37,7 +37,7 @@ def _run_tool(cmd, env, tool_args):
         makedirs(build_dir)
     tmp_file = join(build_dir, "size-data-longcmd.txt")

-    with open(tmp_file, "w") as fp:
+    with open(tmp_file, mode="w", encoding="utf8") as fp:
         fp.write("\n".join(tool_args))

     cmd.append("@" + tmp_file)

@@ -241,7 +241,9 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument
             file_data.update(v)
             data["memory"]["files"].append(file_data)

-    with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
+    with open(
+        join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
+    ) as fp:
         fp.write(json.dumps(data))


@@ -78,9 +78,9 @@ class ContentCache(object):
         if not os.path.isdir(os.path.dirname(cache_path)):
             os.makedirs(os.path.dirname(cache_path))
         try:
-            with codecs.open(cache_path, "wb", encoding="utf8") as fp:
+            with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
                 fp.write(data)
-            with open(self._db_path, "a") as fp:
+            with open(self._db_path, mode="a", encoding="utf8") as fp:
                 fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
         except UnicodeError:
             if os.path.isfile(cache_path):
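A side note on the codecs.open call above: it already wrote UTF-8 (codecs.open forces binary mode internally and encodes on write), so that hunk only makes the mode keyword explicit. On Python 3 the built-in open() with an encoding covers the same need; a small sketch of the near-equivalence, with an illustrative file name:

import codecs

# Close pairs: codecs.open encodes on write but skips newline translation;
# built-in open() in text mode additionally translates "\n" to the platform
# line ending unless newline="" is passed. codecs.open survives in this
# codebase mostly as a Python 2 leftover.
with codecs.open("db.txt", mode="wb", encoding="utf8") as fp:
    fp.write("key=µ\n")  # str is encoded to UTF-8 by the codecs wrapper

with open("db.txt", mode="w", encoding="utf8", newline="") as fp:
    fp.write("key=µ\n")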
@@ -102,7 +102,7 @@ class ContentCache(object):
         paths_for_delete = [self.get_cache_path(k) for k in keys]
         found = False
         newlines = []
-        with open(self._db_path) as fp:
+        with open(self._db_path, encoding="utf8") as fp:
             for line in fp.readlines():
                 line = line.strip()
                 if "=" not in line:

@@ -129,7 +129,7 @@ class ContentCache(object):
                 pass

         if found and self._lock_dbindex():
-            with open(self._db_path, "w") as fp:
+            with open(self._db_path, mode="w", encoding="utf8") as fp:
                 fp.write("\n".join(newlines) + "\n")
             self._unlock_dbindex()

@@ -40,13 +40,13 @@ class PvsStudioCheckTool(CheckToolBase):  # pylint: disable=too-many-instance-at
         )
         super(PvsStudioCheckTool, self).__init__(*args, **kwargs)

-        with open(self._tmp_cfg_file, "w") as fp:
+        with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
             fp.write(
                 "exclude-path = "
                 + self.config.get_optional_dir("packages").replace("\\", "/")
             )

-        with open(self._tmp_cmd_file, "w") as fp:
+        with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
             fp.write(
                 " ".join(
                     ['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]

@@ -261,7 +261,7 @@ class ProjectRPC:
             return project_dir
         if not os.path.isdir(src_dir):
             os.makedirs(src_dir)
-        with open(main_path, "w") as fp:
+        with open(main_path, mode="w", encoding="utf8") as fp:
             fp.write(main_content.strip())
         return project_dir

@@ -240,7 +240,7 @@ def init_base_project(project_dir):


 def init_include_readme(include_dir):
-    with open(os.path.join(include_dir, "README"), "w") as fp:
+    with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp:
         fp.write(
             """
 This directory is intended for project header files.

@@ -286,7 +286,7 @@ https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html


 def init_lib_readme(lib_dir):
-    with open(os.path.join(lib_dir, "README"), "w") as fp:
+    with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp:
         fp.write(
             """
 This directory is intended for project specific (private) libraries.

@@ -339,7 +339,7 @@ More information about PlatformIO Library Dependency Finder


 def init_test_readme(test_dir):
-    with open(os.path.join(test_dir, "README"), "w") as fp:
+    with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp:
         fp.write(
             """
 This directory is intended for PlatformIO Unit Testing and project tests.

@@ -360,7 +360,7 @@ def init_cvs_ignore(project_dir):
     conf_path = os.path.join(project_dir, ".gitignore")
     if os.path.isfile(conf_path):
         return
-    with open(conf_path, "w") as fp:
+    with open(conf_path, mode="w", encoding="utf8") as fp:
         fp.write(".pio\n")


@@ -173,7 +173,11 @@ class DeviceMonitorClient(  # pylint: disable=too-many-instance-attributes
         address = port.getHost()
         self.log.debug("Serial Bridge is started on {address!r}", address=address)
         if "sock" in self.cmd_options:
-            with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
+            with open(
+                os.path.join(self.cmd_options["sock"], "sock"),
+                mode="w",
+                encoding="utf8",
+            ) as fp:
                 fp.write("socket://localhost:%d" % address.port)

     def client_terminal_stopped(self):

@@ -350,7 +350,7 @@ def device_monitor(ctx, agents, **kwargs):
             sleep(0.1)
         if not t.is_alive():
             return
-        with open(sock_file) as fp:
+        with open(sock_file, encoding="utf8") as fp:
             kwargs["port"] = fp.read()
         ctx.invoke(cmd_device_monitor, **kwargs)
         t.join(2)

@@ -54,11 +54,11 @@ def clean_build_dir(build_dir, config):
     if isdir(build_dir):
         # check project structure
         if isfile(checksum_file):
-            with open(checksum_file) as fp:
+            with open(checksum_file, encoding="utf8") as fp:
                 if fp.read() == checksum:
                     return
         fs.rmtree(build_dir)

     makedirs(build_dir)
-    with open(checksum_file, "w") as fp:
+    with open(checksum_file, mode="w", encoding="utf8") as fp:
         fp.write(checksum)

@@ -42,7 +42,7 @@ def is_completion_code_installed(shell, path):

     import click_completion  # pylint: disable=import-error,import-outside-toplevel

-    with open(path) as fp:
+    with open(path, encoding="utf8") as fp:
         return click_completion.get_code(shell=shell) in fp.read()


@@ -64,7 +64,7 @@ def uninstall_completion_code(shell, path):

     import click_completion  # pylint: disable=import-error,import-outside-toplevel

-    with open(path, "r+") as fp:
+    with open(path, "r+", encoding="utf8") as fp:
         contents = fp.read()
         fp.seek(0)
         fp.truncate()

@@ -224,7 +224,7 @@ class TestProcessorBase(object):
             test_dir,
             "%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")),
         )
-        with open(tmp_file, "w") as fp:
+        with open(tmp_file, mode="w", encoding="utf8") as fp:
            fp.write(data)

         atexit.register(delete_tmptest_files, test_dir)

@@ -103,7 +103,7 @@ def get_pip_package(to_develop):
         os.makedirs(cache_dir)
     pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
     try:
-        with open(pkg_name, "w") as fp:
+        with open(pkg_name, "w", encoding="utf8") as fp:
             r = exec_command(
                 ["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
             )

@@ -154,11 +154,11 @@ def is_prog_obsolete(prog_path):
     new_digest = shasum.hexdigest()
     old_digest = None
     if isfile(prog_hash_path):
-        with open(prog_hash_path) as fp:
+        with open(prog_hash_path, encoding="utf8") as fp:
             old_digest = fp.read()
     if new_digest == old_digest:
         return False
-    with open(prog_hash_path, "w") as fp:
+    with open(prog_hash_path, mode="w", encoding="utf8") as fp:
         fp.write(new_digest)
     return True

@@ -105,7 +105,7 @@ class GDBClientProcess(DebugClientProcess):
         footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
         commands = banner + commands + footer

-        with open(dst, "w") as fp:
+        with open(dst, mode="w", encoding="utf8") as fp:
             fp.write("\n".join(self.debug_config.reveal_patterns(commands)))

     def stdin_data_received(self, data):

@@ -52,7 +52,7 @@ def get_source_dir():

 def load_json(file_path):
     try:
-        with open(file_path, "r") as f:
+        with open(file_path, mode="r", encoding="utf8") as f:
             return json.load(f)
     except ValueError:
         raise exception.InvalidJSONFile(file_path)

@@ -102,7 +102,7 @@ def ensure_udev_rules():

 def _rules_to_set(rules_path):
     result = set()
-    with open(rules_path) as fp:
+    with open(rules_path, encoding="utf8") as fp:
         for line in fp.readlines():
             line = line.strip()
             if not line or line.startswith("#"):

@@ -62,7 +62,9 @@ class LockFile(object):
             else:
                 raise LockFileExists

-        self._fp = open(self._lock_path, "w")  # pylint: disable=consider-using-with
+        self._fp = open(
+            self._lock_path, mode="w", encoding="utf8"
+        )  # pylint: disable=consider-using-with
         try:
             if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
                 fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
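The LockFile hunk above is the one call the commit cannot wrap in a with block: the handle is stored on the instance because fcntl.flock holds the lock only while the descriptor stays open, hence the pylint suppression carried over onto the reflowed statement. A condensed sketch of that pattern, assuming POSIX; the class and method names here are illustrative, not from the codebase:

import fcntl

class SimpleLockFile:
    """Hold an exclusive advisory lock for the lifetime of the open handle."""

    def __init__(self, path):
        self._lock_path = path
        self._fp = None

    def acquire(self):
        # Deliberately not a `with` block: closing the file would drop the lock.
        self._fp = open(self._lock_path, mode="w", encoding="utf8")
        # Non-blocking exclusive lock; raises OSError if another process holds it.
        fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)

    def release(self):
        fcntl.flock(self._fp.fileno(), fcntl.LOCK_UN)
        self._fp.close()
        self._fp = None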
@@ -156,7 +156,9 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
         subprocess.check_call(args + [dep])

     # build manifests
-    with open(os.path.join(target_dir, "package.json"), "w") as fp:
+    with open(
+        os.path.join(target_dir, "package.json"), mode="w", encoding="utf8"
+    ) as fp:
         json.dump(
             dict(
                 name="contrib-pysite",

@@ -44,7 +44,9 @@ class LibraryPackageManager(BasePackageManager):  # pylint: disable=too-many-anc
         root_dir = self.find_library_root(path)

         # automatically generate library manifest
-        with open(os.path.join(root_dir, "library.json"), "w") as fp:
+        with open(
+            os.path.join(root_dir, "library.json"), mode="w", encoding="utf8"
+        ) as fp:
             json.dump(
                 dict(
                     name=spec.name,

@@ -382,12 +382,12 @@ class PackageMetaData(object):
         )

     def dump(self, path):
-        with open(path, "w") as fp:
+        with open(path, mode="w", encoding="utf8") as fp:
             return json.dump(self.as_dict(), fp)

     @staticmethod
     def load(path):
-        with open(path) as fp:
+        with open(path, encoding="utf8") as fp:
             data = json.load(fp)
         if data["spec"]:
             data["spec"] = PackageSpec(**data["spec"])

@@ -181,7 +181,9 @@ class PackagePacker(object):
             and os.path.isdir(os.path.join(src, include[0]))
         ):
             src = os.path.join(src, include[0])
-        with open(os.path.join(src, "library.json"), "w") as fp:
+        with open(
+            os.path.join(src, "library.json"), mode="w", encoding="utf8"
+        ) as fp:
             manifest_updated = manifest.copy()
             del manifest_updated["export"]["include"]
             json.dump(manifest_updated, fp, indent=2, ensure_ascii=False)

@@ -158,7 +158,7 @@ def is_container():
         return True
     if not os.path.isfile("/proc/1/cgroup"):
         return False
-    with open("/proc/1/cgroup") as fp:
+    with open("/proc/1/cgroup", encoding="utf8") as fp:
         return ":/docker/" in fp.read()


@@ -456,7 +456,7 @@ class ProjectConfig(ProjectConfigBase, ProjectConfigDirsMixin):
         path = path or self.path
         if path in self._instances:
             del self._instances[path]
-        with open(path or self.path, "w+") as fp:
+        with open(path or self.path, mode="w+", encoding="utf8") as fp:
             fp.write(CONFIG_HEADER.strip() + "\n\n")
             self._parser.write(fp)
             fp.seek(0)

@@ -177,6 +177,6 @@ def _load_cached_project_ide_data(project_dir, env_names):
     for name in env_names:
         if not os.path.isfile(os.path.join(build_dir, name, "idedata.json")):
             continue
-        with open(os.path.join(build_dir, name, "idedata.json")) as fp:
+        with open(os.path.join(build_dir, name, "idedata.json"), encoding="utf8") as fp:
             result[name] = json.load(fp)
     return result

@@ -46,7 +46,7 @@ def test_download(isolated_pio_core):
     lm.cleanup_expired_downloads()
     assert not os.path.isfile(archive_path)
     # check that key is deleted from DB
-    with open(lm.get_download_usagedb_path()) as fp:
+    with open(lm.get_download_usagedb_path(), encoding="utf8") as fp:
         assert os.path.basename(archive_path) not in fp.read()
