Specify encoding for "open()" function calls

This commit is contained in:
Ivan Kravets
2021-08-28 13:10:07 +03:00
parent b9219a2b62
commit d819617d2b
28 changed files with 63 additions and 43 deletions

View File

@ -114,7 +114,7 @@ class State(object):
def __exit__(self, type_, value, traceback):
if self.modified:
try:
with open(self.path, "w") as fp:
with open(self.path, mode="w", encoding="utf8") as fp:
fp.write(json.dumps(self._storage))
except IOError:
raise exception.HomeDirPermissionsError(get_project_core_dir())

View File

@ -226,7 +226,11 @@ if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS):
projenv = env
data = projenv.DumpIDEData(env)
# dump to file for the further reading by project.helpers.load_project_ide_data
with open(projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")), "w") as fp:
with open(
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
mode="w",
encoding="utf8",
) as fp:
json.dump(data, fp)
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
env.Exit(0)

View File

@ -152,7 +152,7 @@ def WriteCompilationDb(target, source, env):
item["file"] = os.path.abspath(item["file"])
entries.append(item)
with open(str(target[0]), "w") as target_file:
with open(str(target[0]), mode="w", encoding="utf8") as target_file:
json.dump(
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
)

View File

@ -86,7 +86,9 @@ class LibBuilderFactory(object):
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
):
continue
with io.open(os.path.join(root, fname), errors="ignore") as fp:
with io.open(
os.path.join(root, fname), encoding="utf8", errors="ignore"
) as fp:
content = fp.read()
if not content:
continue
@ -671,7 +673,7 @@ class MbedLibBuilder(LibBuilderBase):
def _mbed_conf_append_macros(self, mbed_config_path, macros):
lines = []
with open(mbed_config_path) as fp:
with open(mbed_config_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if line == "#endif":
@ -690,7 +692,7 @@ class MbedLibBuilder(LibBuilderBase):
if len(tokens) < 2 or tokens[1] not in macros:
lines.append(line)
lines.append("")
with open(mbed_config_path, "w") as fp:
with open(mbed_config_path, mode="w", encoding="utf8") as fp:
fp.write("\n".join(lines))

View File

@ -65,7 +65,7 @@ def _file_long_data(env, data):
)
if os.path.isfile(tmp_file):
return tmp_file
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write(data)
return tmp_file

View File

@ -37,7 +37,7 @@ def _run_tool(cmd, env, tool_args):
makedirs(build_dir)
tmp_file = join(build_dir, "size-data-longcmd.txt")
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write("\n".join(tool_args))
cmd.append("@" + tmp_file)
@ -241,7 +241,9 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
file_data.update(v)
data["memory"]["files"].append(file_data)
with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
with open(
join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
) as fp:
fp.write(json.dumps(data))

View File

@ -78,9 +78,9 @@ class ContentCache(object):
if not os.path.isdir(os.path.dirname(cache_path)):
os.makedirs(os.path.dirname(cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
with open(self._db_path, mode="a", encoding="utf8") as fp:
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
except UnicodeError:
if os.path.isfile(cache_path):
@ -102,7 +102,7 @@ class ContentCache(object):
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path) as fp:
with open(self._db_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
@ -129,7 +129,7 @@ class ContentCache(object):
pass
if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
with open(self._db_path, mode="w", encoding="utf8") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()

View File

@ -40,13 +40,13 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
)
super(PvsStudioCheckTool, self).__init__(*args, **kwargs)
with open(self._tmp_cfg_file, "w") as fp:
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
fp.write(
"exclude-path = "
+ self.config.get_optional_dir("packages").replace("\\", "/")
)
with open(self._tmp_cmd_file, "w") as fp:
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
fp.write(
" ".join(
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]

View File

@ -261,7 +261,7 @@ class ProjectRPC:
return project_dir
if not os.path.isdir(src_dir):
os.makedirs(src_dir)
with open(main_path, "w") as fp:
with open(main_path, mode="w", encoding="utf8") as fp:
fp.write(main_content.strip())
return project_dir

View File

@ -240,7 +240,7 @@ def init_base_project(project_dir):
def init_include_readme(include_dir):
with open(os.path.join(include_dir, "README"), "w") as fp:
with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for project header files.
@ -286,7 +286,7 @@ https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
def init_lib_readme(lib_dir):
with open(os.path.join(lib_dir, "README"), "w") as fp:
with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for project specific (private) libraries.
@ -339,7 +339,7 @@ More information about PlatformIO Library Dependency Finder
def init_test_readme(test_dir):
with open(os.path.join(test_dir, "README"), "w") as fp:
with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for PlatformIO Unit Testing and project tests.
@ -360,7 +360,7 @@ def init_cvs_ignore(project_dir):
conf_path = os.path.join(project_dir, ".gitignore")
if os.path.isfile(conf_path):
return
with open(conf_path, "w") as fp:
with open(conf_path, mode="w", encoding="utf8") as fp:
fp.write(".pio\n")

View File

@ -173,7 +173,11 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
address = port.getHost()
self.log.debug("Serial Bridge is started on {address!r}", address=address)
if "sock" in self.cmd_options:
with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
with open(
os.path.join(self.cmd_options["sock"], "sock"),
mode="w",
encoding="utf8",
) as fp:
fp.write("socket://localhost:%d" % address.port)
def client_terminal_stopped(self):

View File

@ -350,7 +350,7 @@ def device_monitor(ctx, agents, **kwargs):
sleep(0.1)
if not t.is_alive():
return
with open(sock_file) as fp:
with open(sock_file, encoding="utf8") as fp:
kwargs["port"] = fp.read()
ctx.invoke(cmd_device_monitor, **kwargs)
t.join(2)

View File

@ -54,11 +54,11 @@ def clean_build_dir(build_dir, config):
if isdir(build_dir):
# check project structure
if isfile(checksum_file):
with open(checksum_file) as fp:
with open(checksum_file, encoding="utf8") as fp:
if fp.read() == checksum:
return
fs.rmtree(build_dir)
makedirs(build_dir)
with open(checksum_file, "w") as fp:
with open(checksum_file, mode="w", encoding="utf8") as fp:
fp.write(checksum)

View File

@ -42,7 +42,7 @@ def is_completion_code_installed(shell, path):
import click_completion # pylint: disable=import-error,import-outside-toplevel
with open(path) as fp:
with open(path, encoding="utf8") as fp:
return click_completion.get_code(shell=shell) in fp.read()
@ -64,7 +64,7 @@ def uninstall_completion_code(shell, path):
import click_completion # pylint: disable=import-error,import-outside-toplevel
with open(path, "r+") as fp:
with open(path, "r+", encoding="utf8") as fp:
contents = fp.read()
fp.seek(0)
fp.truncate()

View File

@ -224,7 +224,7 @@ class TestProcessorBase(object):
test_dir,
"%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")),
)
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write(data)
atexit.register(delete_tmptest_files, test_dir)

View File

@ -103,7 +103,7 @@ def get_pip_package(to_develop):
os.makedirs(cache_dir)
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
try:
with open(pkg_name, "w") as fp:
with open(pkg_name, "w", encoding="utf8") as fp:
r = exec_command(
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
)

View File

@ -154,11 +154,11 @@ def is_prog_obsolete(prog_path):
new_digest = shasum.hexdigest()
old_digest = None
if isfile(prog_hash_path):
with open(prog_hash_path) as fp:
with open(prog_hash_path, encoding="utf8") as fp:
old_digest = fp.read()
if new_digest == old_digest:
return False
with open(prog_hash_path, "w") as fp:
with open(prog_hash_path, mode="w", encoding="utf8") as fp:
fp.write(new_digest)
return True

View File

@ -105,7 +105,7 @@ class GDBClientProcess(DebugClientProcess):
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
commands = banner + commands + footer
with open(dst, "w") as fp:
with open(dst, mode="w", encoding="utf8") as fp:
fp.write("\n".join(self.debug_config.reveal_patterns(commands)))
def stdin_data_received(self, data):

View File

@ -52,7 +52,7 @@ def get_source_dir():
def load_json(file_path):
try:
with open(file_path, "r") as f:
with open(file_path, mode="r", encoding="utf8") as f:
return json.load(f)
except ValueError:
raise exception.InvalidJSONFile(file_path)
@ -102,7 +102,7 @@ def ensure_udev_rules():
def _rules_to_set(rules_path):
result = set()
with open(rules_path) as fp:
with open(rules_path, encoding="utf8") as fp:
for line in fp.readlines():
line = line.strip()
if not line or line.startswith("#"):

View File

@ -62,7 +62,9 @@ class LockFile(object):
else:
raise LockFileExists
self._fp = open(self._lock_path, "w") # pylint: disable=consider-using-with
self._fp = open(
self._lock_path, mode="w", encoding="utf8"
) # pylint: disable=consider-using-with
try:
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)

View File

@ -156,7 +156,9 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
subprocess.check_call(args + [dep])
# build manifests
with open(os.path.join(target_dir, "package.json"), "w") as fp:
with open(
os.path.join(target_dir, "package.json"), mode="w", encoding="utf8"
) as fp:
json.dump(
dict(
name="contrib-pysite",

View File

@ -44,7 +44,9 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
root_dir = self.find_library_root(path)
# automatically generate library manifest
with open(os.path.join(root_dir, "library.json"), "w") as fp:
with open(
os.path.join(root_dir, "library.json"), mode="w", encoding="utf8"
) as fp:
json.dump(
dict(
name=spec.name,

View File

@ -382,12 +382,12 @@ class PackageMetaData(object):
)
def dump(self, path):
with open(path, "w") as fp:
with open(path, mode="w", encoding="utf8") as fp:
return json.dump(self.as_dict(), fp)
@staticmethod
def load(path):
with open(path) as fp:
with open(path, encoding="utf8") as fp:
data = json.load(fp)
if data["spec"]:
data["spec"] = PackageSpec(**data["spec"])

View File

@ -181,7 +181,9 @@ class PackagePacker(object):
and os.path.isdir(os.path.join(src, include[0]))
):
src = os.path.join(src, include[0])
with open(os.path.join(src, "library.json"), "w") as fp:
with open(
os.path.join(src, "library.json"), mode="w", encoding="utf8"
) as fp:
manifest_updated = manifest.copy()
del manifest_updated["export"]["include"]
json.dump(manifest_updated, fp, indent=2, ensure_ascii=False)

View File

@ -158,7 +158,7 @@ def is_container():
return True
if not os.path.isfile("/proc/1/cgroup"):
return False
with open("/proc/1/cgroup") as fp:
with open("/proc/1/cgroup", encoding="utf8") as fp:
return ":/docker/" in fp.read()

View File

@ -456,7 +456,7 @@ class ProjectConfig(ProjectConfigBase, ProjectConfigDirsMixin):
path = path or self.path
if path in self._instances:
del self._instances[path]
with open(path or self.path, "w+") as fp:
with open(path or self.path, mode="w+", encoding="utf8") as fp:
fp.write(CONFIG_HEADER.strip() + "\n\n")
self._parser.write(fp)
fp.seek(0)

View File

@ -177,6 +177,6 @@ def _load_cached_project_ide_data(project_dir, env_names):
for name in env_names:
if not os.path.isfile(os.path.join(build_dir, name, "idedata.json")):
continue
with open(os.path.join(build_dir, name, "idedata.json")) as fp:
with open(os.path.join(build_dir, name, "idedata.json"), encoding="utf8") as fp:
result[name] = json.load(fp)
return result

View File

@ -46,7 +46,7 @@ def test_download(isolated_pio_core):
lm.cleanup_expired_downloads()
assert not os.path.isfile(archive_path)
# check that key is deleted from DB
with open(lm.get_download_usagedb_path()) as fp:
with open(lm.get_download_usagedb_path(), encoding="utf8") as fp:
assert os.path.basename(archive_path) not in fp.read()