forked from platformio/platformio-core
Better support for file contents writing // Issue #2796
@@ -41,12 +41,7 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
 from platformio import exception, fs, util
 from platformio.builder.tools import platformio as piotool
-from platformio.compat import (
-    WINDOWS,
-    get_file_contents,
-    hashlib_encode_data,
-    string_types,
-)
+from platformio.compat import WINDOWS, hashlib_encode_data, string_types
 from platformio.managers.lib import LibraryManager
 
 
@@ -92,7 +87,7 @@ class LibBuilderFactory(object):
                     fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
                 ):
                     continue
-                content = get_file_contents(join(root, fname))
+                content = fs.get_file_contents(join(root, fname))
                 if not content:
                     continue
                 if "Arduino.h" in content and include_re.search(content):
@@ -1047,22 +1042,22 @@ def ConfigureProjectLibBuilder(env):
                 title += " %s" % lb.version
             if vcs_info and vcs_info.get("version"):
                 title += " #%s" % vcs_info.get("version")
-            sys.stdout.write("%s|-- %s" % (margin, title))
+            click.echo("%s|-- %s" % (margin, title), nl=False)
             if int(ARGUMENTS.get("PIOVERBOSE", 0)):
                 if vcs_info:
-                    sys.stdout.write(" [%s]" % vcs_info.get("url"))
-                sys.stdout.write(" (")
-                sys.stdout.write(lb.path)
-                sys.stdout.write(")")
-            sys.stdout.write("\n")
+                    click.echo(" [%s]" % vcs_info.get("url"), nl=False)
+                click.echo(" (", nl=False)
+                click.echo(lb.path, nl=False)
+                click.echo(")", nl=False)
+            click.echo("")
             if lb.depbuilders:
                 _print_deps_tree(lb, level + 1)
 
     project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
     ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
 
-    print ("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
-    print (
+    click.echo("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
+    click.echo(
         "LDF Modes: Finder ~ %s, Compatibility ~ %s"
         % (ldf_mode, project.lib_compat_mode)
     )
@@ -1070,19 +1065,19 @@ def ConfigureProjectLibBuilder(env):
     project.install_dependencies()
 
     lib_builders = env.GetLibBuilders()
-    print ("Found %d compatible libraries" % len(lib_builders))
+    click.echo("Found %d compatible libraries" % len(lib_builders))
 
-    print ("Scanning dependencies...")
+    click.echo("Scanning dependencies...")
     project.search_deps_recursive()
 
     if ldf_mode.startswith("chain") and project.depbuilders:
         _correct_found_libs(lib_builders)
 
     if project.depbuilders:
-        print ("Dependency Graph")
+        click.echo("Dependency Graph")
         _print_deps_tree(project)
     else:
-        print ("No dependencies")
+        click.echo("No dependencies")
 
     return project
 
@@ -18,6 +18,7 @@ from hashlib import md5
 from os import makedirs
 from os.path import isdir, isfile, join
 
+from platformio import fs
 from platformio.compat import WINDOWS, hashlib_encode_data
 
 # Windows CLI has limit with command length to 8192
@@ -66,8 +67,7 @@ def _file_long_data(env, data):
     )
     if isfile(tmp_file):
         return tmp_file
-    with open(tmp_file, "w") as fp:
-        fp.write(data)
+    fs.write_file_contents(tmp_file, data)
     return tmp_file
 
 
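The hunk above swaps the inline open()/write() pair for the new fs.write_file_contents() helper inside the workaround for the 8192-character Windows command-line limit mentioned in the previous hunk. As a rough, standalone illustration of that pattern only — the file_long_data name, the build directory handling, and the longcmd-<md5> naming are assumptions for this sketch, not PlatformIO's exact implementation:

# Illustrative sketch of a hash-named "response file" cache for long command data.
import hashlib
import os
from os.path import isfile, join


def file_long_data(build_dir, data):
    """Write long command-line data to a cache file and return its path."""
    digest = hashlib.md5(data.encode("utf-8")).hexdigest()
    tmp_file = join(build_dir, "longcmd-%s" % digest)
    if isfile(tmp_file):
        # The same data was already spilled to disk; reuse the existing file.
        return tmp_file
    with open(tmp_file, "w") as fp:
        fp.write(data)
    return tmp_file


if __name__ == "__main__":
    os.makedirs("build-tmp", exist_ok=True)
    # Well past the ~8192-character Windows command-line limit.
    path = file_long_data("build-tmp", "-DLONG_FLAG " * 1500)
    print(path, os.path.getsize(path))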
@@ -25,7 +25,7 @@ from SCons.Action import Action # pylint: disable=import-error
 from SCons.Script import ARGUMENTS # pylint: disable=import-error
 
 from platformio import fs, util
-from platformio.compat import get_file_contents, glob_escape
+from platformio.compat import glob_escape
 from platformio.managers.core import get_core_package_dir
 from platformio.proc import exec_command
 
@@ -62,7 +62,7 @@ class InoToCPPConverter(object):
         assert nodes
         lines = []
         for node in nodes:
-            contents = get_file_contents(node.get_path())
+            contents = fs.get_file_contents(node.get_path())
             _lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
             if self.is_main_node(contents):
                 lines = _lines + lines
@@ -78,16 +78,14 @@ class InoToCPPConverter(object):
     def process(self, contents):
         out_file = self._main_ino + ".cpp"
         assert self._gcc_preprocess(contents, out_file)
-        contents = get_file_contents(out_file)
+        contents = fs.get_file_contents(out_file)
         contents = self._join_multiline_strings(contents)
-        with open(out_file, "w") as fp:
-            fp.write(self.append_prototypes(contents))
+        fs.write_file_contents(out_file, self.append_prototypes(contents))
         return out_file
 
     def _gcc_preprocess(self, contents, out_file):
         tmp_path = mkstemp()[1]
-        with open(tmp_path, "w") as fp:
-            fp.write(contents)
+        fs.write_file_contents(tmp_path, contents)
         self.env.Execute(
             self.env.VerboseAction(
                 '$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
@@ -149,8 +149,7 @@ def cli(
 
     component_stats = collect_component_stats(results)
     if json_output:
-        click.echo(dump_json_to_unicode(
-            results_to_json(results, component_stats)))
+        click.echo(dump_json_to_unicode(results_to_json(results, component_stats)))
     elif not silent:
         print_check_summary(results, component_stats)
 
@@ -167,7 +166,7 @@ def results_to_json(raw, components):
                 "ignored": item.get("succeeded") is None,
                 "succeeded": bool(item.get("succeeded")),
                 "defects": [d.to_json() for d in item.get("defects", [])],
-                "stats": [{k: v} for k, v in components.items()]
+                "stats": [{k: v} for k, v in components.items()],
             }
         )
         results.append(item)
@@ -145,9 +145,9 @@ def init_base_project(project_dir):
 
 
 def init_include_readme(include_dir):
-    with open(join(include_dir, "README"), "w") as f:
-        f.write(
-            """
+    fs.write_file_contents(
+        join(include_dir, "README"),
+        """
 This directory is intended for project header files.
 
 A header file is a file containing C declarations and macro definitions
@@ -186,15 +186,15 @@ Read more about using header files in official GCC documentation:
 * Computed Includes
 
 https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
-"""
-        )
+""",
+    )
 
 
 def init_lib_readme(lib_dir):
-    with open(join(lib_dir, "README"), "w") as f:
-        # pylint: disable=line-too-long
-        f.write(
-            """
+    # pylint: disable=line-too-long
+    fs.write_file_contents(
+        join(lib_dir, "README"),
+        """
 This directory is intended for project specific (private) libraries.
 PlatformIO will compile them to static libraries and link into executable file.
 
@@ -240,14 +240,14 @@ libraries scanning project source files.
 
 More information about PlatformIO Library Dependency Finder
 - https://docs.platformio.org/page/librarymanager/ldf.html
-"""
-        )
+""",
+    )
 
 
 def init_test_readme(test_dir):
-    with open(join(test_dir, "README"), "w") as f:
-        f.write(
-            """
+    fs.write_file_contents(
+        join(test_dir, "README"),
+        """
 This directory is intended for PIO Unit Testing and project tests.
 
 Unit Testing is a software testing method by which individual units of
@@ -258,17 +258,17 @@ in the development cycle.
 
 More information about PIO Unit Testing:
 - https://docs.platformio.org/page/plus/unit-testing.html
-"""
-        )
+""",
+    )
 
 
 def init_ci_conf(project_dir):
     conf_path = join(project_dir, ".travis.yml")
     if isfile(conf_path):
         return
-    with open(conf_path, "w") as f:
-        f.write(
-            """# Continuous Integration (CI) is the practice, in software
+    fs.write_file_contents(
+        conf_path,
+        """# Continuous Integration (CI) is the practice, in software
 # engineering, of merging all developer working copies with a shared mainline
 # several times a day < https://docs.platformio.org/page/ci/index.html >
 #
@@ -335,16 +335,15 @@ def init_ci_conf(project_dir):
 #
 # script:
 # - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
-"""
-        )
+""",
+    )
 
 
 def init_cvs_ignore(project_dir):
     conf_path = join(project_dir, ".gitignore")
     if isfile(conf_path):
         return
-    with open(conf_path, "w") as fp:
-        fp.write(".pio\n")
+    fs.write_file_contents(conf_path, ".pio\n")
 
 
 def fill_project_envs(
@@ -23,7 +23,6 @@ import click
 
 from platformio import exception, fs
 from platformio.commands.device import device_monitor as cmd_device_monitor
-from platformio.compat import get_file_contents
 from platformio.managers.core import pioplus_call
 
 # pylint: disable=unused-argument
@@ -201,7 +200,7 @@ def device_monitor(ctx, **kwargs):
             sleep(0.1)
         if not t.is_alive():
             return
-        kwargs["port"] = get_file_contents(sock_file)
+        kwargs["port"] = fs.get_file_contents(sock_file)
         ctx.invoke(cmd_device_monitor, **kwargs)
         t.join(2)
     finally:
@@ -52,10 +52,6 @@ if PY2:
             return path
         return path.decode(get_filesystem_encoding()).encode("utf-8")
 
-    def get_file_contents(path):
-        with open(path) as f:
-            return f.read()
-
     def hashlib_encode_data(data):
         if is_bytes(data):
             return data
@@ -104,14 +100,6 @@ else:
     def path_to_unicode(path):
         return path
 
-    def get_file_contents(path):
-        try:
-            with open(path) as f:
-                return f.read()
-        except UnicodeDecodeError:
-            with open(path, encoding="latin-1") as f:
-                return f.read()
-
     def hashlib_encode_data(data):
         if is_bytes(data):
             return data
@@ -220,14 +220,12 @@ def is_prog_obsolete(prog_path):
                 break
             shasum.update(data)
     new_digest = shasum.hexdigest()
-    old_digest = None
-    if isfile(prog_hash_path):
-        with open(prog_hash_path, "r") as fp:
-            old_digest = fp.read()
+    old_digest = (
+        fs.get_file_contents(prog_hash_path) if isfile(prog_hash_path) else None
+    )
     if new_digest == old_digest:
         return False
-    with open(prog_hash_path, "w") as fp:
-        fp.write(new_digest)
+    fs.write_file_contents(prog_hash_path, new_digest)
     return True
 
 
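For readability, the check that the hunk above rewrites can be assembled into one standalone sketch: hash the program file with SHA-1, compare the digest against a stored sidecar, and refresh the sidecar when it differs. This is an illustration using plain open() calls rather than the fs helpers, and the "<prog>.sha1" sidecar name is an assumption for the example, not necessarily the upstream layout.

# Standalone sketch of the digest-sidecar pattern shown in the hunk above.
import hashlib
from os.path import isfile


def is_prog_obsolete(prog_path):
    prog_hash_path = prog_path + ".sha1"
    shasum = hashlib.sha1()
    with open(prog_path, "rb") as fp:
        while True:
            data = fp.read(1024)
            if not data:
                break
            shasum.update(data)
    new_digest = shasum.hexdigest()
    old_digest = None
    if isfile(prog_hash_path):
        with open(prog_hash_path) as fp:
            old_digest = fp.read()
    if new_digest == old_digest:
        return False  # program unchanged since the stored digest
    with open(prog_hash_path, "w") as fp:
        fp.write(new_digest)
    return True  # digest changed or was missing; sidecar refreshed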
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import io
 import json
 import os
 import re
@@ -23,7 +24,7 @@ from glob import glob
 import click
 
 from platformio import exception
-from platformio.compat import WINDOWS, get_file_contents, glob_escape
+from platformio.compat import WINDOWS, glob_escape
 
 
 class cd(object):
@@ -48,6 +49,24 @@ def get_source_dir():
     return os.path.dirname(curpath)
 
 
+def get_file_contents(path):
+    try:
+        with open(path) as fp:
+            return fp.read()
+    except UnicodeDecodeError:
+        with io.open(path, encoding="latin-1") as fp:
+            return fp.read()
+
+
+def write_file_contents(path, contents):
+    try:
+        with open(path, "w") as fp:
+            return fp.write(contents)
+    except UnicodeDecodeError:
+        with io.open(path, "w", encoding="latin-1") as fp:
+            return fp.write(contents)
+
+
 def load_json(file_path):
     try:
         with open(file_path, "r") as f:
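The two helpers added above are what the rest of the commit switches to. A minimal usage sketch, assuming a PlatformIO checkout where this module is importable as platformio.fs; the temporary path below is illustrative only:

# Usage sketch for the helpers added in the hunk above.
import tempfile
from os.path import join

from platformio import fs

tmp_dir = tempfile.mkdtemp()
readme_path = join(tmp_dir, "README")

# Replaces the repeated `with open(path, "w") as fp: fp.write(...)` blocks.
fs.write_file_contents(readme_path, "This directory is intended for project header files.\n")

# Reads the file back; on UnicodeDecodeError it retries with latin-1 (see above).
print(fs.get_file_contents(readme_path))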
@@ -208,8 +208,7 @@ class ProjectRPC(object):
             return project_dir
         if not isdir(src_dir):
             os.makedirs(src_dir)
-        with open(main_path, "w") as f:
-            f.write(main_content.strip())
+        fs.write_file_contents(main_path, main_content.strip())
         return project_dir
 
     def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import codecs
+import io
 import os
 import sys
 from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath
@@ -20,7 +20,6 @@ from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath
 import bottle
 
 from platformio import fs, util
-from platformio.compat import get_file_contents
 from platformio.proc import where_is_program
 from platformio.project.config import ProjectConfig
 from platformio.project.helpers import load_project_ide_data
@@ -137,11 +136,11 @@ class ProjectGenerator(object):
 
     @staticmethod
     def _render_tpl(tpl_path, tpl_vars):
-        return bottle.template(get_file_contents(tpl_path), **tpl_vars)
+        return bottle.template(fs.get_file_contents(tpl_path), **tpl_vars)
 
     @staticmethod
     def _merge_contents(dst_path, contents):
         if basename(dst_path) == ".gitignore" and isfile(dst_path):
             return
-        with codecs.open(dst_path, "w", encoding="utf8") as fp:
+        with io.open(dst_path, "w", encoding="utf8") as fp:
             fp.write(contents)
@@ -53,12 +53,9 @@ def clean_build_dir(build_dir, config):
 
     if isdir(build_dir):
         # check project structure
-        if isfile(checksum_file):
-            with open(checksum_file) as f:
-                if f.read() == checksum:
-                    return
+        if isfile(checksum_file) and fs.get_file_contents(checksum_file) == checksum:
+            return
         fs.rmtree(build_dir)
 
     makedirs(build_dir)
-    with open(checksum_file, "w") as f:
-        f.write(checksum)
+    fs.write_file_contents(checksum_file, checksum)
@@ -19,7 +19,7 @@ from string import Template
 
 import click
 
-from platformio import exception
+from platformio import exception, fs
 
 TRANSPORT_OPTIONS = {
     "arduino": {
@@ -193,7 +193,6 @@ class TestProcessorBase(object):
         data = Template(tpl).substitute(baudrate=self.get_baudrate())
 
         tmp_file = join(test_dir, "output_export.cpp")
-        with open(tmp_file, "w") as f:
-            f.write(data)
+        fs.write_file_contents(tmp_file, data)
 
         atexit.register(delete_tmptest_file, tmp_file)
@@ -428,7 +428,7 @@ def test_platform_json_schema():
             },
             "frameworks": sorted(["arduino", "simba"]),
             "version": "1.15.0",
         }
     },
 )