mirror of
https://github.com/espressif/esp-idf.git
synced 2025-08-08 07:04:32 +02:00
docs: Manage parallel sphinx-build runs with optional parallel jobs within them
This commit is contained in:
committed by
Angus Gratton
parent
105567d077
commit
35db219be2
@@ -8,7 +8,10 @@
|
|||||||
#
|
#
|
||||||
# Specific custom docs functionality should be added in conf_common.py or in a Sphinx extension, not here.
|
# Specific custom docs functionality should be added in conf_common.py or in a Sphinx extension, not here.
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
import argparse
|
import argparse
|
||||||
|
import math
|
||||||
|
import multiprocessing
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import subprocess
|
import subprocess
|
||||||
@@ -37,6 +40,10 @@ def main():
|
|||||||
required=False)
|
required=False)
|
||||||
parser.add_argument("--target", "-t", choices=TARGETS, required=False)
|
parser.add_argument("--target", "-t", choices=TARGETS, required=False)
|
||||||
parser.add_argument("--build-dir", "-b", type=str, default="_build")
|
parser.add_argument("--build-dir", "-b", type=str, default="_build")
|
||||||
|
parser.add_argument("--sphinx-parallel-builds", "-p", choices=["auto"] + [str(x) for x in range(8)],
|
||||||
|
help="Parallel Sphinx builds - number of independent Sphinx builds to run", default="auto")
|
||||||
|
parser.add_argument("--sphinx-parallel-jobs", "-j", choices=["auto"] + [str(x) for x in range(8)],
|
||||||
|
help="Sphinx parallel jobs argument - number of threads for each Sphinx build to use", default="auto")
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
@@ -52,39 +59,94 @@ def main():
|
|||||||
else:
|
else:
|
||||||
targets = [args.target]
|
targets = [args.target]
|
||||||
|
|
||||||
for language in languages:
|
num_sphinx_builds = len(languages) * len(targets)
|
||||||
for target in targets:
|
num_cpus = multiprocessing.cpu_count()
|
||||||
build_dir = os.path.realpath(os.path.join(args.build_dir, language, target))
|
|
||||||
build_docs(language, target, build_dir)
|
|
||||||
|
|
||||||
def build_docs(language, target, build_dir):
|
if args.sphinx_parallel_builds == "auto":
|
||||||
print("Building language:%s target:%s build_dir:%s" % (language, target, build_dir))
|
# at most one sphinx build per CPU, up to the number of CPUs
|
||||||
|
args.sphinx_parallel_builds = min(num_sphinx_builds, num_cpus)
|
||||||
|
else:
|
||||||
|
args.sphinx_parallel_builds = int(args.sphinx_parallel_builds)
|
||||||
|
|
||||||
|
if args.sphinx_parallel_jobs == "auto":
|
||||||
|
# N CPUs per build job, rounded up - (maybe smarter to round down to avoid contention, idk)
|
||||||
|
args.sphinx_parallel_jobs = int(math.ceil(num_cpus / args.sphinx_parallel_builds))
|
||||||
|
else:
|
||||||
|
args.sphinx_parallel_jobs = int(args.sphinx_parallel_jobs)
|
||||||
|
|
||||||
|
print("Will use %d parallel builds and %d jobs per build" % (args.sphinx_parallel_builds, args.sphinx_parallel_jobs))
|
||||||
|
pool = multiprocessing.Pool(args.sphinx_parallel_builds)
|
||||||
|
|
||||||
|
# make a list of all combinations of build_docs() args as tuples
|
||||||
|
#
|
||||||
|
# there's probably a fancy way to do this with itertools but this way is actually readable
|
||||||
|
entries = []
|
||||||
|
for target in targets:
|
||||||
|
for language in languages:
|
||||||
|
build_dir = os.path.realpath(os.path.join(args.build_dir, language, target))
|
||||||
|
entries.append((language, target, build_dir, args.sphinx_parallel_jobs))
|
||||||
|
|
||||||
|
print(entries)
|
||||||
|
failures = pool.map(call_build_docs, entries)
|
||||||
|
if any(failures):
|
||||||
|
if len(entries) > 1:
|
||||||
|
print("The following language/target combinations failed to build:")
|
||||||
|
for f in failures:
|
||||||
|
if f is not None:
|
||||||
|
print("language: %s target: %s" % (f[0], f[1]))
|
||||||
|
raise SystemExit(2)
|
||||||
|
|
||||||
|
|
||||||
|
def call_build_docs(entry):
|
||||||
|
build_docs(*entry)
|
||||||
|
|
||||||
|
|
||||||
|
def build_docs(language, target, build_dir, sphinx_parallel_jobs=1):
|
||||||
|
# Note: because this runs in a multiprocessing Process, everything which happens here should be isolated to a single process
|
||||||
|
# (ie it doesn't matter if Sphinx is using global variables, as they have their own copy of the global variables)
|
||||||
|
|
||||||
|
# wrap stdout & stderr in a way that lets us see which build_docs instance they come from
|
||||||
|
#
|
||||||
|
# this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears
|
||||||
|
prefix = "%s/%s: " % (language, target)
|
||||||
|
|
||||||
|
print("Building in build_dir:%s" % (build_dir))
|
||||||
try:
|
try:
|
||||||
os.makedirs(build_dir)
|
os.makedirs(build_dir)
|
||||||
except OSError:
|
except OSError:
|
||||||
pass
|
pass
|
||||||
try:
|
|
||||||
environ = {}
|
|
||||||
environ.update(os.environ)
|
|
||||||
environ['BUILDDIR'] = build_dir
|
|
||||||
|
|
||||||
args = [sys.executable, "-m", "sphinx",
|
environ = {}
|
||||||
"-j", "auto", # use all the cores! (where possible)
|
environ.update(os.environ)
|
||||||
"-b", "html", # TODO: PDFs
|
environ['BUILDDIR'] = build_dir
|
||||||
"-d", os.path.join(build_dir, "doctrees"),
|
|
||||||
# TODO: support multiple sphinx-warning.log files, somehow
|
args = [sys.executable, "-u", "-m", "sphinx.cmd.build",
|
||||||
"-w", "sphinx-warning.log",
|
"-j", str(sphinx_parallel_jobs),
|
||||||
"-t", target,
|
"-b", "html", # TODO: PDFs
|
||||||
"-D", "idf_target={}".format(target),
|
"-d", os.path.join(build_dir, "doctrees"),
|
||||||
os.path.join(os.path.abspath(os.path.dirname(__file__)), language), # srcdir for this language
|
# TODO: support multiple sphinx-warning.log files, somehow
|
||||||
os.path.join(build_dir, "html") # build directory
|
"-w", "sphinx-warning.log",
|
||||||
]
|
"-t", target,
|
||||||
cwd = build_dir # also run sphinx in the build directory
|
"-D", "idf_target={}".format(target),
|
||||||
print("Running '{}'".format(" ".join(args)))
|
os.path.join(os.path.abspath(os.path.dirname(__file__)), language), # srcdir for this language
|
||||||
subprocess.check_call(args, cwd=cwd, env=environ)
|
os.path.join(build_dir, "html") # build directory
|
||||||
except subprocess.CalledProcessError:
|
]
|
||||||
print("Sphinx failed for language:%s target:%s" % (language, target))
|
cwd = build_dir # also run sphinx in the build directory
|
||||||
raise SystemExit(1) # rest of the details should be in stdout
|
|
||||||
|
os.chdir(cwd)
|
||||||
|
print("Running '%s'" % (" ".join(args)))
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Note: we can't call sphinx.cmd.build.main() here as multiprocessing doesn't nest >1 layer deep
|
||||||
|
# and sphinx.cmd.build() also does a lot of work in the calling thread, especially for j ==1,
|
||||||
|
# so using a Python thread for this part is a poor option (GIL)
|
||||||
|
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
for c in iter(lambda: p.stdout.readline(), ''):
|
||||||
|
sys.stdout.write(prefix)
|
||||||
|
sys.stdout.write(c)
|
||||||
|
except KeyboardInterrupt: # this seems to be the only way to get Ctrl-C to kill everything?
|
||||||
|
p.kill()
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
@@ -39,7 +39,6 @@ def setup(app):
|
|||||||
except KeyError:
|
except KeyError:
|
||||||
idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
|
idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
|
||||||
|
|
||||||
|
|
||||||
app.add_config_value('docs_root', os.path.join(idf_path, "docs"), 'env')
|
app.add_config_value('docs_root', os.path.join(idf_path, "docs"), 'env')
|
||||||
app.add_config_value('idf_path', idf_path, 'env')
|
app.add_config_value('idf_path', idf_path, 'env')
|
||||||
app.add_config_value('build_dir', build_dir, 'env') # not actually an IDF thing
|
app.add_config_value('build_dir', build_dir, 'env') # not actually an IDF thing
|
||||||
|
Reference in New Issue
Block a user