Merge branch 'feature/doc_build_subset' into 'master'
docs: support building a subset of the documentation with build_docs

Closes IDF-1688

See merge request espressif/esp-idf!8857
Commit: 6048791a1b
5 changed files with 60 additions and 4 deletions
@@ -84,6 +84,8 @@ def main():
                         help="Parallel Sphinx builds - number of independent Sphinx builds to run", default="auto")
     parser.add_argument("--sphinx-parallel-jobs", "-j", choices=["auto"] + [str(x) for x in range(8)],
                         help="Sphinx parallel jobs argument - number of threads for each Sphinx build to use", default="1")
+    parser.add_argument("--input-docs", "-i", nargs='+', default=[""],
+                        help="List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents")
 
     action_parsers = parser.add_subparsers(dest='action')
 
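For quick reference, here is a standalone sketch of how the new flag behaves (this is not the real build_docs.py parser, just the two relevant lines copied into a toy script): ``nargs='+'`` collects one or more paths into ``args.input_docs``, and the default ``[""]`` comma-joins to an empty string, which keeps the Sphinx-side ``docs_to_build`` check falsy so all documents are built::

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--input-docs", "-i", nargs='+', default=[""],
                        help="List of documents to build relative to the doc base folder")

    # One or more documents collect into a list...
    args = parser.parse_args(["-i",
                              "api-reference/peripherals/can.rst",
                              "api-reference/peripherals/adc.rst"])
    print(args.input_docs)
    # ['api-reference/peripherals/can.rst', 'api-reference/peripherals/adc.rst']

    # ...and the default [""] comma-joins to an empty string, so a check like
    # `if config.docs_to_build:` stays falsy and every document is built.
    print(repr(",".join(parser.parse_args([]).input_docs)))  # ''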
@@ -153,7 +155,7 @@ def parallel_call(args, callback):
     for target in targets:
         for language in languages:
             build_dir = os.path.realpath(os.path.join(args.build_dir, language, target))
-            entries.append((language, target, build_dir, args.sphinx_parallel_jobs, args.builders))
+            entries.append((language, target, build_dir, args.sphinx_parallel_jobs, args.builders, args.input_docs))
 
     print(entries)
     errcodes = pool.map(callback, entries)
@ -175,7 +177,7 @@ def parallel_call(args, callback):
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername):
|
def sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername, input_docs):
|
||||||
# Note: because this runs in a multiprocessing Process, everything which happens here should be isolated to a single process
|
# Note: because this runs in a multiprocessing Process, everything which happens here should be isolated to a single process
|
||||||
# (ie it doesn't matter if Sphinx is using global variables, as they're it's own copy of the global variables)
|
# (ie it doesn't matter if Sphinx is using global variables, as they're it's own copy of the global variables)
|
||||||
|
|
||||||
|
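The isolation noted in the comment above comes from how parallel_call fans out the work: every (language, target) pair becomes one tuple in ``entries``, and ``pool.map`` runs the callback for each tuple in its own process. A minimal sketch of that pattern under the same assumption (the languages, targets and document list here are made up)::

    import multiprocessing

    def fake_build(entry):
        # Stand-in for call_build_docs(); returns an error code per build.
        language, target, input_docs = entry
        return 0

    if __name__ == "__main__":
        entries = [(lang, tgt, ["api-reference/peripherals/can.rst"])
                   for lang in ("en", "zh_CN")
                   for tgt in ("esp32", "esp32s2")]
        with multiprocessing.Pool(processes=len(entries)) as pool:
            errcodes = pool.map(fake_build, entries)
        print(errcodes)  # collected return codes, one per (language, target) build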
@@ -201,6 +203,7 @@ def sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername):
             "-w", SPHINX_WARN_LOG,
             "-t", target,
             "-D", "idf_target={}".format(target),
+            "-D", "docs_to_build={}".format(",".join(input_docs)),
             os.path.join(os.path.abspath(os.path.dirname(__file__)), language),  # srcdir for this language
             os.path.join(build_dir, buildername)  # build directory
             ]
 
@@ -259,9 +262,9 @@ def action_build(args):
 
 
 def call_build_docs(entry):
-    (language, target, build_dir, sphinx_parallel_jobs, builders) = entry
+    (language, target, build_dir, sphinx_parallel_jobs, builders, input_docs) = entry
     for buildername in builders:
-        ret = sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername)
+        ret = sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername, input_docs)
         if ret != 0:
             return ret
 
@@ -384,6 +384,7 @@ def setup(app):
     app.add_config_value('idf_targets', None, 'env')
 
     app.add_config_value('conditional_include_dict', None, 'env')
+    app.add_config_value('docs_to_build', None, 'env')
 
     # Breathe extension variables (depend on build_dir)
     # note: we generate into xml_in and then copy_if_modified to xml dir
@@ -167,6 +167,8 @@ Other Extensions
 :idf_file:`docs/idf_extensions/exclude_docs.py`
     Sphinx extension that updates the excluded documents according to the conditional_include_dict {tag:documents}. If the tag is set, then the list of documents will be included.
 
+    Also responsible for excluding documents when building with the config value ``docs_to_build`` set. In these cases all documents not listed in ``docs_to_build`` will be excluded.
+
     Subscribes to ``idf-defines-generated`` as it relies on the sphinx tags to determine which documents to exclude
 
 :idf_file:`docs/idf_extensions/run_doxygen.py`
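To make the ``{tag:documents}`` wording concrete, here is a purely illustrative sketch (the dictionary entries below are invented; the real mapping lives in the docs configuration) of the gating the extension applies: documents whose tag is not set for the current build end up in the exclude patterns::

    # Invented example entries -- not the real conditional_include_dict.
    conditional_include_dict = {
        'esp32': ['hw-reference/esp32/*'],
        'esp32s2': ['hw-reference/esp32s2/*'],
    }

    # Tags set for this build (normally derived from the IDF target and defines).
    active_tags = {'esp32'}

    exclude_patterns = []
    for tag, docs in conditional_include_dict.items():
        if tag not in active_tags:          # same check as `if not app.tags.has(tag)`
            exclude_patterns.extend(docs)

    print(exclude_patterns)  # ['hw-reference/esp32s2/*']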
@@ -511,6 +511,23 @@ Choices for language (``-l``) are ``en`` and ``zh_CN``. Choices for target (``-t
 
 Build documentation will be placed in ``_build/<language>/<target>/html`` folder. To see it, open the ``index.html`` inside this directory in a web browser.
 
+Building a subset of the documentation
+""""""""""""""""""""""""""""""""""""""
+
+Since building the full documentation can be quite slow, it might be useful to build just the subset of the documentation you are interested in.
+
+This can be achieved by listing the document you want to build::
+
+    ./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst build
+
+Building multiple documents is also possible::
+
+    ./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst api-reference/peripherals/adc.rst build
+
+As well as wildcards::
+
+    ./build_docs.py -l en -t esp32 -i api-reference/peripherals/* build
+
+Note that this feature is intended to simplify testing and debugging while writing documentation. The HTML output won't be perfect, i.e. it will not build a proper index that lists all the documents, and any references to documents that are not built will result in warnings.
+
 Building PDF
 """"""""""""
@@ -1,10 +1,43 @@
+from sphinx.util import get_matching_files
+from sphinx.util.matching import compile_matchers
+
+
 # Updates the excluded documents according to the conditional_include_dict {tag:documents}
 def update_exclude_patterns(app, config):
+    # Default to building all if option not set
+    if config.docs_to_build:
+        build_subset(app, config)
+
     for tag, docs in config.conditional_include_dict.items():
         if not app.tags.has(tag):
             app.config.exclude_patterns.extend(docs)
 
 
+def build_subset(app, config):
+    # Convert to list of docs to build
+    docs_to_build = config.docs_to_build.split(',')
+
+    # Exclude all documents which were not set as docs_to_build when build_docs was called
+    exclude_docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(docs_to_build))]
+
+    app.config.exclude_patterns.extend(exclude_docs)
+
+    # Get all docs that will be built
+    docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(exclude_docs))]
+    if not docs:
+        raise ValueError("No documents to build")
+    print("Building a subset of the documents: {}".format(docs))
+
+    # Sphinx requires a master document, if there is a document named 'index' then we pick that
+    index_docs = [doc for doc in docs if 'index' in doc]
+    if index_docs:
+        config.master_doc = index_docs[0].replace('.rst', '')
+    else:
+        config.master_doc = docs[0].replace('.rst', '')
+
+
 def setup(app):
     # Tags are generated together with defines
     app.connect('config-inited', update_exclude_patterns)
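One step in build_subset() that is easy to misread: ``get_matching_files(srcdir, exclude_matchers)`` yields the files that are *not* matched, so matching against ``docs_to_build`` returns everything except the requested documents (hence the name ``exclude_docs``), and matching against ``exclude_docs`` a second time returns exactly the documents that will be built. A small sketch of that double exclusion using a plain list instead of a source directory (the matcher behaviour is assumed from the Sphinx 2.x/3.x utilities imported above)::

    from sphinx.util.matching import compile_matchers

    all_docs = ['index.rst',
                'api-reference/peripherals/can.rst',
                'api-reference/peripherals/adc.rst']

    def not_matching(files, patterns):
        # Mirrors get_matching_files(srcdir, compile_matchers(patterns)):
        # keep every file that matches none of the glob-style patterns.
        matchers = compile_matchers(patterns)
        return [f for f in files if not any(m(f) for m in matchers)]

    docs_to_build = ['api-reference/peripherals/can.rst']
    exclude_docs = not_matching(all_docs, docs_to_build)  # everything except the requested doc
    docs = not_matching(all_docs, exclude_docs)           # the requested doc again

    print(exclude_docs)  # ['index.rst', 'api-reference/peripherals/adc.rst']
    print(docs)          # ['api-reference/peripherals/can.rst']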