docs: Refactor extensions into packages, update the add-ons-reference docs page

Includes converting some of the remaining standalone scripts into Sphinx extensions.

Make flake8 clean
This commit is contained in:
Angus Gratton 2019-11-29 08:56:53 +11:00 committed by Angus Gratton
parent cbede3a3a4
commit a148d8e6ba
28 changed files with 360 additions and 434 deletions

View file

@ -302,7 +302,7 @@ EXPAND_ONLY_PREDEF = YES
PREDEFINED = \
$(ENV_DOXYGEN_DEFINES) \
__attribute__(x)= \
_Static_assert(x, y) = \
_Static_assert(x) = \
IDF_DEPRECATED(X)= \
IRAM_ATTR= \
configSUPPORT_DYNAMIC_ALLOCATION=1 \

View file

@ -17,6 +17,7 @@ import sys
LANGUAGES = ["en", "zh_CN"]
TARGETS = ["esp32", "esp32s2"]
def main():
# check Python dependencies for docs
try:
@ -43,13 +44,13 @@ def main():
print("Building all languages")
languages = LANGUAGES
else:
languages = [ args.language ]
languages = [args.language]
if args.target is None:
print("Building all targets")
targets = TARGETS
else:
targets = [ args.target ]
targets = [args.target]
for language in languages:
for target in targets:
@ -76,9 +77,9 @@ def build_docs(language, target, build_dir):
"-w", "sphinx-warning.log",
"-t", target,
"-D", "idf_target={}".format(target),
os.path.join(os.path.abspath(os.path.dirname(__file__)), language), # srcdir for this language
os.path.join(os.path.abspath(os.path.dirname(__file__)), language), # srcdir for this language
os.path.join(build_dir, "html") # build directory
]
]
cwd = build_dir # also run sphinx in the build directory
print("Running '{}'".format(" ".join(args)))
subprocess.check_call(args, cwd=cwd, env=environ)

View file

@ -18,65 +18,13 @@ from __future__ import print_function
from __future__ import unicode_literals
import sys
import os
import os.path
import re
import subprocess
# Note: If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute
from local_util import run_cmd_get_output, copy_if_modified, call_with_python
# build_docs on the CI server sometimes fails under Python3. This is a workaround:
sys.setrecursionlimit(3500)
try:
build_dir = os.environ['BUILDDIR']
except KeyError:
build_dir = '_build'
build_dir = os.path.abspath(build_dir)
# Fill in a default IDF_PATH if it's missing (ie when Read The Docs is building the docs)
try:
idf_path = os.environ['IDF_PATH']
except KeyError:
idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
docs_root = os.path.join(idf_path, "docs")
try:
os.mkdir(build_dir)
except OSError:
pass
try:
os.mkdir(os.path.join(build_dir, 'inc'))
except OSError:
pass
# Generate version-related includes
#
# (Note: this is in a function as it needs to access configuration to get the language)
def generate_version_specific_includes(app):
    """Generate the dynamic ``.inc`` snippets needed by the docs build.

    Called from Sphinx ``setup()`` (it needs ``app.config.language``, so it
    cannot run at module import time). Produces three sets of includes under
    ``<build_dir>/inc``: version-specific text, toolchain download links, and
    the IDF Tools list. Uses the module-level ``build_dir``, ``docs_root`` and
    ``idf_path`` globals plus the ``local_util`` helpers.
    """
    print("Generating version-specific includes...")
    version_tmpdir = '{}/version_inc'.format(build_dir)
    call_with_python('{}/gen-version-specific-includes.py {} {}'.format(docs_root, app.config.language, version_tmpdir))
    # copy_if_modified avoids touching unchanged files, keeping incremental builds fast
    copy_if_modified(version_tmpdir, '{}/inc'.format(build_dir))

    # Generate toolchain download links
    print("Generating toolchain download links")
    base_url = 'https://dl.espressif.com/dl/'
    toolchain_tmpdir = '{}/toolchain_inc'.format(build_dir)
    call_with_python('{}/gen-toolchain-links.py ../../tools/toolchain_versions.mk {} {}'.format(docs_root, base_url, toolchain_tmpdir))
    copy_if_modified(toolchain_tmpdir, '{}/inc'.format(build_dir))

    print("Generating IDF Tools list")
    # NOTE(review): presumably makes idf_tools.py emit the maintainer (full) tools list — confirm
    os.environ["IDF_MAINTAINER"] = "1"
    # Fix: these two lines previously used the undefined name 'builddir';
    # the module-level global is 'build_dir', so this raised NameError.
    tools_rst = os.path.join(build_dir, 'idf-tools-inc.rst')
    tools_rst_tmp = os.path.join(build_dir, 'inc', 'idf-tools-inc.rst')
    call_with_python("{}/tools/idf_tools.py gen-doc --output {}".format(idf_path, tools_rst_tmp))
    copy_if_modified(tools_rst_tmp, tools_rst)
# http://stackoverflow.com/questions/12772927/specifying-an-online-image-in-sphinx-restructuredtext-format
#
@ -84,7 +32,6 @@ suppress_warnings = ['image.nonlocal_uri']
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
@ -92,24 +39,33 @@ suppress_warnings = ['image.nonlocal_uri']
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['breathe',
'link-roles',
'sphinx.ext.todo',
'sphinxcontrib.blockdiag',
'sphinxcontrib.seqdiag',
'sphinxcontrib.actdiag',
'sphinxcontrib.nwdiag',
'sphinxcontrib.rackdiag',
'sphinxcontrib.packetdiag',
'html_redirects',
'idf_build_system',
'kconfig_reference',
'doxygen_idf',
'sphinx.ext.todo',
'include_build_file',
'toctree_filter',
'extensions.html_redirects',
'extensions.toctree_filter',
'idf_extensions.include_build_file',
'idf_extensions.link_roles',
'idf_extensions.build_system',
'idf_extensions.esp_err_definitions',
'idf_extensions.gen_toolchain_links',
'idf_extensions.gen_version_specific_includes',
'idf_extensions.kconfig_reference',
'idf_extensions.run_doxygen',
'idf_extensions.gen_idf_tools_links',
# from https://github.com/pfalcon/sphinx_selective_exclude
'sphinx_selective_exclude.eager_only',
#'sphinx_selective_exclude.search_auto_exclude',
#'sphinx_selective_exclude.modindex_exclude',
# TODO: determine if we need search_auto_exclude
# 'sphinx_selective_exclude.search_auto_exclude',
]
# sphinx.ext.todo extension parameters
@ -120,13 +76,6 @@ todo_include_todos = False
# Enabling this fixes cropping of blockdiag edge labels
seqdiag_antialias = True
# Breathe extension variables
# Doxygen regenerates files in 'xml/' directory every time,
# but we copy files to 'xml_in/' only when they change, to speed up
# incremental builds.
breathe_projects = {"esp32-idf": os.path.join(build_dir, "xml_in/")}
breathe_default_project = "esp32-idf"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@ -155,7 +104,7 @@ master_doc = 'index'
# This is supposed to be "the short X.Y version", but it's the only version
# visible when you open index.html.
# Display full version to make things less confusing.
version = run_cmd_get_output('git describe')
version = subprocess.check_output(['git', 'describe']).strip()
# The full version, including alpha/beta/rc tags.
# If needed, nearest tag is returned by 'git describe --abbrev=0'.
release = version
@ -171,7 +120,8 @@ print('Version: {0} Release: {1}'.format(version, release))
# directories to ignore when looking for source files.
exclude_patterns = ['**/inc/**']
# Add target-specific excludes based on tags. Haven't found any better way to do this yet
# Add target-specific excludes based on tags (for the IDF_TARGET). Haven't found any better way to do this yet
def update_exclude_patterns(tags):
if "esp32" not in tags:
# Exclude ESP32-only document pages so they aren't found in the initial search for .rst files
@ -186,6 +136,7 @@ def update_exclude_patterns(tags):
'gnu-make-legacy.rst']:
exclude_patterns.append(e)
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
@ -389,6 +340,9 @@ texinfo_documents = [
# Override RTD CSS theme to introduce the theme corrections
# https://github.com/rtfd/sphinx_rtd_theme/pull/432
def setup(app):
app.config.build_dir = build_dir
app.add_stylesheet('theme_overrides.css')
generate_version_specific_includes(app)
# Breathe extension variables (depend on build_dir)
# note: we generate into xml_in and then copy_if_modified to xml dir
app.config.breathe_projects = {"esp32-idf": os.path.join(app.config.build_dir, "xml_in/")}
app.config.breathe_default_project = "esp32-idf"

View file

@ -1,78 +0,0 @@
# Extension to generate Doxygen XML include files, with IDF config & soc macros included
import glob
import os.path
import re
import sys
import subprocess
gen_dxd = __import__("gen-dxd")
from local_util import copy_if_modified, call_with_python
def setup(app):
    """Sphinx extension entry point.

    Registers :func:`generate_doxygen` on the ``idf-info`` event, which the
    idf_build_system extension emits once the dummy project's CMake run has
    completed.
    """
    app.connect('idf-info', generate_doxygen)
    return {'version': '0.1', 'parallel_read_safe': True, 'parallel_write_safe': True}
def _parse_defines(header_path):
    """Preprocess *header_path* and return its ``#define`` macros.

    Returns a dict mapping macro name to its (possibly empty) expansion text.
    Macros whose names begin with an underscore (compiler/internal macros)
    are skipped.

    Note: the preprocessor runs without any -I arguments, so the assumption
    is that these headers are self-contained and don't include any other
    headers outside their own directory.
    """
    print("Reading macros from %s..." % (header_path))
    cpp_output = subprocess.check_output(
        ["xtensa-esp32-elf-gcc", "-dM", "-E", header_path]).decode()
    define_pattern = re.compile("#define ([^ ]+) ?(.*)")
    macros = {}
    for raw_line in cpp_output.split("\n"):
        match = define_pattern.search(raw_line.strip())
        if match and not match.group(1).startswith("_"):
            macros[match.group(1)] = match.group(2)
    return macros
def generate_doxygen(app, project_description):
    """Handler for the ``idf-info`` event: run Doxygen and derive API includes.

    Steps: collect preprocessor macros (sdkconfig + soc caps headers), run
    Doxygen over the project headers, copy changed XML output to ``xml_in``,
    generate per-API ``.inc`` files via gen_dxd, and regenerate the ESP_ERR
    definitions include.

    :param app: the Sphinx application object
    :param project_description: dict parsed from project_description.json of
        the dummy CMake project (provides build_dir, target, component paths)
    """
    # Sphinx doesn't expose the build dir directly; it is the parent of the doctree dir
    build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))

    # Parse kconfig macros to pass into doxygen
    #
    # TODO: this should use the set of "config which can't be changed" eventually,
    # not the header
    defines = _parse_defines(os.path.join(project_description["build_dir"],
                                          "config", "sdkconfig.h"))

    # Add all SOC _caps.h headers to the defines
    #
    # kind of a hack, be nicer to add a component info dict in project_description.json
    soc_path = [p for p in project_description["build_component_paths"] if p.endswith("/soc")][0]
    for soc_header in glob.glob(os.path.join(soc_path, project_description["target"],
                                             "include", "soc", "*_caps.h")):
        defines.update(_parse_defines(soc_header))

    # Call Doxygen to get XML files from the header files
    print("Calling Doxygen to generate latest XML files")
    doxy_env = {
        # NOTE: iterating the dict yields macro *names* only — the expansion
        # text is dropped, so Doxygen sees each macro as defined-to-empty.
        # Presumably intentional for PREDEFINED; confirm if expansions matter.
        "ENV_DOXYGEN_DEFINES": " ".join(defines),
        "IDF_PATH": app.config.idf_path,
    }
    doxyfile = os.path.join(app.config.docs_root, "Doxyfile")
    # note: run Doxygen in the build directory, so the xml & xml_in files end up in there
    subprocess.check_call(["doxygen", doxyfile], env=doxy_env, cwd=build_dir)

    # Doxygen has generated XML files in 'xml' directory.
    # Copy them to 'xml_in', only touching the files which have changed.
    copy_if_modified(os.path.join(build_dir, 'xml/'), os.path.join(build_dir, 'xml_in/'))

    # Generate 'api_name.inc' files using the XML files by Doxygen
    # (gen_dxd is configured via module-level attributes, then invoked)
    gen_dxd.builddir = build_dir
    gen_dxd.doxyfile_path = doxyfile
    gen_dxd.header_file_path_prefix = "components/"
    gen_dxd.xml_directory_path = "{}/xml".format(build_dir)
    gen_dxd.inc_directory_path = "{}/inc".format(build_dir)
    gen_dxd.generate_api_inc_files()

    # Generate 'esp_err_defs.inc' file with ESP_ERR_ error code definitions from inc file
    esp_err_inc_path = '{}/inc/esp_err_defs.inc'.format(build_dir)
    call_with_python('{}/tools/gen_esp_err_to_name.py --rst_output {}.in'.format(app.config.idf_path, esp_err_inc_path))
    copy_if_modified(esp_err_inc_path + '.in', esp_err_inc_path)

View file

@ -122,6 +122,7 @@ External RAM use has the following restrictions:
* External RAM cannot be used as task stack memory. Due to this, :cpp:func:`xTaskCreate` and similar functions will always allocate internal memory for stack and task TCBs, and functions such as :cpp:func:`xTaskCreateStatic` will check if the buffers passed are internal.
* By default, failure to initialize external RAM will cause the ESP-IDF startup to abort. This can be disabled by enabling the config item :ref:`CONFIG_SPIRAM_IGNORE_NOTFOUND`. If :ref:`CONFIG_SPIRAM_ALLOW_BSS_SEG_EXTERNAL_MEMORY` is enabled, the option to ignore failure is not available as the linker will have assigned symbols to external memory addresses at link time.
.. only:: esp32
.. include:: inc/external-ram-esp32-notes.inc

View file

@ -138,4 +138,4 @@ Although the methods above are recommended for ESP-IDF users, they are not a mus
List of IDF Tools
-----------------
.. include:: /_build/inc/idf-tools-inc.rst
.. include-build-file:: idf-tools-inc.rst

View file

@ -10,7 +10,7 @@ import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from conf_common import * # noqa: F401, F403 - need to make available everything from common
from local_util import download_file_if_missing # noqa: E402 - need to import from common folder
from idf_extensions.util import download_file_if_missing # noqa: E402 - need to import from common folder
# General information about the project.
project = u'ESP-IDF Programming Guide'
@ -32,5 +32,4 @@ nwdiag_fontpath = '../_static/DejaVuSans.ttf'
rackdiag_fontpath = '../_static/DejaVuSans.ttf'
packetdiag_fontpath = '../_static/DejaVuSans.ttf'
update_exclude_patterns(tags)
update_exclude_patterns(tags) # noqa: F405, need to import * from conf_common

View file

@ -5,6 +5,8 @@ This documentation is created using `Sphinx <http://www.sphinx-doc.org/>`_ appli
Besides Sphinx there are several other applications that help to provide nicely formatted and easy to navigate documentation. These applications are listed in section :ref:`setup-for-building-documentation` with the installed version numbers provided in file :idf_file:`docs/requirements.txt`.
We build ESP-IDF documentation for two languages (English, Simplified Chinese) and for multiple chips. Therefore we don't run ``sphinx`` directly, there is a wrapper Python program ``build_docs.py`` that runs Sphinx.
On top of that we have created a couple of custom add-ons and extensions to help integrate documentation with the underlying `ESP-IDF`_ repository and further improve navigation as well as maintenance of documentation.
The purpose of this section is to provide a quick reference to the add-ons and the extensions.
@ -17,48 +19,43 @@ Documentation Folder Structure
* The ``docs`` folder contains localized documentation in :idf:`docs/en` (English) and :idf:`docs/zh_CN` (simplified Chinese) subfolders.
* Graphics files and fonts common to localized documentation are contained in :idf:`docs/_static` subfolder
* Remaining files in the root of ``docs`` as well as ``docs/en`` and ``docs/zh_CN`` provide configuration and scripts used to automate documentation processing including the add-ons and extensions.
* Several folders and files are generated dynamically during documentations build and placed primarily in ``docs/[lang]/_build`` folders. These folders are temporary and not visible in `ESP-IDF`_ repository,
* Sphinx extensions are provided in two directories, ``extensions`` and ``idf_extensions``
* A ``_build`` directory is created in the ``docs`` folder by ``build_docs.py``. This directory is not added to the `ESP-IDF`_ repository.
Add-ons and Extensions Reference
--------------------------------
Config Files
^^^^^^^^^^^^
:idf_file:`docs/conf_common.py`
This file contains configuration common to each localized documentation (e.g. English, Chinese). The contents of this file is imported to standard Sphinx configuration file ``conf.py`` located in respective language folders (e.g. ``docs/en``, ``docs/zh_CN``) during build for each language.
:idf_file:`docs/check_doc_warnings.sh`
If there are any warnings reported during documentation build, then the build is failed. The warnings should be resolved before merging any documentation updates. This script is doing check for warnings in respective log file to fail the build. See also description of ``sphinx-known-warnings.txt`` below.
:idf_file:`docs/sphinx-known-warnings.txt`
There are couple of spurious Sphinx warnings that cannot be resolved without doing update to the Sphinx source code itself. For such specific cases respective warnings are documented in ``sphinx-known-warnings.txt`` file, that is checked during documentation build, to ignore the spurious warnings.
Scripts
^^^^^^^
:idf_file:`docs/build_docs.py`
Top-level executable program which runs a Sphinx build for one or more language/target combinations. Run ``build_docs.py --help`` for full command line options.
When ``build_docs.py`` runs Sphinx it sets the ``idf_target`` configuration variable, sets a Sphinx tag with the same name as the configuration variable, and uses some environment variables to communicate paths to :ref:`IDF-Specific Extensions`.
:idf_file:`docs/check_lang_folder_sync.sh`
To reduce potential discrepancies when maintaining concurrent language version, the structure and filenames of language folders ``docs/en`` and ``docs/zh_CN`` folders should be kept identical. The script ``check_lang_folder_sync.sh`` is run on each documentation build to verify if this condition is met.
.. note::
.. note::
If new content is provided in e.g. English, and there is no translation yet, then the corresponding file in the ``zh_CN`` folder should contain an ``.. include::`` directive pointing to the source file in English. This will automatically make the English version visible to Chinese readers. For example, if a file ``docs/zh_CN/contribute/documenting-code.rst`` does not have a Chinese translation, then it should contain ``.. include:: ../../en/contribute/documenting-code.rst`` instead.
:idf_file:`docs/docs_common.mk`
It contains the common code which is included into the language-specific ``Makefiles``. Note that this file contains couple of customizations comparing to what is provided within standard Sphinx installation, e.g. ``gh-linkcheck`` command has been added.
Non-Docs Scripts
^^^^^^^^^^^^^^^^
:idf_file:`docs/gen-dxd.py`
A Python script that generates API reference files based on Doxygen ``xml`` output. The files have an ``inc`` extension and are located in ``docs/[lang]/_build/inc`` directory created dynamically when documentation is build. Please refer to :doc:`documenting-code` and :doc:`../api-reference/template`, section **API Reference** for additional details on this process.
:idf_file:`docs/gen-toolchain-links.py`
There couple of places in documentation that provide links to download the toolchain. To provide one source of this information and reduce effort to manually update several files, this script generates toolchain download links and toolchain unpacking code snippets based on information found in :idf_file:`tools/toolchain_versions.mk`.
:idf_file:`docs/gen-version-specific-includes.py`
Another Python script to automatically generate reStructuredText Text ``.inc`` snippets with version-based content for this ESP-IDF version.
:idf_file:`docs/html_redirects.py`
During documentation lifetime some source files are moved between folders or renamed. This Python script is adding a mechanism to redirect documentation pages that have changed URL by generating in the Sphinx output static HTML redirect pages. The script is used together with a redirection list ``html_redirect_pages`` defined in file :idf_file:`docs/conf_common.py`.
:idf_file:`docs/link-roles.py`
This is an implementation of a custom `Sphinx Roles <https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html>`_ to help linking from documentation to specific files and folders in `ESP-IDF`_. For description of implemented roles please see :ref:`link-custom-roles` and :ref:`link-language-versions`.
:idf_file:`docs/local_util.py`
A collection of utility functions useful primarily when building documentation locally (see :ref:`setup-for-building-documentation`) to reduce the time to generate documentation on a second and subsequent builds. The utility functions check what Doxygen ``xml`` input files have been changed and copy these files to destination folders, so only the changed files are used during build process.
:idf_file:`docs/sphinx-known-warnings.txt`
There are couple of spurious Sphinx warnings that cannot be resolved without doing update to the Sphinx source code itself. For such specific cases respective warnings are documented in ``sphinx-known-warnings.txt`` file, that is checked during documentation build, to ignore the spurious warnings.
These scripts are used to build docs but also used for other purposes:
:idf_file:`tools/gen_esp_err_to_name.py`
This script is traversing the `ESP-IDF`_ directory structure looking for error codes and messages in source code header files to generate an ``.inc`` file to include in documentation under :doc:`../api-reference/error-codes`.
@ -66,7 +63,89 @@ Add-ons and Extensions Reference
:idf_file:`tools/kconfig_new/confgen.py`
Options to configure ESP-IDF's :idf:`components` are contained in ``Kconfig`` files located inside directories of individual components, e.g. :idf_file:`components/bt/Kconfig`. This script is traversing the ``component`` directories to collect configuration options and generate an ``.inc`` file to include in documentation under :ref:`configuration-options-reference`.
Generic Extensions
^^^^^^^^^^^^^^^^^^
These are Sphinx extensions developed for IDF that don't rely on any IDF-docs-specific behaviour or configuration:
:idf_file:`docs/extensions/toctree_filter.py`
Sphinx extension that overrides the ``:toctree:`` directive to allow filtering entries based on whether a tag is set, as ``:tagname: toctree_entry``. See the Python file for a more complete description.
:idf_file:`docs/extensions/html_redirects.py`
During documentation lifetime some source files are moved between folders or renamed. This Sphinx extension adds a mechanism to redirect documentation pages that have changed URL by generating in the Sphinx output static HTML redirect pages. The script is used together with a redirection list ``html_redirect_pages``. ``conf_common.py`` builds this list from :idf_file:`docs/page_redirects.txt`
Third Party Extensions
^^^^^^^^^^^^^^^^^^^^^^
- ``sphinxcontrib`` extensions for blockdiag, seqdiag, actdiag, nwdiag, rackdiag & packetdiag diagrams.
- `Sphinx selective exclude`_ ``eager_only`` extension
.. _idf-specific extensions:
IDF-Specific Extensions
^^^^^^^^^^^^^^^^^^^^^^^
Build System Integration
########################
:idf:`docs/idf_extensions/build_system/`
Python package implementing a Sphinx extension to pull IDF build system information into the docs build
* Creates a dummy CMake IDF project and runs CMake to generate metadata
* Registers some new configuration variables and emits a new Sphinx event, both for use by other extensions.
Configuration Variables
@@@@@@@@@@@@@@@@@@@@@@@
* ``docs_root`` - The absolute path of the $IDF_PATH/docs directory
* ``idf_path`` - The value of IDF_PATH variable, or the absolute path of IDF_PATH if environment unset
* ``build_dir`` - The build directory passed in by ``build_docs.py``, default will be like ``_build/<lang>/<target>``
* ``idf_target`` - The IDF_TARGET value. Expected that ``build_docs.py`` set this on the Sphinx command line
New Event
@@@@@@@@@
``idf-info`` event is emitted early in the build, after the dummy project CMake run is complete.
Arguments are ``(app, project_description)`` where ``project_description`` is a dict containing the values parsed from ``project_description.json`` in the CMake build directory.
Other IDF-specific extensions subscribe to this event and use it to set up some docs parameters based on build system info.
Other Extensions
################
:idf_file:`docs/idf_extensions/include_build_file.py`
The ``include-build-file`` directive is like the built-in ``include`` directive, but the file path is evaluated relative to ``build_dir``.
:idf_file:`docs/idf_extensions/kconfig_reference.py`
Subscribes to ``idf-info`` event and uses confgen to generate ``kconfig.inc`` from the components included in the default project build. This file is then included into :doc:`/api-reference/kconfig`.
:idf_file:`docs/idf_extensions/link_roles.py`
This is an implementation of a custom `Sphinx Roles <https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html>`_ to help linking from documentation to specific files and folders in `ESP-IDF`_. For description of implemented roles please see :ref:`link-custom-roles` and :ref:`link-language-versions`.
:idf_file:`docs/idf_extensions/run_doxygen.py`
Subscribes to ``idf-info`` event and runs Doxygen (:idf_file:`docs/Doxyfile`) to generate XML files describing key headers, and then runs Breathe to convert these to ``.inc`` files which can be included directly into API reference pages.
Pushes a number of target-specific custom environment variables into Doxygen, including all macros defined in the project's default ``sdkconfig.h`` file and all macros defined in all ``soc`` component ``xxx_caps.h`` headers. This means that public API headers can depend on target-specific configuration options or ``soc`` capabilities headers options as ``#ifdef`` & ``#if`` preprocessor selections in the header.
This means we can generate different Doxygen files, depending on the target we are building docs for.
Please refer to :doc:`documenting-code` and :doc:`../api-reference/template`, section **API Reference** for additional details on this process.
:idf_file:`docs/idf_extensions/esp_err_definitions.py`
Small wrapper extension that calls ``gen_esp_err_to_name.py`` and updates the included .rst file if it has changed.
:idf_file:`docs/idf_extensions/gen_toolchain_links.py`
There are a couple of places in the documentation that provide links to download the toolchain. To provide one source of this information and reduce the effort of manually updating several files, this script generates toolchain download links and toolchain unpacking code snippets based on information found in :idf_file:`tools/toolchain_versions.mk`.
:idf_file:`docs/idf_extensions/gen_version_specific_includes.py`
Another extension to automatically generate reStructuredText ``.inc`` snippets with version-based content for this ESP-IDF version.
:idf_file:`docs/idf_extensions/util.py`
A collection of utility functions useful primarily when building documentation locally (see :ref:`setup-for-building-documentation`) to reduce the time to generate documentation on a second and subsequent builds.
Related Documents
-----------------
@ -75,3 +154,4 @@ Related Documents
.. _ESP-IDF: https://github.com/espressif/esp-idf/
.. _Sphinx selective exclude: https://github.com/pfalcon/sphinx_selective_exclude

View file

@ -0,0 +1 @@
See docs/en/contribute/add-ons-reference.rst (or in the IDF docs) for details.

View file

View file

@ -17,7 +17,7 @@
# Mechanism to generate static HTML redirect pages in the output
#
# Uses redirect_template.html and the list of pages given in
# conf.html_redirect_pages
# the file conf.html_redirect_pages
#
# Adapted from ideas in https://tech.signavio.com/2017/managing-sphinx-redirects
import os.path
@ -45,7 +45,7 @@ def setup(app):
# to create HTML redirects
app.connect('html-collect-pages', create_redirect_pages)
return { 'parallel_read_safe' : True, 'parallel_write_safe': True, 'version': '0.1' }
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
def create_redirect_pages(app):

View file

@ -1,13 +1,13 @@
# Based on https://stackoverflow.com/a/46600038 with some modifications
import re
from sphinx.directives.other import TocTree
from sphinx.util.nodes import explicit_title_re
from sphinx.util import docname_join
def setup(app):
app.add_directive('toctree', TocTreeFilt, override=True)
return { 'parallel_read_safe' : True, 'parallel_write_safe': True, 'version': '0.1' }
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
class TocTreeFilt(TocTree):
"""
@ -24,16 +24,14 @@ class TocTreeFilt(TocTree):
when it scan the src/ directory, so it's also necessary to make sure that the files
are covered by the exclude_patterns list in conf.py
"""
RE_PATTERN = re.compile('^\s*:(.+):\s*(.+)$')
RE_PATTERN = re.compile(r'^\s*:(.+):\s*(.+)$')
def run(self):
# Remove all TOC entries that should not be on display
env = self.state.document.settings.env
self.content = [ self.filter_entry(env, e) for e in self.content if e is not None ]
self.content = [self.filter_entry(env, e) for e in self.content if e is not None]
return super(TocTreeFilt, self).run()
def filter_entry(self, env, entry):
m = self.RE_PATTERN.match(entry)
if m is not None:
@ -41,4 +39,3 @@ class TocTreeFilt(TocTree):
if not env.app.builder.tags.eval_condition(tag_filter):
return None
return entry

View file

@ -0,0 +1,2 @@
See docs/en/contribute/add-ons-reference.rst (or in the IDF docs) for details.

View file

View file

@ -14,19 +14,43 @@ import json
# this directory also contains the dummy IDF project
project_path = os.path.abspath(os.path.dirname(__file__))
def setup(app):
builddir = os.path.dirname(app.doctreedir.rstrip(os.sep))
app.add_config_value('docs_root', "", 'env')
app.add_config_value('idf_path', os.environ.get("IDF_PATH", ""), 'env')
# Setup some common paths
try:
build_dir = os.environ["BUILDDIR"] # TODO see if we can remove this
except KeyError:
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
try:
os.mkdir(build_dir)
except OSError:
pass
try:
os.mkdir(os.path.join(build_dir, 'inc'))
except OSError:
pass
# Fill in a default IDF_PATH if it's missing (ie when Read The Docs is building the docs)
try:
idf_path = os.environ['IDF_PATH']
except KeyError:
idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
app.add_config_value('docs_root', os.path.join(idf_path, "docs"), 'env')
app.add_config_value('idf_path', idf_path, 'env')
app.add_config_value('idf_target', 'esp32', 'env')
app.add_config_value('build_dir', os.environ.get("BUILDDIR", ""), 'env') # not actually an IDF thing
app.add_config_value('build_dir', build_dir, 'env') # not actually an IDF thing
app.add_event('idf-info')
# Attaching the generate event to env-get-outdated is a bit of a hack,
# we want this to run early in the docs build but unclear exactly when
app.connect('env-get-outdated', generate_idf_info)
return { 'parallel_read_safe' : True, 'parallel_write_safe': True, 'version': '0.1' }
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
def generate_idf_info(app, env, added, changed, removed):
print("Running CMake on dummy project to get build info...")
@ -43,17 +67,20 @@ def generate_idf_info(app, env, added, changed, removed):
if not os.path.exists(os.path.join(cmake_build_dir, "CMakeCache.txt")):
# if build directory not created yet, run a reconfigure pass over it
print("Starting new dummy IDF project...")
subprocess.check_call(idf_py + [ "set-target", app.config.idf_target])
subprocess.check_call(idf_py + ["set-target", app.config.idf_target])
else:
print("Re-running CMake on the existing IDF project in {}".format(cmake_build_dir))
subprocess.check_call(idf_py + [ "reconfigure"])
subprocess.check_call(idf_py + ["reconfigure"])
with open(os.path.join(cmake_build_dir, "project_description.json")) as f:
project_description = json.load(f)
if project_description["target"] != app.config.idf_target:
# this shouldn't really happen unless someone has been moving around directories inside _build, as
# the cmake_build_dir path should be target-specific
raise RuntimeError("Error configuring the dummy IDF project for {}. Target in project description is {}. Is _build directory contents corrupt?".format(app.config.idf_target, project_description["target"]))
raise RuntimeError(("Error configuring the dummy IDF project for {}. " +
"Target in project description is {}. " +
"Is build directory contents corrupt?")
.format(app.config.idf_target, project_description["target"]))
app.emit('idf-info', project_description)
return []

View file

@ -0,0 +1,14 @@
# Extension to generate esp_err definition as .rst
from util import copy_if_modified, call_with_python
def setup(app):
app.connect('idf-info', generate_err_defs)
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
def generate_err_defs(app, project_description):
# Generate 'esp_err_defs.inc' file with ESP_ERR_ error code definitions from inc file
esp_err_inc_path = '{}/inc/esp_err_defs.inc'.format(app.config.build_dir)
call_with_python('{}/tools/gen_esp_err_to_name.py --rst_output {}.in'.format(app.config.idf_path, esp_err_inc_path))
copy_if_modified(esp_err_inc_path + '.in', esp_err_inc_path)

View file

@ -0,0 +1,19 @@
# Generate the IDF Tools list include file by running tools/idf_tools.py gen-doc
# (header previously copy-pasted from gen_toolchain_links.py)
from __future__ import print_function
import os.path
from util import copy_if_modified, call_with_python
def setup(app):
# we don't actually need idf-info, just a convenient event to trigger this on
app.connect('idf-info', generate_idf_tools_links)
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
def generate_idf_tools_links(app, project_description):
print("Generating IDF Tools list")
os.environ["IDF_MAINTAINER"] = "1"
tools_rst = os.path.join(app.config.build_dir, 'inc', 'idf-tools-inc.rst')
tools_rst_tmp = os.path.join(app.config.build_dir, 'idf-tools-inc.rst')
call_with_python("{}/tools/idf_tools.py gen-doc --output {}".format(app.config.idf_path, tools_rst_tmp))
copy_if_modified(tools_rst_tmp, tools_rst)

View file

@ -1,15 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This script generates toolchain download links and toolchain unpacking
# code snippets based on information found in $IDF_PATH/tools/toolchain_versions.mk
#
# Generate toolchain download links from toolchain info makefile
from __future__ import print_function
import sys
import os
import os.path
from collections import namedtuple
from util import copy_if_modified
BASE_URL = 'https://dl.espressif.com/dl/'
PlatformInfo = namedtuple("PlatformInfo", [
"platform_name",
@ -20,19 +15,22 @@ PlatformInfo = namedtuple("PlatformInfo", [
])
def main():
if len(sys.argv) != 4:
print("Usage: gen-toolchain-links.py <versions file> <base download URL> <output directory>")
sys.exit(1)
def setup(app):
# we don't actually need idf-info, just a convenient event to trigger this on
app.connect('idf-info', generate_toolchain_download_links)
out_dir = sys.argv[3]
if not os.path.exists(out_dir):
print("Creating directory %s" % out_dir)
os.mkdir(out_dir)
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
base_url = sys.argv[2]
versions_file = sys.argv[1]
def generate_toolchain_download_links(app, project_description):
print("Generating toolchain download links")
toolchain_tmpdir = '{}/toolchain_inc'.format(app.config.build_dir)
toolchain_versions = os.path.join(app.config.idf_path, "tools/toolchain_versions.mk")
gen_toolchain_links(toolchain_versions, toolchain_tmpdir)
copy_if_modified(toolchain_tmpdir, '{}/inc'.format(app.config.build_dir))
def gen_toolchain_links(versions_file, out_dir):
version_vars = {}
with open(versions_file) as f:
for line in f:
@ -67,13 +65,18 @@ def main():
PlatformInfo("win32", "win32", "zip", None, None)
]
try:
os.mkdir(out_dir)
except OSError:
pass
with open(os.path.join(out_dir, 'download-links.inc'), "w") as links_file:
for p in platform_info:
archive_name = 'xtensa-esp32-elf-gcc{}-{}-{}.{}'.format(
gcc_version.replace('.', '_'), toolchain_desc, p.platform_archive_suffix, p.extension)
print('.. |download_link_{}| replace:: {}{}'.format(
p.platform_name, base_url, archive_name), file=links_file)
p.platform_name, BASE_URL, archive_name), file=links_file)
if p.unpack_code is not None:
with open(os.path.join(out_dir, 'unpack-code-%s.inc' % p.platform_name), "w") as f:
@ -81,7 +84,3 @@ def main():
with open(os.path.join(out_dir, 'scratch-build-code.inc'), "w") as code_file:
print(scratch_build_code_linux_macos.format(toolchain_desc), file=code_file)
if __name__ == "__main__":
main()

View file

@ -1,15 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Python script to generate ReSTructured Text .inc snippets
# Sphinx extension to generate ReSTructured Text .inc snippets
# with version-based content for this IDF version
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from util import copy_if_modified
import subprocess
import os
import sys
import re
TEMPLATES = {
@ -120,23 +120,26 @@ TEMPLATES = {
}
def main():
if len(sys.argv) != 3:
print("Usage: gen-git-clone.py <language> <output file path>")
sys.exit(1)
def setup(app):
# doesn't need to be this event specifically, but this is roughly the right time
app.connect('idf-info', generate_version_specific_includes)
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
language = sys.argv[1]
out_dir = sys.argv[2]
if not os.path.exists(out_dir):
print("Creating directory %s" % out_dir)
os.mkdir(out_dir)
def generate_version_specific_includes(app, project_description):
language = app.config.language
tmp_out_dir = os.path.join(app.config.build_dir, "version_inc")
if not os.path.exists(tmp_out_dir):
print("Creating directory %s" % tmp_out_dir)
os.mkdir(tmp_out_dir)
template = TEMPLATES[language]
version, ver_type, is_stable = get_version()
write_git_clone_inc_files(template, out_dir, version, ver_type, is_stable)
write_version_note(template["version-note"], out_dir, version, ver_type, is_stable)
write_git_clone_inc_files(template, tmp_out_dir, version, ver_type, is_stable)
write_version_note(template["version-note"], tmp_out_dir, version, ver_type, is_stable)
copy_if_modified(tmp_out_dir, os.path.join(app.config.build_dir, "inc"))
print("Done")
@ -217,7 +220,3 @@ def get_version():
return ("master", "branch", False)
else:
return (branches[0], "branch", False) # take whatever the first branch is
if __name__ == "__main__":
main()

View file

@ -1,8 +1,9 @@
import os.path
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.misc import Include as BaseInclude
from sphinx.util.docutils import SphinxDirective
class IncludeBuildFile(BaseInclude, SphinxDirective):
"""
Like the standard "Include" directive, but relative to the app
@ -14,7 +15,8 @@ class IncludeBuildFile(BaseInclude, SphinxDirective):
self.env.note_included(abspath)
return super(IncludeBuildFile, self).run()
def setup(app):
directives.register_directive('include-build-file', IncludeBuildFile)
return { 'parallel_read_safe' : True, 'parallel_write_safe': True, 'version': '0.1' }
def setup(app):
directives.register_directive('include-build-file', IncludeBuildFile)
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}

View file

@ -3,14 +3,16 @@ import os.path
import sys
import subprocess
from local_util import copy_if_modified
from util import copy_if_modified
def setup(app):
# The idf_build_system extension will emit this event once it
# has parsed the IDF project's information
app.connect('idf-info', generate_reference)
return { 'parallel_read_safe' : True, 'parallel_write_safe': True, 'version': '0.1' }
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
def generate_reference(app, project_description):
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
@ -40,7 +42,7 @@ def generate_reference(app, project_description):
"--env", "COMPONENT_KCONFIGS_PROJBUILD={}".format(" ".join(kconfig_projbuilds)),
"--env", "COMPONENT_KCONFIGS_SOURCE_FILE={}".format(kconfigs_source_path),
"--env", "COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}".format(kconfig_projbuilds_source_path),
]
]
subprocess.check_call(prepare_kconfig_files_args)
confgen_args = [sys.executable,
@ -56,8 +58,6 @@ def generate_reference(app, project_description):
"--env", "IDF_PATH={}".format(app.config.idf_path),
"--env", "IDF_TARGET={}".format(app.config.idf_target),
"--output", "docs", kconfig_inc_path + '.in'
]
]
subprocess.check_call(confgen_args, cwd=app.config.idf_path)
copy_if_modified(kconfig_inc_path + '.in', kconfig_inc_path)

View file

@ -4,17 +4,20 @@ from __future__ import print_function
from __future__ import unicode_literals
import re
import os
import subprocess
from docutils import nodes
from local_util import run_cmd_get_output
def get_github_rev():
path = run_cmd_get_output('git rev-parse --short HEAD')
tag = run_cmd_get_output('git describe --exact-match')
path = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
try:
tag = subprocess.check_output(['git', 'describe', '--exact-match']).strip()
except subprocess.CalledProcessError:
tag = None
print('Git commit ID: ', path)
if len(tag):
if tag:
print('Git tag: ', tag)
path = tag
return tag
return path
@ -38,15 +41,15 @@ def setup(app):
if on_rtd:
# provide RTD specific commit identification to be included in the link
tag_rev = 'latest'
if (run_cmd_get_output('git rev-parse --short HEAD') != rev):
if (subprocess.check_output(['git','rev-parse', '--short', 'HEAD']).strip() != rev):
tag_rev = rev
else:
# if not on the RTD then provide generic identification
tag_rev = run_cmd_get_output('git describe --always')
tag_rev = subprocess.check_output(['git', 'describe', '--always']).strip()
app.add_role('link_to_translation', crosslink('%s../../%s/{}/%s.html'.format(tag_rev)))
return { 'parallel_read_safe' : True, 'parallel_write_safe': True, 'version': '0.1' }
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
def autolink(pattern):

227
docs/gen-dxd.py → docs/idf_extensions/run_doxygen.py Executable file → Normal file
View file

@ -1,40 +1,15 @@
#!/usr/bin/env python
#
# gen-dxd.py - Generate Doxygen Directives
#
# This code is in the Public Domain (or CC0 licensed, at your option.)
# Unless required by applicable law or agreed to in writing, this
# software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied.
#
# Extension to generate Doxygen XML include files, with IDF config & soc macros included
from __future__ import print_function
from __future__ import unicode_literals
from builtins import range
from io import open
import sys
import glob
import os
import os.path
import re
import subprocess
from util import copy_if_modified
# Determime build directory
builddir = '_build'
if 'BUILDDIR' in os.environ:
builddir = os.environ['BUILDDIR']
# Script configuration
header_file_path_prefix = "../components/"
"""string: path prefix for header files.
"""
doxyfile_path = "./Doxyfile"
"""string: path to a file containing header files to processs.
"""
xml_directory_path = os.path.join(builddir, "xml")
"""string: path to directory with XML files by Doxygen.
"""
inc_directory_path = os.path.join(builddir, 'inc')
"""string: path prefix for header files.
"""
all_kinds = [
ALL_KINDS = [
("function", "Functions"),
("union", "Unions"),
("struct", "Structures"),
@ -124,7 +99,7 @@ def convert_api_xml_to_inc(app, doxyfile):
if not os.path.exists(inc_directory_path):
os.makedirs(inc_directory_path)
header_paths = get_doxyfile_input_paths(app, doxyfile)
header_paths = get_doxyfile_input_paths(doxyfile)
print("Generating 'api_name.inc' files with Doxygen directives")
for header_file_path in header_paths:
api_name = get_api_name(header_file_path)
@ -141,7 +116,7 @@ def convert_api_xml_to_inc(app, doxyfile):
inc_file.write(rst_output)
def get_doxyfile_input_paths(app, doxyfile_path):
def get_doxyfile_input_paths(doxyfile_path):
"""Get contents of Doxyfile's INPUT statement.
Returns:
@ -149,37 +124,35 @@ def get_doxyfile_input_paths(app, doxyfile_path):
"""
if not os.path.isfile(doxyfile_path):
print("Doxyfile '%s' does not exist!" % doxyfile_path)
sys.exit()
raise RuntimeError("Doxyfile '{}' does not exist!".format(doxyfile_path))
print("Getting Doxyfile's INPUT")
input_file = open(doxyfile_path, "r", encoding='utf-8')
line = input_file.readline()
# read contents of Doxyfile until 'INPUT' statement
while line:
if line.find("INPUT") == 0:
break
with open(doxyfile_path, "r", encoding='utf-8') as input_file:
line = input_file.readline()
# read contents of Doxyfile until 'INPUT' statement
while line:
if line.find("INPUT") == 0:
break
line = input_file.readline()
doxyfile_INPUT = ""
line = input_file.readline()
# skip input_file contents until end of 'INPUT' statement
while line:
if line.isspace():
# we have reached the end of 'INPUT' statement
break
# process only lines that are not comments
if line.find("#") == -1:
# extract header file path inside components folder
m = re.search(header_file_path_prefix + "(.*\.h)", line) # noqa: W605 - regular expression
header_file_path = m.group(1)
doxyfile_INPUT += header_file_path + "\n"
# proceed reading next line
doxyfile_INPUT = []
line = input_file.readline()
# skip input_file contents until end of 'INPUT' statement
while line:
if line.isspace():
# we have reached the end of 'INPUT' statement
break
# process only lines that are not comments
if line.find("#") == -1:
# extract header file path inside components folder
m = re.search("components/(.*\.h)", line) # noqa: W605 - regular expression
header_file_path = m.group(1)
doxyfile_INPUT.append(header_file_path)
# proceed reading next line
line = input_file.readline()
input_file.close()
return doxyfile_INPUT
@ -202,6 +175,42 @@ def get_api_name(header_file_path):
return api_name
def generate_directives(header_file_path, xml_directory_path):
"""Generate API reference with Doxygen directives for a header file.
Args:
header_file_path: a path to the header file with API.
Returns:
Doxygen directives for the header file.
"""
api_name = get_api_name(header_file_path)
# in XLT file name each "_" in the api name is expanded by Doxygen to "__"
xlt_api_name = api_name.replace("_", "__")
xml_file_path = "%s/%s_8h.xml" % (xml_directory_path, xlt_api_name)
rst_output = ""
rst_output = ".. File automatically generated by 'gen-dxd.py'\n"
rst_output += "\n"
rst_output += get_rst_header("Header File")
rst_output += "* :component_file:`" + header_file_path + "`\n"
rst_output += "\n"
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
tree = ET.ElementTree(file=xml_file_path)
for kind, label in ALL_KINDS:
rst_output += get_directives(tree, kind)
return rst_output
def get_rst_header(header_name):
"""Get rst formatted code with a header.
@ -305,109 +314,7 @@ def get_directives(tree, kind):
rst_output += ".. doxygen%s:: " % kind
rst_output += name.text + "\n"
if rst_output:
all_kinds_dict = dict(all_kinds)
all_kinds_dict = dict(ALL_KINDS)
rst_output = get_rst_header(all_kinds_dict[kind]) + rst_output + "\n"
return rst_output
def generate_directives(header_file_path):
"""Generate API reference with Doxygen directives for a header file.
Args:
header_file_path: a path to the header file with API.
Returns:
Doxygen directives for the header file.
"""
api_name = get_api_name(header_file_path)
# in XLT file name each "_" in the api name is expanded by Doxygen to "__"
xlt_api_name = api_name.replace("_", "__")
xml_file_path = "%s/%s_8h.xml" % (xml_directory_path, xlt_api_name)
rst_output = ""
rst_output = ".. File automatically generated by 'gen-dxd.py'\n"
rst_output += "\n"
rst_output += get_rst_header("Header File")
rst_output += "* :component_file:`" + header_file_path + "`\n"
rst_output += "\n"
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
tree = ET.ElementTree(file=xml_file_path)
for i in range(len(all_kinds)):
kind = all_kinds[i][0]
rst_output += get_directives(tree, kind)
return rst_output
def generate_api_inc_files():
"""Generate header_file.inc files
with API reference made of doxygen directives
for each header file
specified in the 'INPUT' statement of Doxyfile.
"""
if not os.path.isdir(xml_directory_path):
print("Directory %s does not exist!" % xml_directory_path)
sys.exit()
if not os.path.exists(inc_directory_path):
os.makedirs(inc_directory_path)
list_to_generate = get_doxyfile_input()
print("Generating 'api_name.inc' files with Doxygen directives")
for header_file_path in list_to_generate.splitlines():
api_name = get_api_name(header_file_path)
inc_file_path = inc_directory_path + "/" + api_name + ".inc"
rst_output = generate_directives(header_file_path)
previous_rst_output = ''
if os.path.isfile(inc_file_path):
with open(inc_file_path, "r", encoding='utf-8') as inc_file_old:
previous_rst_output = inc_file_old.read()
if previous_rst_output != rst_output:
with open(inc_file_path, "w", encoding='utf-8') as inc_file:
inc_file.write(rst_output)
if __name__ == "__main__":
"""The main script that generates
Doxygen directives.
"""
# Process command line arguments, if any
if len(sys.argv) > 1:
if not os.path.isdir(xml_directory_path):
print("Directory %s does not exist!" % xml_directory_path)
sys.exit()
header_file_path = sys.argv[1]
api_name = get_api_name(header_file_path)
if api_name:
rst_output = generate_directives(header_file_path)
print("Doxygen directives for '%s'" % header_file_path)
print()
print(rst_output)
else:
print("Options to execute 'gen-dxd.py' application:")
print("1: $ python gen-dxd.py")
print(" Generate API 'header_file.inc' files for headers defined in '%s'" % doxyfile_path)
print("2: $ python gen-dxd.py header_file_path")
print(" Print out Doxygen directives for a single header file")
print(" example: $ python gen-dxd.py mdns/include/mdns.h")
print(" NOTE: Run Doxygen first to get XML files for the header file")
sys.exit()
# No command line arguments given
generate_api_inc_files()

View file

@ -29,10 +29,6 @@ except ImportError:
_urlretrieve = urllib.urlretrieve
def run_cmd_get_output(cmd):
return os.popen(cmd).read().strip()
def files_equal(path_1, path_2):
if not os.path.exists(path_1) or not os.path.exists(path_2):
return False
@ -77,6 +73,7 @@ def download_file_if_missing(from_url, to_path):
with open(tmp_file, 'rb') as tmp:
fobj.write(tmp.read())
def call_with_python(cmd):
# using sys.executable ensures that the scripts are called with the same Python interpreter
if os.system('{} {}'.format(sys.executable, cmd)) != 0:

View file

@ -12,4 +12,4 @@ sphinxcontrib-nwdiag>=0.9.5, <2.0.0
nwdiag==1.0.4
recommonmark
future>=0.16.0 # for ../tools/gen_esp_err_to_name.py
sphinx_selective_exclude==1.0.1
sphinx_selective_exclude>=1.0.3

View file

@ -10,7 +10,7 @@ import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from conf_common import * # noqa: F401, F403 - need to make available everything from common
from local_util import download_file_if_missing # noqa: E402 - need to import from common folder
from idf_extensions.util import download_file_if_missing # noqa: E402 - need to import from common folder
# General information about the project.
project = u'ESP-IDF 编程指南'
@ -32,5 +32,4 @@ nwdiag_fontpath = '../_static/NotoSansSC-Regular.otf'
rackdiag_fontpath = '../_static/NotoSansSC-Regular.otf'
packetdiag_fontpath = '../_static/NotoSansSC-Regular.otf'
update_exclude_patterns(tags)
update_exclude_patterns(tags) # noqa: F405, need to import * from conf_common

View file

@ -16,6 +16,7 @@
import argparse
import os
import re
import sys
try:
@ -52,6 +53,8 @@ if __name__ == "__main__":
# adjustments for options which we use.
if line.startswith('file://'):
line = os.path.basename(line)
if line.startswith('-e') and '#egg=' in line: # version control URLs, take the egg= part at the end only
line = re.search(r'#egg=([^\s]+)', line).group(1)
try:
pkg_resources.require(line)
except Exception: