From 1c798393e239be4bd624abd9f9949d2f6ab35c48 Mon Sep 17 00:00:00 2001 From: Sergei Silnov Date: Thu, 3 Oct 2019 18:26:44 +0200 Subject: [PATCH] Add idf.py extensions and move core actions to separate files --- tools/idf.py | 1105 ++--------------- tools/idf_py_actions/README.md | 41 + tools/idf_py_actions/__init__.py | 0 tools/idf_py_actions/contstants.py | 32 + tools/idf_py_actions/errors.py | 6 + tools/idf_py_actions/global_options.py | 6 + tools/idf_py_actions/idf_01_core_actions.py | 342 +++++ tools/idf_py_actions/idf_02_serial_actions.py | 209 ++++ tools/idf_py_actions/tools.py | 221 ++++ .../test_idf_py/extra_path/idf_some_module.py | 21 + .../idf_test_extension/__init__.py | 1 + .../idf_test_extension/test_extension.py | 24 + tools/test_idf_py/test_idf_py.py | 53 +- 13 files changed, 1077 insertions(+), 984 deletions(-) create mode 100644 tools/idf_py_actions/README.md create mode 100644 tools/idf_py_actions/__init__.py create mode 100644 tools/idf_py_actions/contstants.py create mode 100644 tools/idf_py_actions/errors.py create mode 100644 tools/idf_py_actions/global_options.py create mode 100644 tools/idf_py_actions/idf_01_core_actions.py create mode 100644 tools/idf_py_actions/idf_02_serial_actions.py create mode 100644 tools/idf_py_actions/tools.py create mode 100644 tools/test_idf_py/extra_path/idf_some_module.py create mode 100644 tools/test_idf_py/test_idf_extensions/idf_test_extension/__init__.py create mode 100644 tools/test_idf_py/test_idf_extensions/idf_test_extension/test_extension.py diff --git a/tools/idf.py b/tools/idf.py index 628126050..60c002331 100755 --- a/tools/idf.py +++ b/tools/idf.py @@ -29,24 +29,16 @@ import codecs import json import locale -import multiprocessing import os import os.path -import platform -import re -import shutil import subprocess import sys from collections import Counter, OrderedDict +from importlib import import_module +from pkgutil import iter_modules - -class FatalError(RuntimeError): - """ - Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build proces.s - """ - - pass - +from idf_py_actions.errors import FatalError +from idf_py_actions.tools import (executable_exists, idf_version, merge_action_lists, realpath) # Use this Python interpreter for any subprocesses we launch PYTHON = sys.executable @@ -59,62 +51,6 @@ os.environ["PYTHON"] = sys.executable # Can be overridden from idf.bat using IDF_PY_PROGRAM_NAME PROG = os.getenv("IDF_PY_PROGRAM_NAME", sys.argv[0]) -# Make flavors, across the various kinds of Windows environments & POSIX... 
-if "MSYSTEM" in os.environ: # MSYS - MAKE_CMD = "make" - MAKE_GENERATOR = "MSYS Makefiles" -elif os.name == "nt": # other Windows - MAKE_CMD = "mingw32-make" - MAKE_GENERATOR = "MinGW Makefiles" -elif platform.system() == "FreeBSD": - MAKE_CMD = "gmake" - MAKE_GENERATOR = "Unix Makefiles" -else: - MAKE_CMD = "make" - MAKE_GENERATOR = "Unix Makefiles" - -GENERATORS = [ - # ('generator name', 'build command line', 'version command line', 'verbose flag') - ("Ninja", ["ninja"], ["ninja", "--version"], "-v"), - ( - MAKE_GENERATOR, - [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)], - [MAKE_CMD, "--version"], - "VERBOSE=1", - ), -] -GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS) -GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS) - -SUPPORTED_TARGETS = ["esp32", "esp32s2beta"] - - -def _run_tool(tool_name, args, cwd): - def quote_arg(arg): - " Quote 'arg' if necessary " - if " " in arg and not (arg.startswith('"') or arg.startswith("'")): - return "'" + arg + "'" - return arg - - display_args = " ".join(quote_arg(arg) for arg in args) - print("Running %s in directory %s" % (tool_name, quote_arg(cwd))) - print('Executing "%s"...' % str(display_args)) - try: - # Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup - subprocess.check_call(args, env=os.environ, cwd=cwd) - except subprocess.CalledProcessError as e: - raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode)) - - -def _realpath(path): - """ - Return the cannonical path with normalized case. - - It is useful on Windows to comparision paths in case-insensitive manner. - On Unix and Mac OS X it works as `os.path.realpath()` only. - """ - return os.path.normcase(os.path.realpath(path)) - def check_environment(): """ @@ -130,15 +66,13 @@ def check_environment(): # verify that IDF_PATH env variable is set # find the directory idf.py is in, then the parent directory of this, and assume this is IDF_PATH - detected_idf_path = _realpath(os.path.join(os.path.dirname(__file__), "..")) + detected_idf_path = realpath(os.path.join(os.path.dirname(__file__), "..")) if "IDF_PATH" in os.environ: - set_idf_path = _realpath(os.environ["IDF_PATH"]) + set_idf_path = realpath(os.environ["IDF_PATH"]) if set_idf_path != detected_idf_path: - print( - "WARNING: IDF_PATH environment variable is set to %s but %s path indicates IDF directory %s. " - "Using the environment variable directory, but results may be unexpected..." - % (set_idf_path, PROG, detected_idf_path) - ) + print("WARNING: IDF_PATH environment variable is set to %s but %s path indicates IDF directory %s. " + "Using the environment variable directory, but results may be unexpected..." 
% + (set_idf_path, PROG, detected_idf_path)) else: print("Setting IDF_PATH environment variable: %s" % detected_idf_path) os.environ["IDF_PATH"] = detected_idf_path @@ -149,356 +83,20 @@ def check_environment(): out = subprocess.check_output( [ os.environ["PYTHON"], - os.path.join( - os.environ["IDF_PATH"], "tools", "check_python_dependencies.py" - ), + os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py"), ], env=os.environ, ) - checks_output.append(out.decode('utf-8','ignore').strip()) + checks_output.append(out.decode('utf-8', 'ignore').strip()) except subprocess.CalledProcessError as e: - print(e.output.decode('utf-8','ignore')) + print(e.output.decode('utf-8', 'ignore')) print_idf_version() raise SystemExit(1) return checks_output -def executable_exists(args): - try: - subprocess.check_output(args) - return True - except Exception: - return False - - -def detect_cmake_generator(): - """ - Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found. - """ - for (generator, _, version_check, _) in GENERATORS: - if executable_exists(version_check): - return generator - raise FatalError( - "To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" - % PROG - ) - - -def _strip_quotes(value, regexp=re.compile(r"^\"(.*)\"$|^'(.*)'$|^(.*)$")): - """ - Strip quotes like CMake does during parsing cache entries - """ - - return [x for x in regexp.match(value).groups() if x is not None][0].rstrip() - - -def _new_cmakecache_entries(cache_path, new_cache_entries): - if not os.path.exists(cache_path): - return True - - if new_cache_entries: - current_cache = parse_cmakecache(cache_path) - - for entry in new_cache_entries: - key, value = entry.split("=", 1) - current_value = current_cache.get(key, None) - if current_value is None or _strip_quotes(value) != current_value: - return True - - return False - - -def _ensure_build_directory(args, always_run_cmake=False): - """Check the build directory exists and that cmake has been run there. - - If this isn't the case, create the build directory (if necessary) and - do an initial cmake run to configure it. - - This function will also check args.generator parameter. If the parameter is incompatible with - the build directory, an error is raised. If the parameter is None, this function will set it to - an auto-detected default generator or to the value already configured in the build directory. 
- """ - project_dir = args.project_dir - # Verify the project directory - if not os.path.isdir(project_dir): - if not os.path.exists(project_dir): - raise FatalError("Project directory %s does not exist" % project_dir) - else: - raise FatalError("%s must be a project directory" % project_dir) - if not os.path.exists(os.path.join(project_dir, "CMakeLists.txt")): - raise FatalError( - "CMakeLists.txt not found in project directory %s" % project_dir - ) - - # Verify/create the build directory - build_dir = args.build_dir - if not os.path.isdir(build_dir): - os.makedirs(build_dir) - cache_path = os.path.join(build_dir, "CMakeCache.txt") - - args.define_cache_entry.append("CCACHE_ENABLE=%d" % args.ccache) - - if always_run_cmake or _new_cmakecache_entries(cache_path, args.define_cache_entry): - if args.generator is None: - args.generator = detect_cmake_generator() - try: - cmake_args = [ - "cmake", - "-G", - args.generator, - "-DPYTHON_DEPS_CHECKED=1", - "-DESP_PLATFORM=1", - ] - if not args.no_warnings: - cmake_args += ["--warn-uninitialized"] - cmake_args += ["-DWARN_UNINITIALIZED=1"] - - if args.define_cache_entry: - cmake_args += ["-D" + d for d in args.define_cache_entry] - cmake_args += [project_dir] - - _run_tool("cmake", cmake_args, cwd=args.build_dir) - except Exception: - # don't allow partially valid CMakeCache.txt files, - # to keep the "should I run cmake?" logic simple - if os.path.exists(cache_path): - os.remove(cache_path) - raise - - # Learn some things from the CMakeCache.txt file in the build directory - cache = parse_cmakecache(cache_path) - try: - generator = cache["CMAKE_GENERATOR"] - except KeyError: - generator = detect_cmake_generator() - if args.generator is None: - args.generator = ( - generator - ) # reuse the previously configured generator, if none was given - if generator != args.generator: - raise FatalError( - "Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again." - % (generator, args.generator, PROG) - ) - - try: - home_dir = cache["CMAKE_HOME_DIRECTORY"] - if _realpath(home_dir) != _realpath(project_dir): - raise FatalError( - "Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again." - % (build_dir, _realpath(home_dir), _realpath(project_dir), PROG) - ) - except KeyError: - pass # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet - - -def parse_cmakecache(path): - """ - Parse the CMakeCache file at 'path'. - - Returns a dict of name:value. - - CMakeCache entries also each have a "type", but this is currently ignored. - """ - result = {} - with open(path) as f: - for line in f: - # cmake cache lines look like: CMAKE_CXX_FLAGS_DEBUG:STRING=-g - # groups are name, type, value - m = re.match(r"^([^#/:=]+):([^:=]+)=(.*)\n$", line) - if m: - result[m.group(1)] = m.group(3) - return result - - -def build_target(target_name, ctx, args): - """ - Execute the target build system to build target 'target_name' - - Calls _ensure_build_directory() which will run cmake to generate a build - directory (with the specified generator) as needed. 
- """ - _ensure_build_directory(args) - generator_cmd = GENERATOR_CMDS[args.generator] - - if args.verbose: - generator_cmd += [GENERATOR_VERBOSE[args.generator]] - - _run_tool(generator_cmd[0], generator_cmd + [target_name], args.build_dir) - - -def _get_esptool_args(args): - esptool_path = os.path.join( - os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py" - ) - if args.port is None: - args.port = get_default_serial_port() - result = [PYTHON, esptool_path] - result += ["-p", args.port] - result += ["-b", str(args.baud)] - - with open(os.path.join(args.build_dir, "flasher_args.json")) as f: - flasher_args = json.load(f) - - extra_esptool_args = flasher_args["extra_esptool_args"] - result += ["--after", extra_esptool_args["after"]] - return result - - -def flash(action, ctx, args): - """ - Run esptool to flash the entire project, from an argfile generated by the build system - """ - flasher_args_path = { # action -> name of flasher args file generated by build system - "bootloader-flash": "flash_bootloader_args", - "partition_table-flash": "flash_partition_table_args", - "app-flash": "flash_app_args", - "flash": "flash_project_args", - "encrypted-app-flash": "flash_encrypted_app_args", - "encrypted-flash": "flash_encrypted_project_args", - }[ - action - ] - esptool_args = _get_esptool_args(args) - esptool_args += ["write_flash", "@" + flasher_args_path] - _run_tool("esptool.py", esptool_args, args.build_dir) - - -def erase_flash(action, ctx, args): - esptool_args = _get_esptool_args(args) - esptool_args += ["erase_flash"] - _run_tool("esptool.py", esptool_args, args.build_dir) - - -def monitor(action, ctx, args, print_filter): - """ - Run idf_monitor.py to watch build output - """ - if args.port is None: - args.port = get_default_serial_port() - desc_path = os.path.join(args.build_dir, "project_description.json") - if not os.path.exists(desc_path): - _ensure_build_directory(args) - with open(desc_path, "r") as f: - project_desc = json.load(f) - - elf_file = os.path.join(args.build_dir, project_desc["app_elf"]) - if not os.path.exists(elf_file): - raise FatalError( - "ELF file '%s' not found. You need to build & flash the project before running 'monitor', " - "and the binary on the device must match the one in the build directory exactly. " - "Try '%s flash monitor'." % (elf_file, PROG) - ) - idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py") - monitor_args = [PYTHON, idf_monitor] - if args.port is not None: - monitor_args += ["-p", args.port] - monitor_args += ["-b", project_desc["monitor_baud"]] - monitor_args += ["--toolchain-prefix", project_desc["monitor_toolprefix"]] - - if print_filter is not None: - monitor_args += ["--print_filter", print_filter] - monitor_args += [elf_file] - - idf_py = [PYTHON] + get_commandline_options(ctx) # commands to re-run idf.py - monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)] - - if "MSYSTEM" in os.environ: - monitor_args = ["winpty"] + monitor_args - _run_tool("idf_monitor", monitor_args, args.project_dir) - - -def clean(action, ctx, args): - if not os.path.isdir(args.build_dir): - print("Build directory '%s' not found. Nothing to clean." 
% args.build_dir) - return - build_target("clean", ctx, args) - - -def set_target(action, ctx, args, idf_target): - args.define_cache_entry.append("IDF_TARGET=" + idf_target) - sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig') - sdkconfig_old = sdkconfig_path + ".old" - if os.path.exists(sdkconfig_old): - os.remove(sdkconfig_old) - if os.path.exists(sdkconfig_path): - os.rename(sdkconfig_path, sdkconfig_old) - print("Set Target to: %s, new sdkconfig created. Existing sdkconfig renamed to sdkconfig.old." % idf_target) - _ensure_build_directory(args, True) - - -def reconfigure(action, ctx, args): - _ensure_build_directory(args, True) - - -def _delete_windows_symlinks(directory): - """ - It deletes symlinks recursively on Windows. It is useful for Python 2 which doesn't detect symlinks on Windows. - """ - deleted_paths = [] - if os.name == "nt": - import ctypes - - for root, dirnames, _filenames in os.walk(directory): - for d in dirnames: - full_path = os.path.join(root, d) - try: - full_path = full_path.decode("utf-8") - except Exception: - pass - if ctypes.windll.kernel32.GetFileAttributesW(full_path) & 0x0400: - os.rmdir(full_path) - deleted_paths.append(full_path) - return deleted_paths - - -def fullclean(action, ctx, args): - build_dir = args.build_dir - if not os.path.isdir(build_dir): - print("Build directory '%s' not found. Nothing to clean." % build_dir) - return - if len(os.listdir(build_dir)) == 0: - print("Build directory '%s' is empty. Nothing to clean." % build_dir) - return - - if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")): - raise FatalError( - "Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically " - "delete files in this directory. Delete the directory manually to 'clean' it." - % build_dir - ) - red_flags = ["CMakeLists.txt", ".git", ".svn"] - for red in red_flags: - red = os.path.join(build_dir, red) - if os.path.exists(red): - raise FatalError( - "Refusing to automatically delete files in directory containing '%s'. Delete files manually if you're sure." - % red - ) - # OK, delete everything in the build directory... - # Note: Python 2.7 doesn't detect symlinks on Windows (it is supported form 3.2). Tools promising to not - # follow symlinks will actually follow them. Deleting the build directory with symlinks deletes also items - # outside of this directory. - deleted_symlinks = _delete_windows_symlinks(build_dir) - if args.verbose and len(deleted_symlinks) > 1: - print( - "The following symlinks were identified and removed:\n%s" - % "\n".join(deleted_symlinks) - ) - for f in os.listdir( - build_dir - ): # TODO: once we are Python 3 only, this can be os.scandir() - f = os.path.join(build_dir, f) - if args.verbose: - print("Removing: %s" % f) - if os.path.isdir(f): - shutil.rmtree(f) - else: - os.remove(f) - - def _safe_relpath(path, start=None): """ Return a relative path, same as os.path.relpath, but only if this is possible. 
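The hunks above and below strip idf.py's built-in actions; the commit re-registers them through extension modules under `tools/idf_py_actions/` that the new loader discovers at startup. A minimal, hypothetical sketch of such an extension module (file name `idf_hello_actions.py`, command name and message are invented for illustration; the actual contract is documented in the README.md added later in this patch):

```python
# idf_hello_actions.py -- hypothetical example, not part of this commit.
# Any module named idf_* found in tools/idf_py_actions or on the
# IDF_EXTRA_ACTIONS_PATH is imported, and the dict returned by its
# action_extensions() hook is merged into the idf.py CLI.


def action_extensions(base_actions, project_path):
    def hello(subcommand_name, ctx, args):
        # 'args' carries the parsed global options (project dir, build dir, ...)
        print("Hello from %s, project at %s" % (subcommand_name, project_path))

    return {
        "actions": {
            "hello": {
                "callback": hello,
                "help": "Example subcommand registered through an idf.py extension.",
            },
        },
    }
```

With such a file on `IDF_EXTRA_ACTIONS_PATH`, `idf.py hello` would run the callback like any built-in action.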
@@ -510,42 +108,6 @@ def _safe_relpath(path, start=None): return os.path.abspath(path) -def _idf_version_from_cmake(): - version_path = os.path.join(os.environ["IDF_PATH"], "tools/cmake/version.cmake") - regex = re.compile(r"^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)") - ver = {} - try: - with open(version_path) as f: - for line in f: - m = regex.match(line) - - if m: - ver[m.group(1)] = m.group(2) - - return "v%s.%s.%s" % (ver["MAJOR"], ver["MINOR"], ver["PATCH"]) - except (KeyError, OSError): - sys.stderr.write("WARNING: Cannot find ESP-IDF version in version.cmake\n") - return None - - -def idf_version(): - """Print version of ESP-IDF""" - - # Try to get version from git: - try: - version = subprocess.check_output([ - "git", - "--git-dir=%s" % os.path.join(os.environ["IDF_PATH"], '.git'), - "--work-tree=%s" % os.environ["IDF_PATH"], "describe", "--tags", "--dirty" - ]).decode('utf-8', 'ignore').strip() - except (subprocess.CalledProcessError, UnicodeError): - # if failed, then try to parse cmake.version file - sys.stderr.write("WARNING: Git version unavailable, reading from source\n") - version = _idf_version_from_cmake() - - return version - - def print_idf_version(): version = idf_version() if version: @@ -554,63 +116,6 @@ def print_idf_version(): print("ESP-IDF version unknown") -def idf_version_callback(ctx, param, value): - if not value or ctx.resilient_parsing: - return - - version = idf_version() - - if not version: - raise FatalError("ESP-IDF version cannot be determined") - - print("ESP-IDF %s" % version) - sys.exit(0) - - -def verbose_callback(ctx, param, value): - if not value or ctx.resilient_parsing: - return - - for line in ctx.command.verbose_output: - print(line) - - -def get_commandline_options(ctx): - """ Return all the command line options up to first action """ - # This approach ignores argument parsing done Click - result = [] - - for arg in sys.argv: - if arg in ctx.command.commands_with_aliases: - break - - result.append(arg) - - return result - - -def get_default_serial_port(): - """ Return a default serial port. esptool can do this (smarter), but it can create - inconsistencies where esptool.py uses one port and idf_monitor uses another. - - Same logic as esptool.py search order, reverse sort by name and choose the first port. - """ - # Import is done here in order to move it after the check_environment() ensured that pyserial has been installed - import serial.tools.list_ports - - ports = list(reversed(sorted(p.device for p in serial.tools.list_ports.comports()))) - try: - print( - "Choosing default port %s (use '-p PORT' option to set a specific serial port)" - % ports[0].encode("ascii", "ignore") - ) - return ports[0] - except IndexError: - raise RuntimeError( - "No serial ports found. Connect a device, or use '-p PORT' option to set a specific port." 
- ) - - class PropertyDict(dict): def __getattr__(self, name): if name in self: @@ -650,11 +155,8 @@ def init_cli(verbose_output=None): def full_message(self, type="Option"): return "%s is deprecated %sand will be removed in%s.%s" % ( - type, - "since %s " % self.since if self.since else "", - " %s" % self.removed if self.removed else " future versions", - " %s" % self.custom_message if self.custom_message else "" - ) + type, "since %s " % self.since if self.since else "", " %s" % self.removed + if self.removed else " future versions", " %s" % self.custom_message if self.custom_message else "") def help(self, text, type="Option", separator=" "): text = text or "" @@ -669,13 +171,10 @@ def init_cli(verbose_output=None): for option in ctx.command.params: default = () if option.multiple else option.default if isinstance(option, Option) and option.deprecated and ctx.params[option.name] != default: - print("Warning: %s" % DeprecationMessage(option.deprecated). - full_message('Option "%s"' % option.name)) + print("Warning: %s" % DeprecationMessage(option.deprecated).full_message('Option "%s"' % option.name)) class Task(object): - def __init__( - self, callback, name, aliases, dependencies, order_dependencies, action_args - ): + def __init__(self, callback, name, aliases, dependencies, order_dependencies, action_args): self.callback = callback self.name = name self.dependencies = dependencies @@ -690,15 +189,13 @@ def init_cli(verbose_output=None): self.callback(self.name, context, global_args, **action_args) class Action(click.Command): - def __init__( - self, - name=None, - aliases=None, - deprecated=False, - dependencies=None, - order_dependencies=None, - **kwargs - ): + def __init__(self, + name=None, + aliases=None, + deprecated=False, + dependencies=None, + order_dependencies=None, + **kwargs): super(Action, self).__init__(name, **kwargs) self.name = self.name or self.callback.__name__ @@ -839,7 +336,7 @@ def init_cli(verbose_output=None): class CLI(click.MultiCommand): """Action list contains all actions with options available for CLI""" - def __init__(self, action_lists=None, verbose_output=None, help=None): + def __init__(self, all_actions=None, verbose_output=None, help=None): super(CLI, self).__init__( chain=True, invoke_without_command=True, @@ -856,63 +353,56 @@ def init_cli(verbose_output=None): self.verbose_output = verbose_output - if action_lists is None: - action_lists = [] + if all_actions is None: + all_actions = {} shared_options = [] - for action_list in action_lists: - # Global options - for option_args in action_list.get("global_options", []): + # Global options + for option_args in all_actions.get("global_options", []): + option = Option(**option_args) + self.params.append(option) + + if option.scope.is_shared: + shared_options.append(option) + + # Global options validators + self.global_action_callbacks = all_actions.get("global_action_callbacks", []) + + # Actions + for name, action in all_actions.get("actions", {}).items(): + arguments = action.pop("arguments", []) + options = action.pop("options", []) + + if arguments is None: + arguments = [] + + if options is None: + options = [] + + self._actions[name] = Action(name=name, **action) + for alias in [name] + action.get("aliases", []): + self.commands_with_aliases[alias] = name + + for argument_args in arguments: + self._actions[name].params.append(Argument(**argument_args)) + + # Add all shared options + for option in shared_options: + self._actions[name].params.append(option) + + for option_args in options: option = 
Option(**option_args) - self.params.append(option) if option.scope.is_shared: - shared_options.append(option) + raise FatalError('"%s" is defined for action "%s". ' + ' "shared" options can be declared only on global level' % (option.name, name)) - for action_list in action_lists: - # Global options validators - self.global_action_callbacks.extend( - action_list.get("global_action_callbacks", []) - ) + # Promote options to global if see for the first time + if option.scope.is_global and option.name not in [o.name for o in self.params]: + self.params.append(option) - for action_list in action_lists: - # Actions - for name, action in action_list.get("actions", {}).items(): - arguments = action.pop("arguments", []) - options = action.pop("options", []) - - if arguments is None: - arguments = [] - - if options is None: - options = [] - - self._actions[name] = Action(name=name, **action) - for alias in [name] + action.get("aliases", []): - self.commands_with_aliases[alias] = name - - for argument_args in arguments: - self._actions[name].params.append(Argument(**argument_args)) - - # Add all shared options - for option in shared_options: - self._actions[name].params.append(option) - - for option_args in options: - option = Option(**option_args) - - if option.scope.is_shared: - raise FatalError( - '"%s" is defined for action "%s". ' - ' "shared" options can be declared only on global level' % (option.name, name) - ) - - # Promote options to global if see for the first time - if option.scope.is_global and option.name not in [o.name for o in self.params]: - self.params.append(option) - - self._actions[name].params.append(option) + self._actions[name].params.append(option) def list_commands(self, ctx): return sorted(self._actions) @@ -944,9 +434,7 @@ def init_cli(verbose_output=None): if key != "project": # flashing a single item cmd = "" - if ( - key == "bootloader" - ): # bootloader needs --flash-mode, etc to be passed in + if (key == "bootloader"): # bootloader needs --flash-mode, etc to be passed in cmd = " ".join(flasher_args["write_flash_args"]) + " " cmd += flasher_args[key]["offset"] + " " @@ -954,36 +442,23 @@ def init_cli(verbose_output=None): else: # flashing the whole project cmd = " ".join(flasher_args["write_flash_args"]) + " " flash_items = sorted( - ( - (o, f) - for (o, f) in flasher_args["flash_files"].items() - if len(o) > 0 - ), + ((o, f) for (o, f) in flasher_args["flash_files"].items() if len(o) > 0), key=lambda x: int(x[0], 0), ) for o, f in flash_items: cmd += o + " " + flasher_path(f) + " " - print( - "%s -p %s -b %s --after %s write_flash %s" - % ( - _safe_relpath( - "%s/components/esptool_py/esptool/esptool.py" - % os.environ["IDF_PATH"] - ), - args.port or "(PORT)", - args.baud, - flasher_args["extra_esptool_args"]["after"], - cmd.strip(), - ) - ) - print( - "or run 'idf.py -p %s %s'" - % ( - args.port or "(PORT)", - key + "-flash" if key != "project" else "flash", - ) - ) + print("%s -p %s -b %s --after %s write_flash %s" % ( + _safe_relpath("%s/components/esptool_py/esptool/esptool.py" % os.environ["IDF_PATH"]), + args.port or "(PORT)", + args.baud, + flasher_args["extra_esptool_args"]["after"], + cmd.strip(), + )) + print("or run 'idf.py -p %s %s'" % ( + args.port or "(PORT)", + key + "-flash" if key != "project" else "flash", + )) if "all" in actions or "build" in actions: print_flashing_message("Project", "project") @@ -1000,12 +475,13 @@ def init_cli(verbose_output=None): global_args = PropertyDict(kwargs) # Show warning if some tasks are present several times in the 
list
-        dupplicated_tasks = sorted([item for item, count in Counter(task.name for task in tasks).items() if count > 1])
+        dupplicated_tasks = sorted(
+            [item for item, count in Counter(task.name for task in tasks).items() if count > 1])
         if dupplicated_tasks:
             dupes = ", ".join('"%s"' % t for t in dupplicated_tasks)
-            print("WARNING: Command%s found in the list of commands more than once. "
-                  % ("s %s are" % dupes if len(dupplicated_tasks) > 1 else " %s is" % dupes)
-                  + "Only first occurrence will be executed.")
+            print("WARNING: Command%s found in the list of commands more than once. " %
+                  ("s %s are" % dupes if len(dupplicated_tasks) > 1 else " %s is" % dupes) +
+                  "Only first occurrence will be executed.")

         # Set propagated global options.
         # These options may be set on one subcommand, but available in the list of global arguments
@@ -1019,10 +495,9 @@
                 default = () if option.multiple else option.default

                 if global_value != default and local_value != default and global_value != local_value:
-                    raise FatalError(
-                        'Option "%s" provided for "%s" is already defined to a different value. '
-                        "This option can appear at most once in the command line." % (key, task.name)
-                    )
+                    raise FatalError('Option "%s" provided for "%s" is already defined to a different value. '
+                                     "This option can appear at most once in the command line." %
+                                     (key, task.name))
                 if local_value != default:
                     global_args[key] = local_value
@@ -1058,10 +533,8 @@
             # Otherwise invoke it with default set of options
             # and put to the front of the list of unprocessed tasks
             else:
-                print(
-                    'Adding "%s"\'s dependency "%s" to list of commands with default set of options.'
-                    % (task.name, dep)
-                )
+                print('Adding "%s"\'s dependency "%s" to list of commands with default set of options.'
% + (task.name, dep)) dep_task = ctx.invoke(ctx.command.get_command(ctx, dep)) # Remove options with global scope from invoke tasks because they are alread in global_args @@ -1088,8 +561,8 @@ def init_cli(verbose_output=None): tasks_to_run.update([(task.name, task)]) # Run all tasks in the queue - # when global_args.no_run is true idf.py works in idle mode and skips actual task execution - if not global_args.no_run: + # when global_args.dry_run is true idf.py works in idle mode and skips actual task execution + if not global_args.dry_run: for task in tasks_to_run.values(): name_with_aliases = task.name if task.aliases: @@ -1102,370 +575,59 @@ def init_cli(verbose_output=None): return tasks_to_run - @staticmethod - def merge_action_lists(*action_lists): - merged_actions = { - "global_options": [], - "actions": {}, - "global_action_callbacks": [], - } - for action_list in action_lists: - merged_actions["global_options"].extend( - action_list.get("global_options", []) - ) - merged_actions["actions"].update(action_list.get("actions", {})) - merged_actions["global_action_callbacks"].extend( - action_list.get("global_action_callbacks", []) - ) - return merged_actions - # That's a tiny parser that parse project-dir even before constructing # fully featured click parser to be sure that extensions are loaded from the right place @click.command( add_help_option=False, - context_settings={"allow_extra_args": True, "ignore_unknown_options": True}, + context_settings={ + "allow_extra_args": True, + "ignore_unknown_options": True + }, ) @click.option("-C", "--project-dir", default=os.getcwd()) def parse_project_dir(project_dir): - return _realpath(project_dir) + return realpath(project_dir) project_dir = parse_project_dir(standalone_mode=False) - # Load base idf commands - def validate_root_options(ctx, args, tasks): - args.project_dir = _realpath(args.project_dir) - if args.build_dir is not None and args.project_dir == _realpath(args.build_dir): - raise FatalError( - "Setting the build directory to the project directory is not supported. Suggest dropping " - "--build-dir option, the default is a 'build' subdirectory inside the project directory." 
- ) - if args.build_dir is None: - args.build_dir = os.path.join(args.project_dir, "build") - args.build_dir = _realpath(args.build_dir) + all_actions = {} + # Load extensions from components dir + idf_py_extensions_path = os.path.join(os.environ["IDF_PATH"], "tools", "idf_py_actions") + extra_pathes = os.environ.get("IDF_EXTRA_ACTIONS_PATH", "").split(';') + extension_dirs = [idf_py_extensions_path] + extra_pathes + extensions = {} - # Possible keys for action dict are: global_options, actions and global_action_callbacks - global_options = [ - { - "names": ["-D", "--define-cache-entry"], - "help": "Create a cmake cache entry.", - "scope": "global", - "multiple": True, - } - ] + for directory in extension_dirs: + if directory and not os.path.exists(directory): + print('WARNING: Directroy with idf.py extensions doesn\'t exist:\n %s' % directory) + continue - root_options = { - "global_options": [ - { - "names": ["--version"], - "help": "Show IDF version and exit.", - "is_flag": True, - "callback": idf_version_callback - }, - { - "names": ["-C", "--project-dir"], - "help": "Project directory.", - "type": click.Path(), - "default": os.getcwd(), - }, - { - "names": ["-B", "--build-dir"], - "help": "Build directory.", - "type": click.Path(), - "default": None, - }, - { - "names": ["-n", "--no-warnings"], - "help": "Disable Cmake warnings.", - "is_flag": True, - "default": False, - }, - { - "names": ["-v", "--verbose"], - "help": "Verbose build output.", - "is_flag": True, - "is_eager": True, - "default": False, - "callback": verbose_callback - }, - { - "names": ["--ccache/--no-ccache"], - "help": "Use ccache in build. Disabled by default, unless IDF_CCACHE_ENABLE environment variable is set to a non-zero value.", - "is_flag": True, - "default": os.getenv("IDF_CCACHE_ENABLE") not in [None, "", "0"], - }, - { - "names": ["-G", "--generator"], - "help": "CMake generator.", - "type": click.Choice(GENERATOR_CMDS.keys()), - }, - { - "names": ["--no-run"], - "help": "Only process arguments, but don't execute actions.", - "is_flag": True, - "hidden": True, - "default": False - }, - ], - "global_action_callbacks": [validate_root_options], - } + sys.path.append(directory) + for _finder, name, _ispkg in sorted(iter_modules([directory])): + if name.startswith('idf_'): + extensions[name] = import_module(name) - build_actions = { - "actions": { - "all": { - "aliases": ["build"], - "callback": build_target, - "short_help": "Build the project.", - "help": "Build the project. This can involve multiple steps:\n\n" - + "1. Create the build directory if needed. The sub-directory 'build' is used to hold build output, " - + "although this can be changed with the -B option.\n\n" - + "2. Run CMake as necessary to configure the project and generate build files for the main build tool.\n\n" - + "3. Run the main build tool (Ninja or GNU Make). 
By default, the build tool is automatically detected " - + "but it can be explicitly set by passing the -G option to idf.py.\n\n", - "options": global_options, - "order_dependencies": [ - "reconfigure", - "menuconfig", - "clean", - "fullclean", - ], - }, - "menuconfig": { - "callback": build_target, - "help": 'Run "menuconfig" project configuration tool.', - "options": global_options, - }, - "confserver": { - "callback": build_target, - "help": "Run JSON configuration server.", - "options": global_options, - }, - "size": { - "callback": build_target, - "help": "Print basic size information about the app.", - "options": global_options, - "dependencies": ["app"], - }, - "size-components": { - "callback": build_target, - "help": "Print per-component size information.", - "options": global_options, - "dependencies": ["app"], - }, - "size-files": { - "callback": build_target, - "help": "Print per-source-file size information.", - "options": global_options, - "dependencies": ["app"], - }, - "bootloader": { - "callback": build_target, - "help": "Build only bootloader.", - "options": global_options, - }, - "app": { - "callback": build_target, - "help": "Build only the app.", - "order_dependencies": ["clean", "fullclean", "reconfigure"], - "options": global_options, - }, - "efuse_common_table": { - "callback": build_target, - "help": "Genereate C-source for IDF's eFuse fields.", - "order_dependencies": ["reconfigure"], - "options": global_options, - }, - "efuse_custom_table": { - "callback": build_target, - "help": "Genereate C-source for user's eFuse fields.", - "order_dependencies": ["reconfigure"], - "options": global_options, - }, - "show_efuse_table": { - "callback": build_target, - "help": "Print eFuse table.", - "order_dependencies": ["reconfigure"], - "options": global_options, - }, - "partition_table": { - "callback": build_target, - "help": "Build only partition table.", - "order_dependencies": ["reconfigure"], - "options": global_options, - }, - "erase_otadata": { - "callback": build_target, - "help": "Erase otadata partition.", - "options": global_options, - }, - "read_otadata": { - "callback": build_target, - "help": "Read otadata partition.", - "options": global_options, - }, - } - } + for name, extension in extensions.items(): + try: + all_actions = merge_action_lists(all_actions, extension.action_extensions(all_actions, project_dir)) + except AttributeError: + print('WARNING: Cannot load idf.py extension "%s"' % name) - clean_actions = { - "actions": { - "reconfigure": { - "callback": reconfigure, - "short_help": "Re-run CMake.", - "help": "Re-run CMake even if it doesn't seem to need re-running. This isn't necessary during normal usage, " - + "but can be useful after adding/removing files from the source tree, or when modifying CMake cache variables. " - + "For example, \"idf.py -DNAME='VALUE' reconfigure\" " - + 'can be used to set variable "NAME" in CMake cache to value "VALUE".', - "options": global_options, - "order_dependencies": ["menuconfig", "fullclean"], - }, - "set-target": { - "callback": set_target, - "short_help": "Set the chip target to build.", - "help": "Set the chip target to build. 
This will remove the " - + "existing sdkconfig file and corresponding CMakeCache and " - + "create new ones according to the new target.\nFor example, " - + "\"idf.py set-target esp32\" will select esp32 as the new chip " - + "target.", - "arguments": [{ - "names": ["idf-target"], - "nargs": 1, - "type": click.Choice(SUPPORTED_TARGETS), - }], - "dependencies": ["fullclean"], - }, - "clean": { - "callback": clean, - "short_help": "Delete build output files from the build directory.", - "help": "Delete build output files from the build directory , forcing a 'full rebuild' the next time " - + "the project is built. Cleaning doesn't delete CMake configuration output and some other files", - "order_dependencies": ["fullclean"], - }, - "fullclean": { - "callback": fullclean, - "short_help": "Delete the entire build directory contents.", - "help": "Delete the entire build directory contents. This includes all CMake configuration output." - + "The next time the project is built, CMake will configure it from scratch. " - + "Note that this option recursively deletes all files in the build directory, so use with care." - + "Project configuration is not deleted.", - }, - } - } - - baud_rate = { - "names": ["-b", "--baud"], - "help": "Baud rate.", - "scope": "global", - "envvar": "ESPBAUD", - "default": 460800, - } - - port = { - "names": ["-p", "--port"], - "help": "Serial port.", - "scope": "global", - "envvar": "ESPPORT", - "default": None, - } - - serial_actions = { - "actions": { - "flash": { - "callback": flash, - "help": "Flash the project.", - "options": global_options + [baud_rate, port], - "dependencies": ["all"], - "order_dependencies": ["erase_flash"], - }, - "erase_flash": { - "callback": erase_flash, - "help": "Erase entire flash chip.", - "options": [baud_rate, port], - }, - "monitor": { - "callback": monitor, - "help": "Display serial output.", - "options": [ - port, - { - "names": ["--print-filter", "--print_filter"], - "help": ( - "Filter monitor output.\n" - "Restrictions on what to print can be specified as a series of : items " - "where is the tag string and is a character from the set " - "{N, E, W, I, D, V, *} referring to a level. " - 'For example, "tag1:W" matches and prints only the outputs written with ' - 'ESP_LOGW("tag1", ...) or at lower verbosity level, i.e. ESP_LOGE("tag1", ...). 
' - 'Not specifying a or using "*" defaults to Verbose level.\n' - 'Please see the IDF Monitor section of the ESP-IDF documentation ' - 'for a more detailed description and further examples.'), - "default": None, - }, - ], - "order_dependencies": [ - "flash", - "partition_table-flash", - "bootloader-flash", - "app-flash", - ], - }, - "partition_table-flash": { - "callback": flash, - "help": "Flash partition table only.", - "options": [baud_rate, port], - "dependencies": ["partition_table"], - "order_dependencies": ["erase_flash"], - }, - "bootloader-flash": { - "callback": flash, - "help": "Flash bootloader only.", - "options": [baud_rate, port], - "dependencies": ["bootloader"], - "order_dependencies": ["erase_flash"], - }, - "app-flash": { - "callback": flash, - "help": "Flash the app only.", - "options": [baud_rate, port], - "dependencies": ["app"], - "order_dependencies": ["erase_flash"], - }, - "encrypted-app-flash": { - "callback": flash, - "help": "Flash the encrypted app only.", - "dependencies": ["app"], - "order_dependencies": ["erase_flash"], - }, - "encrypted-flash": { - "callback": flash, - "help": "Flash the encrypted project.", - "dependencies": ["all"], - "order_dependencies": ["erase_flash"], - }, - }, - } - - base_actions = CLI.merge_action_lists( - root_options, build_actions, clean_actions, serial_actions - ) - all_actions = [base_actions] - - # Load extensions + # Load extensions from project dir if os.path.exists(os.path.join(project_dir, "idf_ext.py")): sys.path.append(project_dir) try: from idf_ext import action_extensions except ImportError: print("Error importing extension file idf_ext.py. Skipping.") - print( - "Please make sure that it contains implementation (even if it's empty) of add_action_extensions" - ) + print("Please make sure that it contains implementation (even if it's empty) of add_action_extensions") - # Add actions extensions - try: - all_actions.append(action_extensions(base_actions, project_dir)) - except NameError: - pass + try: + all_actions = merge_action_lists(all_actions, action_extensions(all_actions, project_dir)) + except NameError: + pass - return CLI(help="ESP-IDF build management", verbose_output=verbose_output, action_lists=all_actions) + return CLI(help="ESP-IDF build management", verbose_output=verbose_output, all_actions=all_actions) def main(): @@ -1488,9 +650,7 @@ def _valid_unicode_config(): def _find_usable_locale(): try: - locales = subprocess.Popen( - ["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE - ).communicate()[0] + locales = subprocess.Popen(["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] except OSError: locales = "" if isinstance(locales, bytes): @@ -1515,8 +675,7 @@ def _find_usable_locale(): if not usable_locales: raise FatalError( "Support for Unicode filenames is required, but no suitable UTF-8 locale was found on your system." - " Please refer to the manual for your operating system for details on locale reconfiguration." - ) + " Please refer to the manual for your operating system for details on locale reconfiguration.") return usable_locales[0] @@ -1529,9 +688,8 @@ if __name__ == "__main__": # option as os.environment['_'] contains "winpty" only when it is run manually from console. 
        WINPTY_VAR = "WINPTY"
        WINPTY_EXE = "winpty"
-        if ("MSYSTEM" in os.environ) and (
-            not os.environ.get("_", "").endswith(WINPTY_EXE) and WINPTY_VAR not in os.environ
-        ):
+        if ("MSYSTEM" in os.environ) and (not os.environ.get("_", "").endswith(WINPTY_EXE)
+                                          and WINPTY_VAR not in os.environ):
             if 'menuconfig' in sys.argv:
                 # don't use winpty for menuconfig because it will print weird characters
@@ -1539,9 +697,7 @@ if __name__ == "__main__":
             else:
                 os.environ[WINPTY_VAR] = "1"  # the value is of no interest to us
                 # idf.py calls itself with "winpty" and WINPTY global variable set
-                ret = subprocess.call(
-                    [WINPTY_EXE, sys.executable] + sys.argv, env=os.environ
-                )
+                ret = subprocess.call([WINPTY_EXE, sys.executable] + sys.argv, env=os.environ)
                 if ret:
                     raise SystemExit(ret)
@@ -1549,11 +705,8 @@ if __name__ == "__main__":
             # Trying to find best utf-8 locale available on the system and restart python with it
             best_locale = _find_usable_locale()
-            print(
-                "Your environment is not configured to handle unicode filenames outside of ASCII range."
-                " Environment variable LC_ALL is temporarily set to %s for unicode support."
-                % best_locale
-            )
+            print("Your environment is not configured to handle unicode filenames outside of ASCII range."
+                  " Environment variable LC_ALL is temporarily set to %s for unicode support." % best_locale)
             os.environ["LC_ALL"] = best_locale
             ret = subprocess.call([sys.executable] + sys.argv, env=os.environ)
diff --git a/tools/idf_py_actions/README.md b/tools/idf_py_actions/README.md
new file mode 100644
index 000000000..8b6d2af69
--- /dev/null
+++ b/tools/idf_py_actions/README.md
@@ -0,0 +1,41 @@
+# idf.py extensions
+Python modules (subdirectories and files) in this directory named `idf_[your_extension]` will be loaded as idf.py extensions.
+Command-line argument parsing and the extension mechanism are implemented on top of [Click](https://click.palletsprojects.com/en/5.x/) (versions >=5.0 are supported).
+
+They should define a function `action_extensions(base_actions, project_path)` where:
+
+- base_actions - dictionary with actions that are already available for idf.py
+- project_path - working directory, defaults to `os.getcwd()`
+
+This function has to return a dict with 3 possible keys:
+
+```python
+{
+    # Additional options that will be available from idf.py
+    "global_options": [{
+        "names": ["--option-name"],
+        "help": "Help for option --option-name.",
+    }],
+    # List of functions that will have access to the full app context and can modify the arguments
+    "global_action_callbacks": [global_callback],
+    # Additional subcommands for idf.py
+    "actions": {
+        "subcommand_name": {
+            "callback": subcommand_callback,
+            "help": "Help for subcommand.",
+        },
+    },
+}
+```
+
+Where the function `global_callback(ctx, global_args, tasks)` accepts 3 arguments:
+
+- ctx - [Click context](https://click.palletsprojects.com/en/5.x/api/#context)
+- global_args - dictionary of all available global arguments
+- tasks - list of Task objects
+
+And `subcommand_callback(subcommand_name, ctx, args)` accepts 3 arguments:
+
+- subcommand_name - name of the subcommand
+- ctx - [Click context](https://click.palletsprojects.com/en/5.x/api/#context)
+- args - list of the command's arguments
diff --git a/tools/idf_py_actions/__init__.py b/tools/idf_py_actions/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tools/idf_py_actions/contstants.py b/tools/idf_py_actions/contstants.py
new file mode 100644
index 000000000..4cfc6a02c
--- /dev/null
+++ b/tools/idf_py_actions/contstants.py
@@ -0,0 +1,32 @@
+import multiprocessing
+import os
+import platform
+
+# Make flavors, across the various kinds of Windows environments & POSIX...
+if "MSYSTEM" in os.environ:  # MSYS
+    MAKE_CMD = "make"
+    MAKE_GENERATOR = "MSYS Makefiles"
+elif os.name == "nt":  # other Windows
+    MAKE_CMD = "mingw32-make"
+    MAKE_GENERATOR = "MinGW Makefiles"
+elif platform.system() == "FreeBSD":
+    MAKE_CMD = "gmake"
+    MAKE_GENERATOR = "Unix Makefiles"
+else:
+    MAKE_CMD = "make"
+    MAKE_GENERATOR = "Unix Makefiles"
+
+GENERATORS = [
+    # ('generator name', 'build command line', 'version command line', 'verbose flag')
+    ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
+    (
+        MAKE_GENERATOR,
+        [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)],
+        [MAKE_CMD, "--version"],
+        "VERBOSE=1",
+    ),
+]
+GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
+GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)
+
+SUPPORTED_TARGETS = ["esp32", "esp32s2beta"]
diff --git a/tools/idf_py_actions/errors.py b/tools/idf_py_actions/errors.py
new file mode 100644
index 000000000..d3d5c9e94
--- /dev/null
+++ b/tools/idf_py_actions/errors.py
@@ -0,0 +1,6 @@
+class FatalError(RuntimeError):
+    """
+    Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build process.
+    """
+
+    pass
diff --git a/tools/idf_py_actions/global_options.py b/tools/idf_py_actions/global_options.py
new file mode 100644
index 000000000..7f9a171a9
--- /dev/null
+++ b/tools/idf_py_actions/global_options.py
@@ -0,0 +1,6 @@
+global_options = [{
+    "names": ["-D", "--define-cache-entry"],
+    "help": "Create a cmake cache entry.",
+    "scope": "global",
+    "multiple": True,
+}]
diff --git a/tools/idf_py_actions/idf_01_core_actions.py b/tools/idf_py_actions/idf_01_core_actions.py
new file mode 100644
index 000000000..bf9daaf91
--- /dev/null
+++ b/tools/idf_py_actions/idf_01_core_actions.py
@@ -0,0 +1,342 @@
+import os
+import shutil
+import sys
+
+import click
+
+from
idf_py_actions.contstants import GENERATOR_CMDS, GENERATOR_VERBOSE, SUPPORTED_TARGETS +from idf_py_actions.errors import FatalError +from idf_py_actions.global_options import global_options +from idf_py_actions.tools import ensure_build_directory, idf_version, merge_action_lists, realpath, run_tool + + +def action_extensions(base_actions, project_path): + def build_target(target_name, ctx, args): + """ + Execute the target build system to build target 'target_name' + + Calls ensure_build_directory() which will run cmake to generate a build + directory (with the specified generator) as needed. + """ + ensure_build_directory(args, ctx.info_name) + generator_cmd = GENERATOR_CMDS[args.generator] + + if args.verbose: + generator_cmd += [GENERATOR_VERBOSE[args.generator]] + + run_tool(generator_cmd[0], generator_cmd + [target_name], args.build_dir) + + def verbose_callback(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + for line in ctx.command.verbose_output: + print(line) + + def clean(action, ctx, args): + if not os.path.isdir(args.build_dir): + print("Build directory '%s' not found. Nothing to clean." % args.build_dir) + return + build_target("clean", ctx, args) + + def _delete_windows_symlinks(directory): + """ + It deletes symlinks recursively on Windows. It is useful for Python 2 which doesn't detect symlinks on Windows. + """ + deleted_paths = [] + if os.name == "nt": + import ctypes + + for root, dirnames, _filenames in os.walk(directory): + for d in dirnames: + full_path = os.path.join(root, d) + try: + full_path = full_path.decode("utf-8") + except Exception: + pass + if ctypes.windll.kernel32.GetFileAttributesW(full_path) & 0x0400: + os.rmdir(full_path) + deleted_paths.append(full_path) + return deleted_paths + + def fullclean(action, ctx, args): + build_dir = args.build_dir + if not os.path.isdir(build_dir): + print("Build directory '%s' not found. Nothing to clean." % build_dir) + return + if len(os.listdir(build_dir)) == 0: + print("Build directory '%s' is empty. Nothing to clean." % build_dir) + return + + if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")): + raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically " + "delete files in this directory. Delete the directory manually to 'clean' it." % build_dir) + red_flags = ["CMakeLists.txt", ".git", ".svn"] + for red in red_flags: + red = os.path.join(build_dir, red) + if os.path.exists(red): + raise FatalError( + "Refusing to automatically delete files in directory containing '%s'. Delete files manually if you're sure." + % red) + # OK, delete everything in the build directory... + # Note: Python 2.7 doesn't detect symlinks on Windows (it is supported form 3.2). Tools promising to not + # follow symlinks will actually follow them. Deleting the build directory with symlinks deletes also items + # outside of this directory. 
+ deleted_symlinks = _delete_windows_symlinks(build_dir) + if args.verbose and len(deleted_symlinks) > 1: + print("The following symlinks were identified and removed:\n%s" % "\n".join(deleted_symlinks)) + for f in os.listdir(build_dir): # TODO: once we are Python 3 only, this can be os.scandir() + f = os.path.join(build_dir, f) + if args.verbose: + print("Removing: %s" % f) + if os.path.isdir(f): + shutil.rmtree(f) + else: + os.remove(f) + + def set_target(action, ctx, args, idf_target): + args.define_cache_entry.append("IDF_TARGET=" + idf_target) + sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig') + sdkconfig_old = sdkconfig_path + ".old" + if os.path.exists(sdkconfig_old): + os.remove(sdkconfig_old) + if os.path.exists(sdkconfig_path): + os.rename(sdkconfig_path, sdkconfig_old) + print("Set Target to: %s, new sdkconfig created. Existing sdkconfig renamed to sdkconfig.old." % idf_target) + ensure_build_directory(args, ctx.info_name, True) + + def reconfigure(action, ctx, args): + ensure_build_directory(args, ctx.info_name, True) + + def validate_root_options(ctx, args, tasks): + args.project_dir = realpath(args.project_dir) + if args.build_dir is not None and args.project_dir == realpath(args.build_dir): + raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping " + "--build-dir option, the default is a 'build' subdirectory inside the project directory.") + if args.build_dir is None: + args.build_dir = os.path.join(args.project_dir, "build") + args.build_dir = realpath(args.build_dir) + + def idf_version_callback(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + version = idf_version() + + if not version: + raise FatalError("ESP-IDF version cannot be determined") + + print("ESP-IDF %s" % version) + sys.exit(0) + + root_options = { + "global_options": [ + { + "names": ["--version"], + "help": "Show IDF version and exit.", + "is_flag": True, + "callback": idf_version_callback + }, + { + "names": ["-C", "--project-dir"], + "help": "Project directory.", + "type": click.Path(), + "default": os.getcwd(), + }, + { + "names": ["-B", "--build-dir"], + "help": "Build directory.", + "type": click.Path(), + "default": None, + }, + { + "names": ["-n", "--no-warnings"], + "help": "Disable Cmake warnings.", + "is_flag": True, + "default": False, + }, + { + "names": ["-v", "--verbose"], + "help": "Verbose build output.", + "is_flag": True, + "is_eager": True, + "default": False, + "callback": verbose_callback + }, + { + "names": ["--ccache/--no-ccache"], + "help": ("Use ccache in build. Disabled by default, unless " + "IDF_CCACHE_ENABLE environment variable is set to a non-zero value."), + "is_flag": True, + "default": os.getenv("IDF_CCACHE_ENABLE") not in [None, "", "0"], + }, + { + "names": ["-G", "--generator"], + "help": "CMake generator.", + "type": click.Choice(GENERATOR_CMDS.keys()), + }, + { + "names": ["--dry-run"], + "help": "Only process arguments, but don't execute actions.", + "is_flag": True, + "hidden": True, + "default": False + }, + ], + "global_action_callbacks": [validate_root_options], + } + + build_actions = { + "actions": { + "all": { + "aliases": ["build"], + "callback": build_target, + "short_help": "Build the project.", + "help": ("Build the project. This can involve multiple steps:\n\n" + "1. Create the build directory if needed. " + "The sub-directory 'build' is used to hold build output, " + "although this can be changed with the -B option.\n\n" + "2. 
Run CMake as necessary to configure the project " + "and generate build files for the main build tool.\n\n" + "3. Run the main build tool (Ninja or GNU Make). " + "By default, the build tool is automatically detected " + "but it can be explicitly set by passing the -G option to idf.py.\n\n"), + "options": global_options, + "order_dependencies": [ + "reconfigure", + "menuconfig", + "clean", + "fullclean", + ], + }, + "menuconfig": { + "callback": build_target, + "help": 'Run "menuconfig" project configuration tool.', + "options": global_options, + }, + "confserver": { + "callback": build_target, + "help": "Run JSON configuration server.", + "options": global_options, + }, + "size": { + "callback": build_target, + "help": "Print basic size information about the app.", + "options": global_options, + "dependencies": ["app"], + }, + "size-components": { + "callback": build_target, + "help": "Print per-component size information.", + "options": global_options, + "dependencies": ["app"], + }, + "size-files": { + "callback": build_target, + "help": "Print per-source-file size information.", + "options": global_options, + "dependencies": ["app"], + }, + "bootloader": { + "callback": build_target, + "help": "Build only bootloader.", + "options": global_options, + }, + "app": { + "callback": build_target, + "help": "Build only the app.", + "order_dependencies": ["clean", "fullclean", "reconfigure"], + "options": global_options, + }, + "efuse_common_table": { + "callback": build_target, + "help": "Genereate C-source for IDF's eFuse fields.", + "order_dependencies": ["reconfigure"], + "options": global_options, + }, + "efuse_custom_table": { + "callback": build_target, + "help": "Genereate C-source for user's eFuse fields.", + "order_dependencies": ["reconfigure"], + "options": global_options, + }, + "show_efuse_table": { + "callback": build_target, + "help": "Print eFuse table.", + "order_dependencies": ["reconfigure"], + "options": global_options, + }, + "partition_table": { + "callback": build_target, + "help": "Build only partition table.", + "order_dependencies": ["reconfigure"], + "options": global_options, + }, + "erase_otadata": { + "callback": build_target, + "help": "Erase otadata partition.", + "options": global_options, + }, + "read_otadata": { + "callback": build_target, + "help": "Read otadata partition.", + "options": global_options, + }, + } + } + + clean_actions = { + "actions": { + "reconfigure": { + "callback": reconfigure, + "short_help": "Re-run CMake.", + "help": ("Re-run CMake even if it doesn't seem to need re-running. " + "This isn't necessary during normal usage, " + "but can be useful after adding/removing files from the source tree, " + "or when modifying CMake cache variables. " + "For example, \"idf.py -DNAME='VALUE' reconfigure\" " + 'can be used to set variable "NAME" in CMake cache to value "VALUE".'), + "options": global_options, + "order_dependencies": ["menuconfig", "fullclean"], + }, + "set-target": { + "callback": set_target, + "short_help": "Set the chip target to build.", + "help": ("Set the chip target to build. 
This will remove the " + "existing sdkconfig file and corresponding CMakeCache and " + "create new ones according to the new target.\nFor example, " + "\"idf.py set-target esp32\" will select esp32 as the new chip " + "target."), + "arguments": [ + { + "names": ["idf-target"], + "nargs": 1, + "type": click.Choice(SUPPORTED_TARGETS), + }, + ], + "dependencies": ["fullclean"], + }, + "clean": { + "callback": clean, + "short_help": "Delete build output files from the build directory.", + "help": ("Delete build output files from the build directory, " + "forcing a 'full rebuild' the next time " + "the project is built. Cleaning doesn't delete " + "CMake configuration output and some other files."), + "order_dependencies": ["fullclean"], + }, + "fullclean": { + "callback": fullclean, + "short_help": "Delete the entire build directory contents.", + "help": ("Delete the entire build directory contents. " + "This includes all CMake configuration output. " + "The next time the project is built, " + "CMake will configure it from scratch. " + "Note that this option recursively deletes all files " + "in the build directory, so use with care. " + "Project configuration is not deleted.") + }, + } + } + + return merge_action_lists(root_options, build_actions, clean_actions) diff --git a/tools/idf_py_actions/idf_02_serial_actions.py b/tools/idf_py_actions/idf_02_serial_actions.py new file mode 100644 index 000000000..76d80a1af --- /dev/null +++ b/tools/idf_py_actions/idf_02_serial_actions.py @@ -0,0 +1,209 @@ +import json +import os +import sys + +from idf_py_actions.errors import FatalError +from idf_py_actions.global_options import global_options +from idf_py_actions.tools import ensure_build_directory, run_tool + +PYTHON = sys.executable + + +def action_extensions(base_actions, project_path): + def _get_default_serial_port(): + """ Return a default serial port. esptool can do this (smarter), but it can create + inconsistencies where esptool.py uses one port and idf_monitor uses another. + + Same logic as esptool.py search order, reverse sort by name and choose the first port. + """ + # Import is done here so that it happens after check_environment() has ensured pyserial is installed + import serial.tools.list_ports + + ports = list(reversed(sorted(p.device for p in serial.tools.list_ports.comports()))) + try: + print("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % + ports[0].encode("ascii", "ignore")) + return ports[0] + except IndexError: + raise RuntimeError( + "No serial ports found. 
Connect a device, or use '-p PORT' option to set a specific port.") + + def _get_esptool_args(args): + esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py") + if args.port is None: + args.port = _get_default_serial_port() + result = [PYTHON, esptool_path] + result += ["-p", args.port] + result += ["-b", str(args.baud)] + + with open(os.path.join(args.build_dir, "flasher_args.json")) as f: + flasher_args = json.load(f) + + extra_esptool_args = flasher_args["extra_esptool_args"] + result += ["--after", extra_esptool_args["after"]] + return result + + def _get_commandline_options(ctx): + """ Return all the command line options up to the first action """ + # This approach ignores argument parsing done by Click + result = [] + + for arg in sys.argv: + if arg in ctx.command.commands_with_aliases: + break + + result.append(arg) + + return result + + def monitor(action, ctx, args, print_filter): + """ + Run idf_monitor.py to watch build output + """ + if args.port is None: + args.port = _get_default_serial_port() + desc_path = os.path.join(args.build_dir, "project_description.json") + if not os.path.exists(desc_path): + ensure_build_directory(args, ctx.info_name) + with open(desc_path, "r") as f: + project_desc = json.load(f) + + elf_file = os.path.join(args.build_dir, project_desc["app_elf"]) + if not os.path.exists(elf_file): + raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', " + "and the binary on the device must match the one in the build directory exactly. " + "Try '%s flash monitor'." % (elf_file, ctx.info_name)) + idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py") + monitor_args = [PYTHON, idf_monitor] + if args.port is not None: + monitor_args += ["-p", args.port] + monitor_args += ["-b", project_desc["monitor_baud"]] + monitor_args += ["--toolchain-prefix", project_desc["monitor_toolprefix"]] + + if print_filter is not None: + monitor_args += ["--print_filter", print_filter] + monitor_args += [elf_file] + + idf_py = [PYTHON] + _get_commandline_options(ctx) # commands to re-run idf.py + monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)] + + if "MSYSTEM" in os.environ: + monitor_args = ["winpty"] + monitor_args + run_tool("idf_monitor", monitor_args, args.project_dir) + + def flash(action, ctx, args): + """ + Run esptool to flash the entire project, from an argfile generated by the build system + """ + flasher_args_path = { + # action -> name of flasher args file generated by build system + "bootloader-flash": "flash_bootloader_args", + "partition_table-flash": "flash_partition_table_args", + "app-flash": "flash_app_args", + "flash": "flash_project_args", + "encrypted-app-flash": "flash_encrypted_app_args", + "encrypted-flash": "flash_encrypted_project_args", + }[action] + esptool_args = _get_esptool_args(args) + esptool_args += ["write_flash", "@" + flasher_args_path] + run_tool("esptool.py", esptool_args, args.build_dir) + + def erase_flash(action, ctx, args): + esptool_args = _get_esptool_args(args) + esptool_args += ["erase_flash"] + run_tool("esptool.py", esptool_args, args.build_dir) + + baud_rate = { + "names": ["-b", "--baud"], + "help": "Baud rate.", + "scope": "global", + "envvar": "ESPBAUD", + "default": 460800, + } + + port = { + "names": ["-p", "--port"], + "help": "Serial port.", + "scope": "global", + "envvar": "ESPPORT", + "default": None, + } + + serial_actions = { + "actions": { + "flash": { + "callback": flash, + "help": "Flash the project.", + 
"options": global_options + [baud_rate, port], + "dependencies": ["all"], + "order_dependencies": ["erase_flash"], + }, + "erase_flash": { + "callback": erase_flash, + "help": "Erase entire flash chip.", + "options": [baud_rate, port], + }, + "monitor": { + "callback": monitor, + "help": "Display serial output.", + "options": [ + port, + { + "names": ["--print-filter", "--print_filter"], + "help": ( + "Filter monitor output.\n" + "Restrictions on what to print can be specified as a series of : items " + "where is the tag string and is a character from the set " + "{N, E, W, I, D, V, *} referring to a level. " + 'For example, "tag1:W" matches and prints only the outputs written with ' + 'ESP_LOGW("tag1", ...) or at lower verbosity level, i.e. ESP_LOGE("tag1", ...). ' + 'Not specifying a or using "*" defaults to Verbose level.\n' + 'Please see the IDF Monitor section of the ESP-IDF documentation ' + 'for a more detailed description and further examples.'), + "default": None, + }, + ], + "order_dependencies": [ + "flash", + "partition_table-flash", + "bootloader-flash", + "app-flash", + ], + }, + "partition_table-flash": { + "callback": flash, + "help": "Flash partition table only.", + "options": [baud_rate, port], + "dependencies": ["partition_table"], + "order_dependencies": ["erase_flash"], + }, + "bootloader-flash": { + "callback": flash, + "help": "Flash bootloader only.", + "options": [baud_rate, port], + "dependencies": ["bootloader"], + "order_dependencies": ["erase_flash"], + }, + "app-flash": { + "callback": flash, + "help": "Flash the app only.", + "options": [baud_rate, port], + "dependencies": ["app"], + "order_dependencies": ["erase_flash"], + }, + "encrypted-app-flash": { + "callback": flash, + "help": "Flash the encrypted app only.", + "dependencies": ["app"], + "order_dependencies": ["erase_flash"], + }, + "encrypted-flash": { + "callback": flash, + "help": "Flash the encrypted project.", + "dependencies": ["all"], + "order_dependencies": ["erase_flash"], + }, + }, + } + + return serial_actions diff --git a/tools/idf_py_actions/tools.py b/tools/idf_py_actions/tools.py new file mode 100644 index 000000000..5ec6d8b97 --- /dev/null +++ b/tools/idf_py_actions/tools.py @@ -0,0 +1,221 @@ +import os +import re +import subprocess +import sys + +from .contstants import GENERATORS +from .errors import FatalError + + +def executable_exists(args): + try: + subprocess.check_output(args) + return True + + except Exception: + return False + + +def realpath(path): + """ + Return the cannonical path with normalized case. + + It is useful on Windows to comparision paths in case-insensitive manner. + On Unix and Mac OS X it works as `os.path.realpath()` only. 
+ """ + return os.path.normcase(os.path.realpath(path)) + + +def _idf_version_from_cmake(): + version_path = os.path.join(os.environ["IDF_PATH"], "tools/cmake/version.cmake") + regex = re.compile(r"^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)") + ver = {} + try: + with open(version_path) as f: + for line in f: + m = regex.match(line) + + if m: + ver[m.group(1)] = m.group(2) + + return "v%s.%s.%s" % (ver["MAJOR"], ver["MINOR"], ver["PATCH"]) + except (KeyError, OSError): + sys.stderr.write("WARNING: Cannot find ESP-IDF version in version.cmake\n") + return None + + +def idf_version(): + """Print version of ESP-IDF""" + + # Try to get version from git: + try: + version = subprocess.check_output([ + "git", + "--git-dir=%s" % os.path.join(os.environ["IDF_PATH"], '.git'), + "--work-tree=%s" % os.environ["IDF_PATH"], "describe", "--tags", "--dirty" + ]).decode('utf-8', 'ignore').strip() + except (subprocess.CalledProcessError, UnicodeError): + # if failed, then try to parse cmake.version file + sys.stderr.write("WARNING: Git version unavailable, reading from source\n") + version = _idf_version_from_cmake() + + return version + + +def run_tool(tool_name, args, cwd): + def quote_arg(arg): + " Quote 'arg' if necessary " + if " " in arg and not (arg.startswith('"') or arg.startswith("'")): + return "'" + arg + "'" + return arg + + display_args = " ".join(quote_arg(arg) for arg in args) + print("Running %s in directory %s" % (tool_name, quote_arg(cwd))) + print('Executing "%s"...' % str(display_args)) + try: + # Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup + subprocess.check_call(args, env=os.environ, cwd=cwd) + except subprocess.CalledProcessError as e: + raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode)) + + +def _strip_quotes(value, regexp=re.compile(r"^\"(.*)\"$|^'(.*)'$|^(.*)$")): + """ + Strip quotes like CMake does during parsing cache entries + """ + + return [x for x in regexp.match(value).groups() if x is not None][0].rstrip() + + +def _parse_cmakecache(path): + """ + Parse the CMakeCache file at 'path'. + + Returns a dict of name:value. + + CMakeCache entries also each have a "type", but this is currently ignored. + """ + result = {} + with open(path) as f: + for line in f: + # cmake cache lines look like: CMAKE_CXX_FLAGS_DEBUG:STRING=-g + # groups are name, type, value + m = re.match(r"^([^#/:=]+):([^:=]+)=(.*)\n$", line) + if m: + result[m.group(1)] = m.group(3) + return result + + +def _new_cmakecache_entries(cache_path, new_cache_entries): + if not os.path.exists(cache_path): + return True + + if new_cache_entries: + current_cache = _parse_cmakecache(cache_path) + + for entry in new_cache_entries: + key, value = entry.split("=", 1) + current_value = current_cache.get(key, None) + if current_value is None or _strip_quotes(value) != current_value: + return True + + return False + + +def _detect_cmake_generator(prog_name): + """ + Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found. + """ + for (generator, _, version_check, _) in GENERATORS: + if executable_exists(version_check): + return generator + raise FatalError("To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" % prog_name) + + +def ensure_build_directory(args, prog_name, always_run_cmake=False): + """Check the build directory exists and that cmake has been run there. 
+ + If this isn't the case, create the build directory (if necessary) and + do an initial cmake run to configure it. + + This function will also check args.generator parameter. If the parameter is incompatible with + the build directory, an error is raised. If the parameter is None, this function will set it to + an auto-detected default generator or to the value already configured in the build directory. + """ + project_dir = args.project_dir + # Verify the project directory + if not os.path.isdir(project_dir): + if not os.path.exists(project_dir): + raise FatalError("Project directory %s does not exist" % project_dir) + else: + raise FatalError("%s must be a project directory" % project_dir) + if not os.path.exists(os.path.join(project_dir, "CMakeLists.txt")): + raise FatalError("CMakeLists.txt not found in project directory %s" % project_dir) + + # Verify/create the build directory + build_dir = args.build_dir + if not os.path.isdir(build_dir): + os.makedirs(build_dir) + cache_path = os.path.join(build_dir, "CMakeCache.txt") + + args.define_cache_entry.append("CCACHE_ENABLE=%d" % args.ccache) + + if always_run_cmake or _new_cmakecache_entries(cache_path, args.define_cache_entry): + if args.generator is None: + args.generator = _detect_cmake_generator(prog_name) + try: + cmake_args = [ + "cmake", + "-G", + args.generator, + "-DPYTHON_DEPS_CHECKED=1", + "-DESP_PLATFORM=1", + ] + if not args.no_warnings: + cmake_args += ["--warn-uninitialized"] + + if args.define_cache_entry: + cmake_args += ["-D" + d for d in args.define_cache_entry] + cmake_args += [project_dir] + + run_tool("cmake", cmake_args, cwd=args.build_dir) + except Exception: + # don't allow partially valid CMakeCache.txt files, + # to keep the "should I run cmake?" logic simple + if os.path.exists(cache_path): + os.remove(cache_path) + raise + + # Learn some things from the CMakeCache.txt file in the build directory + cache = _parse_cmakecache(cache_path) + try: + generator = cache["CMAKE_GENERATOR"] + except KeyError: + generator = _detect_cmake_generator(prog_name) + if args.generator is None: + args.generator = (generator) # reuse the previously configured generator, if none was given + if generator != args.generator: + raise FatalError("Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again." % + (generator, args.generator, prog_name)) + + try: + home_dir = cache["CMAKE_HOME_DIRECTORY"] + if realpath(home_dir) != realpath(project_dir): + raise FatalError( + "Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again." 
% + (build_dir, realpath(home_dir), realpath(project_dir), prog_name)) + except KeyError: + pass # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet + + +def merge_action_lists(*action_lists): + merged_actions = { + "global_options": [], + "actions": {}, + "global_action_callbacks": [], + } + for action_list in action_lists: + merged_actions["global_options"].extend(action_list.get("global_options", [])) + merged_actions["actions"].update(action_list.get("actions", {})) + merged_actions["global_action_callbacks"].extend(action_list.get("global_action_callbacks", [])) + return merged_actions diff --git a/tools/test_idf_py/extra_path/idf_some_module.py b/tools/test_idf_py/extra_path/idf_some_module.py new file mode 100644 index 000000000..fad438470 --- /dev/null +++ b/tools/test_idf_py/extra_path/idf_some_module.py @@ -0,0 +1,21 @@ +def action_extensions(base_actions, project_path): + def some_callback(ut_apply_config_name, ctx, args): + print("!!! From some subcommand") + + def some_global_callback(ctx, global_args, tasks): + print("!!! From some global callback: %s" % global_args.some_extension_option) + + return { + "global_options": [{ + "names": ["--some-extension-option"], + "help": "Help for option --some-extension-option", + "default": "test", + }], + "global_action_callbacks": [some_global_callback], + "actions": { + "extra_subcommand": { + "callback": some_callback, + "help": "Help for some subcommand.", + }, + }, + } diff --git a/tools/test_idf_py/test_idf_extensions/idf_test_extension/__init__.py b/tools/test_idf_py/test_idf_extensions/idf_test_extension/__init__.py new file mode 100644 index 000000000..2f7b7f1ac --- /dev/null +++ b/tools/test_idf_py/test_idf_extensions/idf_test_extension/__init__.py @@ -0,0 +1 @@ +from .test_extension import action_extensions # noqa: F401 diff --git a/tools/test_idf_py/test_idf_extensions/idf_test_extension/test_extension.py b/tools/test_idf_py/test_idf_extensions/idf_test_extension/test_extension.py new file mode 100644 index 000000000..089ee8623 --- /dev/null +++ b/tools/test_idf_py/test_idf_extensions/idf_test_extension/test_extension.py @@ -0,0 +1,24 @@ +import os + + +def action_extensions(base_actions, project_path=os.getcwd()): + def test_callback(ut_apply_config_name, ctx, args): + print("!!! From test_subcommand") + + def test_global_callback(ctx, global_args, tasks): + print("!!! From test global callback: %s" % global_args.test_extension_option) + + return { + "global_options": [{ + "names": ["--test-extension-option"], + "help": "Help for option --test-extension-option", + "default": "test", + }], + "global_action_callbacks": [test_global_callback], + "actions": { + "test_subcommand": { + "callback": test_callback, + "help": "Help for test subcommand.", + }, + }, + } diff --git a/tools/test_idf_py/test_idf_py.py b/tools/test_idf_py/test_idf_py.py index 1a9f8a073..672b76ac6 100755 --- a/tools/test_idf_py/test_idf_py.py +++ b/tools/test_idf_py/test_idf_py.py @@ -14,8 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import sys import unittest +import subprocess try: from StringIO import StringIO @@ -28,32 +30,67 @@ except ImportError: sys.path.append('..') import idf +current_dir = os.path.dirname(os.path.realpath(__file__)) +idf_py_path = os.path.join(current_dir, '..', 'idf.py') +extension_path = os.path.join(current_dir, 'test_idf_extensions', 'idf_test_extension') +link_path = os.path.join(current_dir, '..', 'idf_py_actions', 'idf_test_extension') + + +class TestExtensions(unittest.TestCase): + def test_extension_loading(self): + try: + os.symlink(extension_path, link_path) + os.environ["IDF_EXTRA_ACTIONS_PATH"] = os.path.join(current_dir, 'extra_path') + output = subprocess.check_output([sys.executable, idf_py_path, "--help"], + env=os.environ).decode('utf-8', 'ignore') + + self.assertIn('--test-extension-option', output) + self.assertIn('test_subcommand', output) + self.assertIn('--some-extension-option', output) + self.assertIn('extra_subcommand', output) + finally: + os.remove(link_path) + + def test_extension_execution(self): + try: + os.symlink(extension_path, link_path) + os.environ["IDF_EXTRA_ACTIONS_PATH"] = ";".join([os.path.join(current_dir, 'extra_path')]) + output = subprocess.check_output( + [sys.executable, idf_py_path, "--some-extension-option=awesome", 'test_subcommand', "extra_subcommand"], + env=os.environ).decode('utf-8', 'ignore') + self.assertIn('!!! From some global callback: awesome', output) + self.assertIn('!!! From some subcommand', output) + self.assertIn('!!! From test global callback: test', output) + self.assertIn('!!! From test_subcommand', output) + finally: + os.remove(link_path) + class TestDependencyManagement(unittest.TestCase): def test_dependencies(self): result = idf.init_cli()( - args=['--no-run', 'flash'], + args=['--dry-run', 'flash'], standalone_mode=False, ) self.assertEqual(['all', 'flash'], list(result.keys())) def test_order_only_dependencies(self): result = idf.init_cli()( - args=['--no-run', 'build', 'fullclean', 'all'], + args=['--dry-run', 'build', 'fullclean', 'all'], standalone_mode=False, ) self.assertEqual(['fullclean', 'all'], list(result.keys())) def test_repeated_dependencies(self): result = idf.init_cli()( - args=['--no-run', 'fullclean', 'app', 'fullclean', 'fullclean'], + args=['--dry-run', 'fullclean', 'app', 'fullclean', 'fullclean'], standalone_mode=False, ) self.assertEqual(['fullclean', 'app'], list(result.keys())) def test_complex_case(self): result = idf.init_cli()( - args=['--no-run', 'clean', 'monitor', 'clean', 'fullclean', 'flash'], + args=['--dry-run', 'clean', 'monitor', 'clean', 'fullclean', 'flash'], standalone_mode=False, ) self.assertEqual(['fullclean', 'clean', 'all', 'flash', 'monitor'], list(result.keys())) @@ -62,7 +99,7 @@ class TestDependencyManagement(unittest.TestCase): capturedOutput = StringIO() sys.stdout = capturedOutput idf.init_cli()( - args=['--no-run', 'clean', 'monitor', 'build', 'clean', 'fullclean', 'all'], + args=['--dry-run', 'clean', 'monitor', 'build', 'clean', 'fullclean', 'all'], standalone_mode=False, ) sys.stdout = sys.__stdout__ @@ -71,7 +108,7 @@ sys.stdout = capturedOutput idf.init_cli()( - args=['--no-run', 'clean', 'clean'], + args=['--dry-run', 'clean', 'clean'], standalone_mode=False, ) sys.stdout = sys.__stdout__ @@ -84,7 +121,7 @@ class TestGlobalAndSubcommandParameters(unittest.TestCase): """Can set -D twice: globally and for subcommand if values are the same""" idf.init_cli()( - args=['--no-run', '-DAAA=BBB', '-DCCC=EEE', 
'build', '-DAAA=BBB', '-DCCC=EEE'], + args=['--dry-run', '-DAAA=BBB', '-DCCC=EEE', 'build', '-DAAA=BBB', '-DCCC=EEE'], standalone_mode=False, ) @@ -93,7 +130,7 @@ class TestGlobalAndSubcommandParameters(unittest.TestCase): with self.assertRaises(idf.FatalError): idf.init_cli()( - args=['--no-run', '-DAAA=BBB', 'build', '-DAAA=EEE', '-DCCC=EEE'], + args=['--dry-run', '-DAAA=BBB', 'build', '-DAAA=EEE', '-DCCC=EEE'], standalone_mode=False, )
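
The test modules in this patch also illustrate the extension interface itself: an extension is a Python module or package exposing action_extensions(base_actions, project_path), found either inside tools/idf_py_actions or in a directory listed in IDF_EXTRA_ACTIONS_PATH, and returning a dict with "global_options", "global_action_callbacks" and "actions" keys, the same structure that merge_action_lists() combines in the core action files. Below is a minimal sketch of a third-party extension written in the same shape; the module name idf_hello.py, the --hello-greeting option and the hello action are hypothetical examples and not part of this patch, and the idf_ module-name prefix mirrors what idf_some_module.py in extra_path assumes.

# idf_hello.py - hypothetical extension module; place its directory in
# IDF_EXTRA_ACTIONS_PATH so idf.py can discover it, as the tests above do.
def action_extensions(base_actions, project_path):
    def hello_callback(action_name, ctx, args):
        # Action callbacks receive the action name, the Click context and the parsed global args
        print("Hello from %s" % project_path)

    def hello_global_callback(ctx, global_args, tasks):
        # Global callbacks run once with all global options and the list of tasks to execute
        print("Greeting option is: %s" % global_args.hello_greeting)

    return {
        "global_options": [{
            "names": ["--hello-greeting"],
            "help": "Greeting text for the hello action.",
            "default": "hi",
        }],
        "global_action_callbacks": [hello_global_callback],
        "actions": {
            "hello": {
                "callback": hello_callback,
                "help": "Print a greeting from an idf.py extension.",
            },
        },
    }

With the directory containing such a module added to IDF_EXTRA_ACTIONS_PATH (the tests join multiple entries with ';'), "idf.py --help" should list both the new option and the new action, in the same way test_extension_loading checks for --some-extension-option and extra_subcommand.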