Merge branch 'test/assign_ut_cases_according_to_sdkconfig' into 'master'

CI: assign ut cases according to sdkconfig

See merge request idf/esp-idf!2407
This commit is contained in:
Ivan Grokhotkov 2018-05-24 19:28:03 +08:00
commit 1c2dcd78c8
9 changed files with 265 additions and 204 deletions

View file

@ -505,23 +505,16 @@ assign_test:
- build_ssc_02
- build_esp_idf_tests
variables:
UT_BIN_PATH: "tools/unit-test-app/output"
OUTPUT_BIN_PATH: "test_bins/ESP32_IDF"
TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw"
EXAMPLE_CONFIG_OUTPUT_PATH: "$CI_PROJECT_DIR/examples/test_configs"
artifacts:
paths:
- $OUTPUT_BIN_PATH
- components/idf_test/*/CIConfigs
- components/idf_test/*/TC.sqlite
- $EXAMPLE_CONFIG_OUTPUT_PATH
- tools/unit-test-app/output
expire_in: 1 mos
before_script: *add_gitlab_key_before
script:
# first move test bins together: test_bins/CHIP_SDK/TestApp/bin_files
- mkdir -p $OUTPUT_BIN_PATH
- cp -r SSC/ssc_bin/* $OUTPUT_BIN_PATH
# assign example tests
- python $TEST_FW_PATH/CIAssignExampleTest.py $IDF_PATH/examples $IDF_PATH/.gitlab-ci.yml $EXAMPLE_CONFIG_OUTPUT_PATH
# assign unit test cases
@ -572,6 +565,7 @@ assign_test:
stage: unit_test
dependencies:
- assign_test
- build_esp_idf_tests
variables:
TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw"
TEST_CASE_PATH: "$CI_PROJECT_DIR/tools/unit-test-app"
@ -662,287 +656,250 @@ UT_001_01:
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_02:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_03:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_04:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_05:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SDMODE
- UT_default
- UT_T1_1
UT_001_06:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SPIMODE
- UT_default
- UT_T1_1
UT_001_07:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_08:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_09:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_10:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_default
UT_001_11:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_12:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_13:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_14:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_15:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_16:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_17:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_18:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_19:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_20:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_21:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_22:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_23:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_001_24:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_002_01:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
- UT_T1_SDMODE
UT_002_02:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
- UT_T1_SDMODE
UT_002_03:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
UT_002_04:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
UT_002_05:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SDMODE
- UT_release
UT_002_06:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SPIMODE
- UT_release
UT_002_07:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
UT_002_08:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
UT_002_09:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
UT_002_10:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_release
UT_003_01:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
- UT_T1_SPIMODE
UT_003_02:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
- UT_T1_SPIMODE
UT_003_03:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
UT_003_04:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
UT_003_05:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SDMODE
- UT_single_core
UT_003_06:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SPIMODE
- UT_single_core
UT_003_07:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
UT_003_08:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
UT_003_09:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
UT_003_10:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_single_core
UT_004_01:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- psram
UT_004_02:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- psram
UT_004_03:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- psram
UT_004_04:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- psram
UT_004_05:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SDMODE
- UT_psram
- UT_T1_1
- psram
UT_004_06:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_SPIMODE
- UT_psram
- UT_T1_1
- psram
UT_004_07:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- psram
UT_004_08:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- psram
UT_004_09:
UT_005_01:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- UT_T1_SDMODE
- psram
UT_004_10:
UT_005_02:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
UT_004_11:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- UT_psram
- UT_T1_SPIMODE
- psram
IT_001_01:
<<: *test_template

View file

@ -26,7 +26,11 @@ test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path:
sys.path.insert(0, test_fw_path)
from Utility.CIAssignTest import AssignTest
from Utility.CIAssignTest import AssignTest, Group
class ExampleGroup(Group):
SORT_KEYS = CI_JOB_MATCH_KEYS = ["env_tag", "chip"]
class CIExampleAssignTest(AssignTest):
@ -43,6 +47,6 @@ if __name__ == '__main__':
help="output path of config files")
args = parser.parse_args()
assign_test = CIExampleAssignTest(args.test_case, args.ci_config_file)
assign_test = CIExampleAssignTest(args.test_case, args.ci_config_file, case_group=ExampleGroup)
assign_test.assign_cases()
assign_test.output_configs(args.output_path)

View file

@ -17,12 +17,18 @@ from Utility import CIAssignTest
class Group(CIAssignTest.Group):
SORT_KEYS = ["Test App", "SDK", "test environment", "multi_device", "multi_stage"]
CI_JOB_MATCH_KEYS = ["Test App", "SDK", "test environment"]
SORT_KEYS = ["config", "SDK", "test environment", "multi_device", "multi_stage", "tags"]
MAX_CASE = 30
ATTR_CONVERT_TABLE = {
"execution_time": "execution time"
}
# when IDF support multiple chips, SDK will be moved into tags, we can remove it
CI_JOB_MATCH_KEYS = ["test environment", "SDK"]
def __init__(self, case):
super(Group, self).__init__(case)
for tag in self._get_case_attr(case, "tags"):
self.ci_job_match_keys.add(tag)
@staticmethod
def _get_case_attr(case, attr):
@ -30,13 +36,6 @@ class Group(CIAssignTest.Group):
attr = Group.ATTR_CONVERT_TABLE[attr]
return case[attr]
@staticmethod
def _get_ut_config(test_app):
# we format test app "UT_ + config" when parsing test cases
# now we need to extract config
assert test_app[:3] == "UT_"
return test_app[3:]
def _create_extra_data(self, test_function):
"""
For unit test case, we need to copy some attributes of test cases into config file.
@ -45,7 +44,7 @@ class Group(CIAssignTest.Group):
case_data = []
for case in self.case_list:
one_case_data = {
"config": self._get_ut_config(self._get_case_attr(case, "Test App")),
"config": self._get_case_attr(case, "config"),
"name": self._get_case_attr(case, "summary"),
"reset": self._get_case_attr(case, "reset"),
"timeout": self._get_case_attr(case, "timeout"),
@ -101,8 +100,7 @@ class UnitTestAssignTest(CIAssignTest.AssignTest):
def __init__(self, test_case_path, ci_config_file):
CIAssignTest.AssignTest.__init__(self, test_case_path, ci_config_file, case_group=Group)
@staticmethod
def _search_cases(test_case_path, case_filter=None):
def _search_cases(self, test_case_path, case_filter=None):
"""
For unit test case, we don't search for test functions.
The unit test cases is stored in a yaml file which is created in job build-idf-test.

View file

@ -45,7 +45,7 @@ import json
import yaml
from Utility import (CaseConfig, SearchCases, GitlabCIJob)
from Utility import (CaseConfig, SearchCases, GitlabCIJob, console_log)
class Group(object):
@ -63,8 +63,8 @@ class Group(object):
self.execution_time = 0
self.case_list = [case]
self.filters = dict(zip(self.SORT_KEYS, [self._get_case_attr(case, x) for x in self.SORT_KEYS]))
self.ci_job_match_keys = dict(zip(self.CI_JOB_MATCH_KEYS,
[self._get_case_attr(case, x) for x in self.CI_JOB_MATCH_KEYS]))
# we use ci_job_match_keys to match CI job tags. It's a set of required tags.
self.ci_job_match_keys = set([self._get_case_attr(case, x) for x in self.CI_JOB_MATCH_KEYS])
@staticmethod
def _get_case_attr(case, attr):
@ -206,7 +206,11 @@ class AssignTest(object):
break
else:
failed_to_assign.append(group)
assert not failed_to_assign
if failed_to_assign:
console_log("Please add the following jobs to .gitlab-ci.yml with specific tags:", "R")
for group in failed_to_assign:
console_log("* Add job with: " + ",".join(group.ci_job_match_keys), "R")
raise RuntimeError("Failed to assign test case to CI jobs")
def output_configs(self, output_path):
"""

View file

@ -27,6 +27,7 @@ class Job(dict):
def __init__(self, job, job_name):
super(Job, self).__init__(job)
self["name"] = job_name
self.tags = set(self["tags"])
def match_group(self, group):
"""
@ -37,17 +38,8 @@ class Job(dict):
:return: True or False
"""
match_result = False
for _ in range(1):
if "case group" in self:
# this job is already assigned
break
for value in group.ci_job_match_keys.values():
if value not in self["tags"]:
break
else:
continue
break
else:
if "case group" not in self and group.ci_job_match_keys == self.tags:
# group not assigned and all tags match
match_result = True
return match_result

View file

@ -10,6 +10,7 @@ ESP-IDF unit tests are run using Unit Test App. The app can be built with the un
* `make menuconfig` to configure the Unit Test App.
* `make TEST_COMPONENTS=` with `TEST_COMPONENTS` set to names of the components to be included in the test app. Or `make TESTS_ALL=1` to build the test app with all the tests for components having `test` subdirectory.
* Follow the printed instructions to flash, or run `make flash`.
* Unit tests have a few preset sdkconfigs. The commands `make ut-clean-config_name` and `make ut-build-config_name` (where `config_name` is a file name under the `unit-test-app/configs` folder) build with those preset configs. For example, you can use `make ut-build-default TESTS_ALL=1` to build with the config file `unit-test-app/configs/default`. The built binary for this config will be copied to the `unit-test-app/output/config_name` folder.
# Running Unit Tests
@ -20,3 +21,85 @@ The unit test loader will prompt by showing a menu of available tests to run:
* `[tagname]` to run tests with "tag"
* `![tagname]` to run tests without "tag" (`![ignore]` is very useful as it runs all CI-enabled tests.)
* `"test name here"` to run test with given name
# Testing Unit Tests with CI
## CI Test Flow for Unit Test
Unit test uses 3 stages in CI: `build`, `assign_test`, `unit_test`.
### Build Stage:
`build_esp_idf_tests` job will build all UT configs and parse test cases from the built elf files. The built binaries (`tools/unit-test-app/output`) and parsed cases (`components/idf_test/unit_test/TestCaseAll.yml`) will be saved as artifacts.
When we add a new test case, the build constructs a structure to save the case data. We'll parse the test case from this structure. The description (defined in the test case: `TEST_CASE("name", "description")`) is used to extend the test case definition. The format of a test description is a list of tags:
1. first tag is always group of test cases, it's mandatory
2. the rest of the tags should be in `[type=value]` form. Tags can have a default value and an omitted value. For example, the reset tag's default value is "POWERON_RESET", and its omitted value is "" (do not reset):
* "[reset]" equal to [reset=POWERON_RESET]
* if reset tag doesn't exist, then it equals to [reset=""]
`TagDefinition.yml` defines how we should parse the description. In `TagDefinition.yml`, we declare the tags we are interested in, their default value and omitted value. The parser will parse the properties of test cases according to this file, and add them as test case attributes.
We will build unit-test-app with different sdkconfigs. Some config items require a specific board to run. For example, if `CONFIG_SPIRAM_SUPPORT` is enabled, then the unit test app must run on a board that supports PSRAM. `ConfigDependency.yml` defines the mapping between sdkconfig items and tags. The tags are saved as case attributes and used to select jobs and runners. In the previous example, the `psram` tag is generated, which will only select jobs and runners that also carry the `psram` tag.
### Assign Test Stage:
`assign_test` job will try to assign all cases to the test jobs defined in `.gitlab-ci.yml`, according to test environment and tags. For each job, one config file with the same name as the test job will be generated in `components/idf_test/unit_test/CIConfigs/` (this folder is passed to test jobs as artifacts). These config files tell each test job which cases it needs to run, and pass some extra configs (such as whether the case requires a reset) on to the runner.
Please check related document in tiny-test-fw for details.
### Unit Test Stage:
All jobs in the `unit_test` stage run according to the generated unit test configs. The unit test jobs use the tiny-test-fw runner to execute the test cases. The test logs are saved as artifacts.
Unit test jobs reset the board before running each case (because some cases do not clean up when they fail). This keeps test cases independent of each other during execution.
## Handle Unit Test CI Issues
### 1. Assign Test Failures
Gitlab CI does not support creating jobs at runtime. We must manually add all jobs to the CI config file. To make tests run in parallel, we limit the number of cases running on each job. When new unit test cases are added, they could exceed the capacity that the current unit test jobs support. In this case, the assign test job will raise an error, reminding you to add jobs to `.gitlab-ci.yml`.
```
Please add the following jobs to .gitlab-ci.yml with specific tags:
* Add job with: UT_T1_1, ESP32_IDF, psram
* Add job with: UT_T1_1, ESP32_IDF
```
The above is an example of error message in assign test job. In this case, please add the following jobs in `.gitlab-ci.yml`:
```
UT_001_25:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
UT_004_09:
<<: *unit_test_template
tags:
- ESP32_IDF
- UT_T1_1
- psram
```
The naming rule for jobs is `UT` + `job type index` + `job index`. Each combination of tags is a different job type.
### 2. Debugging Failed Cases
First you can check the logs. It's saved as unit test job artifacts. You can download from the test job page.
If you want to reproduce locally, you need to:
1. Download artifacts of `build_esp_idf_tests`. The built binary is in `tools/unit-test-app/output` folder.
* A binary built in CI could be slightly different from a locally built binary of the same revision; some cases might only fail with the CI-built binary.
2. Check the following print in CI job to get the config name: `Running unit test for config: config_name`. Then flash the binary of this config to your board.
3. Run the failed case on your board (refer to Running Unit Tests section).
* There are some special UT cases (multiple-stage cases, multiple-device cases) that require user interaction:
* You can refer to [unit test document](https://esp-idf.readthedocs.io/en/latest/api-guides/unit-tests.html#running-unit-tests) to run test manually.
* Or, you can use `tools/unit-test-app/unit_test.py` to run the test cases:
* read document of tiny-test-fw, set correct `TEST_FW_PATH` and `IDF_PATH`
* modify `unit_test.py`, pass the test cases need to test as parameter (refer to test function doc string for supported parameter format) to test functions.
* use `python unit_test.py` to run test
* You can also use `tools/tiny-test-fw/Runner.py` to run test cases (this is the same as what the Runner does). Please use the command `python Runner.py -c $CONFIG_FILE $IDF_PATH/tools/unit-test-app`, where `CONFIG_FILE` is a YAML file with the same name as the CI job, found in `components/idf_test/unit_test/CIConfigs` (artifacts, which need to be downloaded from the `assign_test` job).

View file

@ -0,0 +1 @@
"psram": "CONFIG_SPIRAM_SUPPORT=y"

View file

@ -20,13 +20,8 @@ TEST_CASE_PATTERN = {
"test environment": "UT_T1_1",
"reset": "",
"expected result": "1. set succeed",
"cmd set": "test_unit_test_case"
}
CONFIG_FILE_PATTERN = {
"Config": {"execute count": 1, "execute order": "in order"},
"DUT": [],
"Filter": [{"Add": {"ID": []}}]
"cmd set": "test_unit_test_case",
"Test App": "UT",
}
@ -39,11 +34,12 @@ class Parser(object):
# file path (relative to idf path)
TAG_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "TagDefinition.yml")
MODULE_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "ModuleDefinition.yml")
CONFIG_DEPENDENCY_FILE = os.path.join("tools", "unit-test-app", "tools", "ConfigDependency.yml")
MODULE_ARTIFACT_FILE = os.path.join("components", "idf_test", "ModuleDefinition.yml")
TEST_CASE_FILE = os.path.join("components", "idf_test", "unit_test", "TestCaseAll.yml")
UT_BIN_FOLDER = os.path.join("tools", "unit-test-app", "builds")
UT_BIN_FOLDER = os.path.join("tools", "unit-test-app", "output")
ELF_FILE = "unit-test-app.elf"
APP_NAME_PREFIX = "UT_"
SDKCONFIG_FILE = "sdkconfig"
def __init__(self, idf_path=os.getenv("IDF_PATH")):
self.test_env_tags = {}
@ -52,21 +48,24 @@ class Parser(object):
self.idf_path = idf_path
self.tag_def = yaml.load(open(os.path.join(idf_path, self.TAG_DEF_FILE), "r"))
self.module_map = yaml.load(open(os.path.join(idf_path, self.MODULE_DEF_FILE), "r"))
self.config_dependency = yaml.load(open(os.path.join(idf_path, self.CONFIG_DEPENDENCY_FILE), "r"))
# used to check if duplicated test case names
self.test_case_names = set()
self.parsing_errors = []
def parse_test_cases_from_elf(self, elf_file, app_name):
def parse_test_cases_for_one_config(self, config_output_folder, config_name):
"""
parse test cases from elf and save test cases need to be executed to unit test folder
:param elf_file: elf file path
:param app_name: built unit test app name
:param config_output_folder: build folder of this config
:param config_name: built unit test config name
"""
elf_file = os.path.join(config_output_folder, self.ELF_FILE)
subprocess.check_output('xtensa-esp32-elf-objdump -t {} | grep test_desc > case_address.tmp'.format(elf_file),
shell=True)
subprocess.check_output('xtensa-esp32-elf-objdump -s {} > section_table.tmp'.format(elf_file), shell=True)
table = CreateSectionTable.SectionTable("section_table.tmp")
tags = self.parse_tags(os.path.join(config_output_folder, self.SDKCONFIG_FILE))
test_cases = []
with open("case_address.tmp", "r") as f:
for line in f:
@ -82,17 +81,17 @@ class Parser(object):
name = table.get_string("any", name_addr)
desc = table.get_string("any", desc_addr)
file_name = table.get_string("any", file_name_addr)
tc = self.parse_one_test_case(name, desc, file_name, app_name)
tc = self.parse_one_test_case(name, desc, file_name, config_name, tags)
# check if duplicated case names
# we need to use it to select case,
# if duplicated IDs, Unity could select incorrect case to run
# and we need to check all cases no matter if it's going te be executed by CI
# also add app_name here, we allow same case for different apps
if (tc["summary"] + app_name) in self.test_case_names:
if (tc["summary"] + config_name) in self.test_case_names:
self.parsing_errors.append("duplicated test case ID: " + tc["summary"])
else:
self.test_case_names.add(tc["summary"] + app_name)
self.test_case_names.add(tc["summary"] + config_name)
if tc["CI ready"] == "Yes":
# update test env list and the cases of same env list
@ -150,13 +149,32 @@ class Parser(object):
pass
return p
def parse_one_test_case(self, name, description, file_name, app_name):
def parse_tags(self, sdkconfig_file):
"""
Some test configs could requires different DUTs.
For example, if CONFIG_SPIRAM_SUPPORT is enabled, we need WROVER-Kit to run test.
This method will get tags for runners according to ConfigDependency.yml(maps tags to sdkconfig).
:param sdkconfig_file: sdkconfig file of the unit test config
:return: required tags for runners
"""
required_tags = []
with open(sdkconfig_file, "r") as f:
configs_raw_data = f.read()
configs = configs_raw_data.splitlines(False)
for tag in self.config_dependency:
if self.config_dependency[tag] in configs:
required_tags.append(tag)
return required_tags
def parse_one_test_case(self, name, description, file_name, config_name, tags):
"""
parse one test case
:param name: test case name (summary)
:param description: test case description (tag string)
:param file_name: the file defines this test case
:param app_name: built unit test app name
:param config_name: built unit test app name
:param tags: tags to select runners
:return: parsed test case
"""
prop = self.parse_case_properities(description)
@ -179,7 +197,7 @@ class Parser(object):
self.file_name_cache[file_name_hash])
test_case = deepcopy(TEST_CASE_PATTERN)
test_case.update({"Test App": self.APP_NAME_PREFIX + app_name,
test_case.update({"config": config_name,
"module": self.module_map[prop["module"]]['module'],
"CI ready": "No" if prop["ignore"] == "Yes" else "Yes",
"ID": tc_id,
@ -191,7 +209,8 @@ class Parser(object):
"summary": name,
"multi_device": prop["multi_device"],
"multi_stage": prop["multi_stage"],
"timeout": int(prop["timeout"])})
"timeout": int(prop["timeout"]),
"tags": tags})
return test_case
def dump_test_cases(self, test_cases):
@ -212,12 +231,12 @@ class Parser(object):
""" parse test cases from multiple built unit test apps """
test_cases = []
test_app_folder = os.path.join(self.idf_path, self.UT_BIN_FOLDER)
test_apps = os.listdir(test_app_folder)
for app in test_apps:
elf_file = os.path.join(test_app_folder, app, self.ELF_FILE)
if os.path.exists(elf_file):
test_cases.extend(self.parse_test_cases_from_elf(elf_file, app))
output_folder = os.path.join(self.idf_path, self.UT_BIN_FOLDER)
test_configs = os.listdir(output_folder)
for config in test_configs:
config_output_folder = os.path.join(output_folder, config)
if os.path.exists(config_output_folder):
test_cases.extend(self.parse_test_cases_for_one_config(config_output_folder, config))
self.dump_test_cases(test_cases)

View file

@ -131,6 +131,7 @@ def run_unit_test_cases(env, extra_data):
failed_cases = []
for ut_config in case_config:
Utility.console_log("Running unit test for config: " + ut_config, "O")
dut = env.get_dut("unit-test-app", app_path=ut_config)
dut.start_app()
@ -365,6 +366,7 @@ def run_multiple_devices_cases(env, extra_data):
case_config = format_test_case_config(extra_data)
DUTS = {}
for ut_config in case_config:
Utility.console_log("Running unit test for config: " + ut_config, "O")
for one_case in case_config[ut_config]:
case_run(DUTS, ut_config, env, one_case, failed_cases)
@ -396,6 +398,7 @@ def run_multiple_stage_cases(env, extra_data):
failed_cases = []
for ut_config in case_config:
Utility.console_log("Running unit test for config: " + ut_config, "O")
dut = env.get_dut("unit-test-app", app_path=ut_config)
dut.start_app()