From b124625a5252d8a5b22adad8e906bf93174e6384 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 29 Nov 2023 14:28:35 +0100 Subject: [PATCH] refactoring --- src/runner.py | 51 +++++++++++++++++++++++++++------------------------ 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/src/runner.py b/src/runner.py index 268cede..7d0011a 100644 --- a/src/runner.py +++ b/src/runner.py @@ -33,19 +33,35 @@ class Runner: assert self.test_dir_name self.root_dir = Path(__file__).parent - def _run_main_setup_and_test(self): + def run_tests(self): + # check if required dependencies have passed + self._assert_dependencies_passed() + + # run main setup if available if isinstance(self.main_setup_name, str): self._run_or_skip_test( identifier_string=self.combine_names(self.name, self.main_setup_name), test_path=self.root_dir / self.test_dir_name / self.main_setup_name, ) + # run main test if available if isinstance(self.main_test_name, str): self._run_or_skip_test( identifier_string=self.combine_names(self.name, self.main_test_name), test_path=self.root_dir / self.test_dir_name / self.main_test_name, ) + # run sub tests if conditions are met + for sub_test in self.sub_tests: + condition_function = sub_test["condition"] + sub_test_name = sub_test["test_file"] + identifier_string = self.combine_names(self.name, sub_test_name) + if condition_function(self.config): + test_path = self.root_dir / self.test_dir_name / sub_test_name + self._run_or_skip_test(identifier_string=identifier_string, test_path=test_path) + else: + self._create_result_file(result=-1, identifier_string=identifier_string) + def _run_or_skip_test(self, identifier_string: str, test_path: Path): if not self.prevent_skip and self._is_test_passed(identifier_string, remove_existing=True): logger.info(f"skipping {identifier_string}") @@ -116,19 +132,6 @@ class Runner: return pytest.main(command_arguments) - def run_tests(self): - self._assert_dependencies_passed() - self._run_main_setup_and_test() - for sub_test in 
self.sub_tests: - condition_function = sub_test["condition"] - sub_test_name = sub_test["test_file"] - identifier_string = self.combine_names(self.name, sub_test_name) - if condition_function(self.config): - test_path = self.root_dir / self.test_dir_name / sub_test_name - self._run_or_skip_test(identifier_string=identifier_string, test_path=test_path) - else: - self._create_result_file(result=-1, identifier_string=identifier_string) - def _create_result_file( self, result: int, @@ -141,6 +144,16 @@ class Runner: with open(file_path, "w") as _: pass # create empty file + def _assert_dependencies_passed(self): + """assert that all dependency setups passed before""" + + passed_tests = [r.name for r in self.DIRS.RESULTS.glob("*") if "passed" in r.name] + for dependencie in self.dependencies: + dependencie_identifier = self.combine_names(dependencie.name, dependencie.main_setup_name) + assert any( + dependencie_identifier in f for f in passed_tests + ), f"could not run {self.name} because {dependencie} did not run before" + @staticmethod def result_int_to_str(result_int: int) -> str: match result_int: @@ -154,13 +167,3 @@ class Runner: @staticmethod def combine_names(*names: str) -> str: return "-".join(names) - - def _assert_dependencies_passed(self): - """assert that all dependencie setups passed before""" - - passed_tests = [r.name for r in self.DIRS.RESULTS.glob("*") if "passed" in r.name] - for dependencie in self.dependencies: - dependencie_identifier = self.combine_names(dependencie.name, dependencie.main_setup_name) - assert any( - dependencie_identifier in f for f in passed_tests - ), f"could not run {self.name} because {dependencie} did not run before"