From 17f4915cd40e3c2435384212f9b08a969a85cd1f Mon Sep 17 00:00:00 2001 From: Levente Meszaros Date: Tue, 3 Sep 2024 10:50:08 +0200 Subject: [PATCH] python: Added r prefix to regex string literals. --- bin/inet_diffingerprints | 2 +- python/inet/common/compile.py | 18 ++++++------- python/inet/common/summary.py | 4 +-- python/inet/common/util.py | 20 +++++++-------- python/inet/main.py | 2 +- python/inet/simulation/build.py | 8 +++--- python/inet/simulation/config.py | 2 +- python/inet/simulation/project.py | 32 +++++++++++------------ python/inet/simulation/task.py | 16 ++++++------ python/inet/test/chart.py | 10 ++++---- python/inet/test/feature.py | 26 +++++++++---------- python/inet/test/fingerprint/task.py | 10 ++++---- python/inet/test/sanitizer.py | 4 +-- python/inet/test/statistical.py | 10 ++++---- python/inet/test/validation.py | 38 ++++++++++++++-------------- 15 files changed, 101 insertions(+), 101 deletions(-) diff --git a/bin/inet_diffingerprints b/bin/inet_diffingerprints index f406bf50036..81b31b17e95 100755 --- a/bin/inet_diffingerprints +++ b/bin/inet_diffingerprints @@ -16,7 +16,7 @@ def computeUniqueFingerprints(fingerprints): def readFingerprints1(eventlogFile): "RegExp version" - regExp = re.compile("^E # ([0-9]+) t ((?:[0-9]*\.)?[0-9]+) m [0-9]+ ce [0-9]+ msg [0-9]+ f (?:\"([^\"]*)\"|(\S+))$") + regExp = re.compile(r"^E # ([0-9]+) t ((?:[0-9]*\.)?[0-9]+) m [0-9]+ ce [0-9]+ msg [0-9]+ f (?:\"([^\"]*)\"|(\S+))$") fingerprints = list() for line in eventlogFile: match = regExp.match(line) diff --git a/python/inet/common/compile.py b/python/inet/common/compile.py index db2b1342c25..4968739c392 100644 --- a/python/inet/common/compile.py +++ b/python/inet/common/compile.py @@ -68,26 +68,26 @@ def get_parameters_string(self, **kwargs): def get_input_files(self): output_folder = f"out/clang-{self.mode}" - object_path = re.sub("\\.msg", "_m.cc", self.file_path) - dependency_file_path = re.sub("\\.msg", "_m.h.d", self.file_path) + object_path = re.sub(r"\\.msg", "_m.cc", self.file_path) + dependency_file_path = re.sub(r"\\.msg", "_m.h.d", self.file_path) full_file_path = self.simulation_project.get_full_path(os.path.join(output_folder, dependency_file_path)) if os.path.exists(full_file_path): dependency = read_dependency_file(full_file_path) # KLUDGE: src folder hacked in and out - file_paths = dependency[re.sub("src/", "", object_path)] + file_paths = dependency[re.sub(r"src/", "", object_path)] return list(map(lambda file_path: self.simulation_project.get_full_path(os.path.join("src", file_path)), file_paths)) else: return [self.file_path] def get_output_files(self): - cpp_file_path = re.sub("\\.msg", "_m.cc", self.file_path) - header_file_path = re.sub("\\.msg", "_m.h", self.file_path) + cpp_file_path = re.sub(r"\\.msg", "_m.cc", self.file_path) + header_file_path = re.sub(r"\\.msg", "_m.h", self.file_path) return [f"{cpp_file_path}", f"{header_file_path}"] def get_arguments(self): executable = "opp_msgc" output_folder = f"out/clang-{self.mode}" - header_file_path = re.sub("\\.msg", "_m.h", self.file_path) + header_file_path = re.sub(r"\\.msg", "_m.h", self.file_path) import_paths = list(map(lambda msg_folder: self.simulation_project.get_full_path(msg_folder), self.simulation_project.msg_folders)) return [executable, "--msg6", @@ -118,8 +118,8 @@ def get_parameters_string(self, **kwargs): def get_input_files(self): output_folder = f"out/clang-{self.mode}" - object_path = re.sub("\\.cc", ".o", self.file_path) - dependency_file_path = re.sub("\\.cc", ".o.d", self.file_path) + 
object_path = re.sub(r"\\.cc", ".o", self.file_path) + dependency_file_path = re.sub(r"\\.cc", ".o.d", self.file_path) full_file_path = self.simulation_project.get_full_path(os.path.join(output_folder, dependency_file_path)) if os.path.exists(full_file_path): dependency = read_dependency_file(full_file_path) @@ -130,7 +130,7 @@ def get_input_files(self): def get_output_files(self): output_folder = f"out/clang-{self.mode}" - object_path = re.sub("\\.cc", ".o", self.file_path) + object_path = re.sub(r"\\.cc", ".o", self.file_path) return [f"{output_folder}/{object_path}"] def get_arguments(self): diff --git a/python/inet/common/summary.py b/python/inet/common/summary.py index a0015b63ee6..f75cd3b0057 100644 --- a/python/inet/common/summary.py +++ b/python/inet/common/summary.py @@ -37,7 +37,7 @@ def collect_modules(simulation_project, path="src"): match = re.match(r"^package ([\w\.]+)", line) if match: package = match.group(1) - package = re.sub("^\w+?\.", "", package) + package = re.sub(r"^\w+?\.", "", package) match = re.match(r"^(simple|module|network) (\w+)\b", line) if match: module = match.group(2) @@ -122,7 +122,7 @@ def collect_classes(simulation_project, path="src"): if match: class_name = match.group(1) relative_path = os.path.relpath(os.path.dirname(file_name), project_path) - relative_path = re.sub("^(\w+)/", "", relative_path) + relative_path = re.sub(r"^(\w+)/", "", relative_path) classes.append(relative_path + "/" + class_name) file.close() return classes diff --git a/python/inet/common/util.py b/python/inet/common/util.py index d3a3d9bed04..c16c7ed2cf7 100644 --- a/python/inet/common/util.py +++ b/python/inet/common/util.py @@ -74,7 +74,7 @@ def coalesce(*values): def convert_to_seconds(s): seconds_per_unit = {"ps": 1E-12, "ns": 1E-9, "us": 1E-6, "ms": 1E-3, "s": 1, "second": 1, "m": 60, "min": 60, "h": 3600, "hour": 3600, "d": 86400, "day": 86400, "w": 604800, "week": 604800} - match = re.match("(-?[0-9]*\.?[0-9]*) *([a-zA-Z]+)", s) + match = re.match(r"(-?[0-9]*\.?[0-9]*) *([a-zA-Z]+)", s) return float(match.group(1)) * seconds_per_unit[match.group(2)] def write_object(file_name, object): @@ -276,7 +276,7 @@ def __init__(self, logger): def collect_existing_ned_types(): types = set() for ini_file_path in glob.glob(get_inet_relative_path("**/*.ned"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ini_file_path, "r") as f: text = f.read() for type in re.findall("^simple (\\w+)", text, re.M): @@ -294,17 +294,17 @@ def collect_existing_ned_types(): def collect_referenced_ned_types(): types = set() for ini_file_path in glob.glob(get_inet_relative_path("**/*.ini"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ini_file_path, "r") as f: for type in re.findall("typename = \"(\\w+?)\"", f.read()): types.add(type) for ned_file_path in glob.glob(get_inet_relative_path("**/*.ned"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ned_file_path, "r") as f: for type in re.findall("~(\\w+)", f.read()): types.add(type) for rst_file_path in glob.glob(get_inet_relative_path("**/*.rst"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(rst_file_path, "r") as f: for type in re.findall(":ned:`(\\w+?)`", f.read()): types.add(type) @@ -313,12 +313,12 
@@ def collect_referenced_ned_types(): def collect_ned_type_reference_file_paths(type): references = [] for ini_file_path in glob.glob(get_inet_relative_path("**/*.ini"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ini_file_path, "r") as f: if re.search(f"typename = \"{type}\"", f.read()): references.append(ini_file_path) for rst_file_path in glob.glob(get_inet_relative_path("**/*.rst"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(rst_file_path, "r") as f: if re.search(f":ned:`{type}`", f.read()): references.append(rst_file_path) @@ -327,7 +327,7 @@ def collect_ned_type_reference_file_paths(type): def collect_existing_msg_types(): types = set() for ini_file_path in glob.glob(get_inet_relative_path("**/*.msg"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ini_file_path, "r") as f: text = f.read() for type in re.findall("^class (\\w+)", text, re.M): @@ -339,7 +339,7 @@ def collect_existing_msg_types(): def collect_existing_cpp_types(): types = set() for ini_file_path in glob.glob(get_inet_relative_path("**/*.h"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ini_file_path, "r") as f: text = f.read() for type in re.findall("^class INET_API (\\w+)", text, re.M): @@ -347,7 +347,7 @@ def collect_existing_cpp_types(): for type in re.findall("^enum (\\w+)", text, re.M): types.add(type) for ini_file_path in glob.glob(get_inet_relative_path("**/*.cc"), recursive=True): - if not re.search("doc/src/_deploy", ini_file_path): + if not re.search(r"doc/src/_deploy", ini_file_path): with open(ini_file_path, "r") as f: text = f.read() for type in re.findall("Register_Packet_Dropper_Function\\((\\w+),", text, re.M): diff --git a/python/inet/main.py b/python/inet/main.py index 674e3567bed..0285207f1e5 100644 --- a/python/inet/main.py +++ b/python/inet/main.py @@ -59,7 +59,7 @@ def process_run_tasks_arguments(args): if not has_filter_kwarg and not args.simulation_project: kwargs["working_directory_filter"] = os.path.relpath(os.getcwd(), os.path.realpath(simulation_project.get_full_path("."))) if "working_directory_filter" in kwargs: - kwargs["working_directory_filter"] = re.sub("(.*)/$", "\\1", kwargs["working_directory_filter"]) + kwargs["working_directory_filter"] = re.sub(r"(.*)/$", "\\1", kwargs["working_directory_filter"]) if args.simulation_runner == "inprocess": import omnetpp.cffi del kwargs["hosts"] diff --git a/python/inet/simulation/build.py b/python/inet/simulation/build.py index b11464b26a6..0773838daa6 100644 --- a/python/inet/simulation/build.py +++ b/python/inet/simulation/build.py @@ -117,8 +117,8 @@ def __init__(self, simulation_project=None, name="MSG compile task", mode="relea self.simulation_project = simulation_project self.mode = mode self.input_files = list(map(lambda input_file: self.simulation_project.get_full_path(input_file), self.simulation_project.get_msg_files())) - self.output_files = list(map(lambda output_file: re.sub("\\.msg", "_m.cc", output_file), self.input_files)) + \ - list(map(lambda output_file: re.sub("\\.msg", "_m.h", output_file), self.input_files)) + self.output_files = list(map(lambda output_file: re.sub(r"\\.msg", "_m.cc", output_file), self.input_files)) + \ + list(map(lambda output_file: 
re.sub(r"\\.msg", "_m.h", output_file), self.input_files)) def get_description(self): return self.simulation_project.get_name() + " " + super().get_description() @@ -165,7 +165,7 @@ def get_object_files(self): object_files = [] for cpp_folder in self.simulation_project.cpp_folders: file_paths = glob.glob(self.simulation_project.get_full_path(os.path.join(cpp_folder, "**/*.cc")), recursive=True) - object_files = object_files + list(map(lambda file_path: os.path.join(output_folder, self.simulation_project.get_relative_path(re.sub("\\.cc", ".o", file_path))), file_paths)) + object_files = object_files + list(map(lambda file_path: os.path.join(output_folder, self.simulation_project.get_relative_path(re.sub(r"\\.cc", ".o", file_path))), file_paths)) return object_files def is_up_to_date(self): @@ -280,7 +280,7 @@ def get_build_tasks(self, **kwargs): os.makedirs(output_folder) msg_compile_tasks = list(map(lambda msg_file: MsgCompileTask(simulation_project=self.simulation_project, file_path=msg_file, mode=self.mode), self.simulation_project.get_msg_files())) multiple_msg_compile_tasks = MultipleMsgCompileTasks(simulation_project=self.simulation_project, mode=self.mode, tasks=msg_compile_tasks, concurrent=self.concurrent_child_tasks) - msg_cpp_compile_tasks = list(map(lambda msg_file: CppCompileTask(simulation_project=self.simulation_project, file_path=re.sub("\\.msg", "_m.cc", msg_file), mode=self.mode), self.simulation_project.get_msg_files())) + msg_cpp_compile_tasks = list(map(lambda msg_file: CppCompileTask(simulation_project=self.simulation_project, file_path=re.sub(r"\\.msg", "_m.cc", msg_file), mode=self.mode), self.simulation_project.get_msg_files())) cpp_compile_tasks = list(map(lambda cpp_file: CppCompileTask(simulation_project=self.simulation_project, file_path=cpp_file, mode=self.mode), self.simulation_project.get_cpp_files())) all_cpp_compile_tasks = msg_cpp_compile_tasks + cpp_compile_tasks multiple_cpp_compile_tasks = MultipleCppCompileTasks(simulation_project=self.simulation_project, mode=self.mode, tasks=all_cpp_compile_tasks, concurrent=self.concurrent_child_tasks) diff --git a/python/inet/simulation/config.py b/python/inet/simulation/config.py index 4884efeb0c5..6526ca2aff5 100644 --- a/python/inet/simulation/config.py +++ b/python/inet/simulation/config.py @@ -144,7 +144,7 @@ def clean_simulation_results(self): _logger.info("Cleaning simulation results, folder = " + self.working_directory) simulation_project = self.simulation_project path = os.path.join(simulation_project.get_full_path(self.working_directory), "results") - if not re.search(".*/home/.*", path): + if not re.search(r".*/home/.*", path): raise Exception("Path is not in home") if os.path.exists(path): shutil.rmtree(path) diff --git a/python/inet/simulation/project.py b/python/inet/simulation/project.py index aa6db691c71..af3ef1b6e84 100644 --- a/python/inet/simulation/project.py +++ b/python/inet/simulation/project.py @@ -271,14 +271,14 @@ def get_effective_include_folders(self): def get_cpp_files(self): cpp_files = [] for cpp_folder in self.cpp_folders: - file_paths = list(filter(lambda file_path: not re.search("_m\\.cc", file_path), glob.glob(self.get_full_path(os.path.join(cpp_folder, "**/*.cc")), recursive=True))) + file_paths = list(filter(lambda file_path: not re.search(r"_m\\.cc", file_path), glob.glob(self.get_full_path(os.path.join(cpp_folder, "**/*.cc")), recursive=True))) cpp_files = cpp_files + list(map(lambda file_path: self.get_relative_path(file_path), file_paths)) return cpp_files def 
get_header_files(self): header_files = [] for cpp_folder in self.cpp_folders: - file_paths = list(filter(lambda file_path: not re.search("_m\\.h", file_path), glob.glob(self.get_full_path(os.path.join(cpp_folder, "**/*.h")), recursive=True))) + file_paths = list(filter(lambda file_path: not re.search(r"_m\\.h", file_path), glob.glob(self.get_full_path(os.path.join(cpp_folder, "**/*.h")), recursive=True))) header_files = header_files + list(map(lambda file_path: self.get_relative_path(file_path), file_paths)) return header_files @@ -313,34 +313,34 @@ def create_config_dict(config): config_dicts = {"General": create_config_dict("General")} config_dict = {} for line in file: - match = re.match("\\[(Config +)?(.*?)\\]", line) + match = re.match(r"\\[(Config +)?(.*?)\\]", line) if match: config = match.group(2) or match.group(3) config_dict = create_config_dict(config) config_dicts[config] = config_dict - match = re.match("#? *abstract-config *= *(\w+)", line) + match = re.match(r"#? *abstract-config *= *(\w+)", line) if match: config_dict["abstract_config"] = bool(match.group(1)) - match = re.match("#? *emulation *= *(\w+)", line) + match = re.match(r"#? *emulation *= *(\w+)", line) if match: config_dict["emulation"] = bool(match.group(1)) - match = re.match("#? *expected-result *= *\"(\w+)\"", line) + match = re.match(r"#? *expected-result *= *\"(\w+)\"", line) if match: config_dict["expected_result"] = match.group(1) - line = re.sub("(.*)#.*", "//1", line).strip() - match = re.match(" *extends *= *(\w+)", line) + line = re.sub(r"(.*)#.*", "//1", line).strip() + match = re.match(r" *extends *= *(\w+)", line) if match: config_dict["extends"] = match.group(1) - match = re.match(" *user-interface *= \"*(\w+)\"", line) + match = re.match(r" *user-interface *= \"*(\w+)\"", line) if match: config_dict["user_interface"] = match.group(1) - match = re.match("description *= *\"(.*)\"", line) + match = re.match(r"description *= *\"(.*)\"", line) if match: config_dict["description"] = match.group(1) - match = re.match("network *= *(.*)", line) + match = re.match(r"network *= *(.*)", line) if match: config_dict["network"] = match.group(1) - match = re.match("sim-time-limit *= *(.*)", line) + match = re.match(r"sim-time-limit *= *(.*)", line) if match: config_dict["sim_time_limit"] = match.group(1) general_config_dict = config_dicts["General"] @@ -362,14 +362,14 @@ def create_config_dict(config): result = subprocess.run(args, cwd=working_directory, capture_output=True, env=self.get_env()) if result.returncode == 0: # KLUDGE: this was added to test source dependency based task result caching - result.stdout = re.sub("INI dependency: (.*)", "", result.stdout.decode("utf-8")) + result.stdout = re.sub(r"INI dependency: (.*)", "", result.stdout.decode("utf-8")) num_runs = int(result.stdout) else: _logger.warn("Cannot determine number of runs: " + result.stderr.decode("utf-8") + " in " + working_directory) continue sim_time_limit = get_sim_time_limit(config_dicts, config) description = config_dict["description"] - description_abstract = (re.search("\((a|A)bstract\)", description) is not None) if description else False + description_abstract = (re.search(r"\((a|A)bstract\)", description) is not None) if description else False abstract = (config_dict["network"] is None and config_dict["config"] == "General") or config_dict["abstract_config"] or description_abstract emulation = config_dict["emulation"] expected_result = config_dict["expected_result"] @@ -420,7 +420,7 @@ def append_file_if_exists(file_name): 
file_paths.append(self.get_executable(mode="release")) file_paths.append(self.get_executable(mode="debug")) file_paths += list(glob.glob(get_omnetpp_relative_path("lib/*.so"))) - file_paths += list(filter(lambda path: not re.search("formatter", path), glob.glob(get_omnetpp_relative_path("python/**/*.py"), recursive=True))) + file_paths += list(filter(lambda path: not re.search(r"formatter", path), glob.glob(get_omnetpp_relative_path("python/**/*.py"), recursive=True))) append_file_if_exists(self.get_full_path(".omnetpp")) append_file_if_exists(self.get_full_path(".nedfolders")) append_file_if_exists(self.get_full_path(".nedexclusions")) @@ -432,7 +432,7 @@ def append_file_if_exists(file_name): append_file_if_exists(self.get_full_path(os.path.join(self.library_folder, "lib" + dynamic_library + ".so"))) append_file_if_exists(self.get_full_path(os.path.join(self.library_folder, "lib" + dynamic_library + "_dbg.so"))) for ned_folder in self.ned_folders: - if not re.search("test", ned_folder): + if not re.search(r"test", ned_folder): file_paths += glob.glob(self.get_full_path(os.path.join(ned_folder, "**/*.ini")), recursive=True) file_paths += glob.glob(self.get_full_path(os.path.join(ned_folder, "**/*.ned")), recursive=True) for python_folder in self.python_folders: diff --git a/python/inet/simulation/task.py b/python/inet/simulation/task.py index 15d0ce1dca0..176aad5d932 100644 --- a/python/inet/simulation/task.py +++ b/python/inet/simulation/task.py @@ -74,16 +74,16 @@ def __init__(self, subprocess_result=None, cancel=False, **kwargs): self.last_event_number = int(match.group(2)) if match else None self.last_simulation_time = match.group(1) if match else None self.elapsed_cpu_time = None # TODO - match = re.search(" Error: (.*) -- in module (.*)", stderr) + match = re.search(r" Error: (.*) -- in module (.*)", stderr) self.error_message = match.group(1).strip() if match else None self.error_module = match.group(2).strip() if match else None - matching_lines = [re.sub("CREATE (.*)", "\\1", line) for line in stdout.split("\n") if re.search("inet\.", line)] + matching_lines = [re.sub(r"CREATE (.*)", "\\1", line) for line in stdout.split("\n") if re.search(r"inet\.", line)] self.used_types = sorted(list(set(matching_lines))) if self.error_message is None: - match = re.search(" Error: (.*)", stderr) + match = re.search(r" Error: (.*)", stderr) self.error_message = match.group(1).strip() if match else None if self.error_message: - if re.search("The simulation attempted to prompt for user input", self.error_message): + if re.search(r"The simulation attempted to prompt for user input", self.error_message): self.result = "SKIP" self.color = COLOR_CYAN self.expected_result = "SKIP" @@ -303,18 +303,18 @@ def run_protected(self, capture_output=True, extra_args=[], simulation_runner=" # ned_dependency_file_paths = [] # cpp_dependency_file_paths = [] # for line in stdout.splitlines(): - # match = re.match("INI dependency: (.*)", line) + # match = re.match(r"INI dependency: (.*)", line) # if match: # ini_full_path = simulation_project.get_full_path(os.path.join(self.simulation_config.working_directory, match.group(1))) # if not ini_full_path in ini_dependency_file_paths: # ini_dependency_file_paths.append(ini_full_path) - # match = re.match("NED dependency: (.*)", line) + # match = re.match(r"NED dependency: (.*)", line) # if match: # ned_full_path = match.group(1) # if os.path.exists(ned_full_path): # if not ned_full_path in ned_dependency_file_paths: # ned_dependency_file_paths.append(ned_full_path) - # 
match = re.match("CC dependency: (.*)", line) + # match = re.match(r"CC dependency: (.*)", line) # if match: # cpp_full_path = match.group(1) # if not cpp_full_path in cpp_dependency_file_paths: @@ -328,7 +328,7 @@ def run_protected(self, capture_output=True, extra_args=[], simulation_runner=" # while True: # file_names_copy = file_names.copy() # for file_name in file_names_copy: - # full_file_path = simulation_project.get_full_path(f"out/clang-{self.mode}/" + re.sub(".cc", ".o.d", file_name)) + # full_file_path = simulation_project.get_full_path(f"out/clang-{self.mode}/" + re.sub(r".cc", ".o.d", file_name)) # if os.path.exists(full_file_path): # dependency = read_dependency_file(full_file_path) # for key, depends_on_file_names in dependency.items(): diff --git a/python/inet/test/chart.py b/python/inet/test/chart.py index fade1eccb10..2e56d6ff785 100644 --- a/python/inet/test/chart.py +++ b/python/inet/test/chart.py @@ -51,8 +51,8 @@ def run_protected(self, keep_charts=True, output_stream=sys.stdout, **kwargs): folder = os.path.dirname(self.simulation_project.get_full_path(self.analysis_file_name)) file_name = analysis.export_image(chart, folder, workspace, format="png", dpi=150, target_folder=self.simulation_project.media_folder, filename=image_export_filename + "-new") new_file_name = os.path.join(folder, file_name) - old_file_name = os.path.join(folder, re.sub("-new\.png$", ".png", file_name)) - diff_file_name = os.path.join(folder, re.sub("-new\.png$", "-diff.png", file_name)) + old_file_name = os.path.join(folder, re.sub(r"-new\.png$", ".png", file_name)) + diff_file_name = os.path.join(folder, re.sub(r"-new\.png$", "-diff.png", file_name)) if os.path.exists(diff_file_name): os.remove(diff_file_name) if os.path.exists(old_file_name): @@ -157,8 +157,8 @@ def run_protected(self, keep_charts=True, **kwargs): folder = os.path.dirname(self.simulation_project.get_full_path(self.analysis_file_name)) file_name = analysis.export_image(chart, folder, workspace, format="png", dpi=150, target_folder=self.simulation_project.media_folder, filename=image_export_filename + "-new") new_file_name = os.path.join(folder, file_name) - old_file_name = os.path.join(folder, re.sub("-new\.png$", ".png", file_name)) - diff_file_name = os.path.join(folder, re.sub("-new\.png$", "-diff.png", file_name)) + old_file_name = os.path.join(folder, re.sub(r"-new\.png$", ".png", file_name)) + diff_file_name = os.path.join(folder, re.sub(r"-new\.png$", "-diff.png", file_name)) if os.path.exists(diff_file_name): os.remove(diff_file_name) if os.path.exists(old_file_name): @@ -169,7 +169,7 @@ def run_protected(self, keep_charts=True, **kwargs): os.remove(new_file_name) else: if keep_charts: - os.rename(old_file_name, re.sub("-new\.png$", "-old.png", file_name)) + os.rename(old_file_name, re.sub(r"-new\.png$", "-old.png", file_name)) image_diff = numpy.abs(new_image - old_image) matplotlib.image.imsave(diff_file_name, image_diff[:, :, :3]) else: diff --git a/python/inet/test/feature.py b/python/inet/test/feature.py index 6d5755d75c2..0e57ac71932 100644 --- a/python/inet/test/feature.py +++ b/python/inet/test/feature.py @@ -108,18 +108,18 @@ def read_xml_file(filename, repair_hint=None): fail("Cannot parse XML file '{}': {}".format(filename, e), repair_hint) def get_package_folder(package): - if re.search("inet.examples", package): - return re.sub("inet/", "", re.sub("\\.", "/", package)) - elif re.search("inet.showcases", package): - return re.sub("inet/", "", re.sub("\\.", "/", package)) - elif re.search("inet.tutorials", 
package): - return re.sub("inet/", "", re.sub("\\.", "/", package)) - elif re.search("inet.tests", package): - return re.sub("inet/", "", re.sub("\\.", "/", package)) - elif re.search("inet.validation", package): - return re.sub("inet/", "tests/", re.sub("\\.", "/", package)) + if re.search(r"inet.examples", package): + return re.sub(r"inet/", "", re.sub(r"\\.", "/", package)) + elif re.search(r"inet.showcases", package): + return re.sub(r"inet/", "", re.sub(r"\\.", "/", package)) + elif re.search(r"inet.tutorials", package): + return re.sub(r"inet/", "", re.sub(r"\\.", "/", package)) + elif re.search(r"inet.tests", package): + return re.sub(r"inet/", "", re.sub(r"\\.", "/", package)) + elif re.search(r"inet.validation", package): + return re.sub(r"inet/", "tests/", re.sub(r"\\.", "/", package)) else: - return "src/" + re.sub("\\.", "/", package) + return "src/" + re.sub(r"\\.", "/", package) def get_features(oppfeatures): result = [] @@ -240,9 +240,9 @@ def get_package_to_used_headers(packages): with open(file_name, "r") as file: if_counter = 0 for line in file: - if re.search("^#if[ d]", line): + if re.search(r"^#if[ d]", line): if_counter += 1 - elif re.search("^#endif", line): + elif re.search(r"^#endif", line): if_counter -= 1 if if_counter == 0: match = re.match(r"^#include \"([\w\.\/]+)\"", line) diff --git a/python/inet/test/fingerprint/task.py b/python/inet/test/fingerprint/task.py index 3b32c2feb80..bef1b899c2b 100644 --- a/python/inet/test/fingerprint/task.py +++ b/python/inet/test/fingerprint/task.py @@ -58,7 +58,7 @@ def __composite_values__(self): @classmethod def parse(self, text): - match = re.match("(.*)/(.*)", text) + match = re.match(r"(.*)/(.*)", text) fingerprint = match.groups()[0] ingredients = match.groups()[1] return Fingerprint(fingerprint, ingredients) @@ -92,7 +92,7 @@ def get_fingerprint_trajectory(self): eventlog_file = open(eventlog_file_path) fingerprints = [] for line in eventlog_file: - match = re.match("E # .* f (.*)", line) + match = re.match(r"E # .* f (.*)", line) if match: fingerprints.append(Fingerprint(match.group(1))) eventlog_file.close() @@ -310,15 +310,15 @@ def debug(self): def get_calculated_fingerprint(simulation_result, ingredients): stdout = simulation_result.subprocess_result.stdout.decode("utf-8") stderr = simulation_result.subprocess_result.stderr.decode("utf-8") - match = re.search("Fingerprint successfully verified:.*? ([0-9a-f]{4}-[0-9a-f]{4})/" + ingredients, stdout) + match = re.search(r"Fingerprint successfully verified:.*? ([0-9a-f]{4}-[0-9a-f]{4})/" + ingredients, stdout) if match: value = match.groups()[0] else: - match = re.search("Fingerprint mismatch! calculated:.*? ([0-9a-f]{4}-[0-9a-f]{4})/" + ingredients + ".*expected", stdout) + match = re.search(r"Fingerprint mismatch! calculated:.*? ([0-9a-f]{4}-[0-9a-f]{4})/" + ingredients + ".*expected", stdout) if match: value = match.groups()[0] else: - match = re.search("Fingerprint mismatch! calculated:.*? ([0-9a-f]{4}-[0-9a-f]{4})/" + ingredients + ".*expected", stderr) + match = re.search(r"Fingerprint mismatch! calculated:.*? 
([0-9a-f]{4}-[0-9a-f]{4})/" + ingredients + ".*expected", stderr) if match: value = match.groups()[0] else: diff --git a/python/inet/test/sanitizer.py b/python/inet/test/sanitizer.py index 016fc8b9019..0e82e4d4841 100644 --- a/python/inet/test/sanitizer.py +++ b/python/inet/test/sanitizer.py @@ -12,9 +12,9 @@ def run_protected(self, output_stream=sys.stdout, **kwargs): simulation_task_result = self.simulation_task.run_protected(output_stream=output_stream, **kwargs) stderr = simulation_task_result.subprocess_result.stderr.decode("utf-8") test_task_result = super().check_simulation_task_result(simulation_task_result, **kwargs) - match = re.search("SUMMARY: (.*)", stderr) + match = re.search(r"SUMMARY: (.*)", stderr) if match: - test_task_result.reason = re.sub(" in", "", match.group(1)) + test_task_result.reason = re.sub(r" in", "", match.group(1)) # TODO isn't there a better way? if test_task_result.result == "PASS": test_task_result.result = "FAIL" diff --git a/python/inet/test/statistical.py b/python/inet/test/statistical.py index 8d3fbf73f6f..9316f5f18da 100644 --- a/python/inet/test/statistical.py +++ b/python/inet/test/statistical.py @@ -54,7 +54,7 @@ def check_simulation_task_result(self, simulation_task_result, result_name_filte stored_scalar_result_file_name = simulation_project.get_full_path(os.path.join(simulation_project.statistics_folder, working_directory, self.get_scalar_file_name())) _logger.debug(f"Reading result file {current_scalar_result_file_name}") current_df = _read_scalar_result_file(current_scalar_result_file_name) - scalar_result_diff_file_name = re.sub(".sca$", ".diff", stored_scalar_result_file_name) + scalar_result_diff_file_name = re.sub(r".sca$", ".diff", stored_scalar_result_file_name) if os.path.exists(scalar_result_diff_file_name): os.remove(scalar_result_diff_file_name) if os.path.exists(stored_scalar_result_file_name): @@ -77,14 +77,14 @@ def check_simulation_task_result(self, simulation_task_result, result_name_filte df = df[df.apply(lambda row: matches_filter(row["name"], result_name_filter, exclude_result_name_filter, full_match) and \ matches_filter(row["module"], result_module_filter, exclude_result_module_filter, full_match), axis=1)] sorted_df = df.sort_values(by="relative_error", ascending=False) - scalar_result_csv_file_name = re.sub(".sca$", ".csv", stored_scalar_result_file_name) + scalar_result_csv_file_name = re.sub(r".sca$", ".csv", stored_scalar_result_file_name) sorted_df.to_csv(scalar_result_csv_file_name, float_format="%.15g") id = df["relative_error"].idxmax() if math.isnan(id): id = next(iter(df.index), None) reason = df.loc[id].to_string() - reason = re.sub(" +", " = ", reason) - reason = re.sub("\\n", ", ", reason) + reason = re.sub(r" +", " = ", reason) + reason = re.sub(r"\\n", ", ", reason) return self.task_result_class(task=self, simulation_task_result=simulation_task_result, result="FAIL", reason=reason) else: return self.task_result_class(task=self, simulation_task_result=simulation_task_result, result="PASS") @@ -154,7 +154,7 @@ def check_simulation_task_result(self, simulation_task_result, result_name_filte stored_scalar_result_file_name = simulation_project.get_full_path(os.path.join(simulation_project.statistics_folder, working_directory, self.get_scalar_file_name())) _logger.debug(f"Reading result file {current_scalar_result_file_name}") current_df = _read_scalar_result_file(current_scalar_result_file_name) - scalar_result_diff_file_name = re.sub(".sca$", ".diff", stored_scalar_result_file_name) + 
scalar_result_diff_file_name = re.sub(r".sca$", ".diff", stored_scalar_result_file_name) if os.path.exists(scalar_result_diff_file_name): os.remove(scalar_result_diff_file_name) if not os.path.exists(stored_scalar_result_file_name): diff --git a/python/inet/test/validation.py b/python/inet/test/validation.py index 3d3b9fd87dd..baa287fca2d 100644 --- a/python/inet/test/validation.py +++ b/python/inet/test/validation.py @@ -109,15 +109,15 @@ def compute_asynchronousshaper_icct_endtoend_delay_from_simulation_results(**kwa filter_expression = """type =~ scalar AND name =~ meanBitLifeTimePerPacket:histogram:max""" df = read_result_files(inet_project.get_full_path("tests/validation/tsn/trafficshaping/asynchronousshaper/icct/results/*.sca"), filter_expression=filter_expression, include_fields_as_scalars=True) df = get_scalars(df) - df["name"] = df["name"].map(lambda name: re.sub(".*(min|max)", "\\1", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N6.app\\[[0-4]\\].*", "Flow 4, Class A", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N6.app\\[[5-9]\\].*", "Flow 5, Class B", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N7.app\\[[0-9]\\].*", "Flow 1, CDT", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N7.app\\[1[0-9]\\].*", "Flow 2, Class A", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N7.app\\[2[0-9]\\].*", "Flow 3, Class B", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N7.app\\[3[0-4]\\].*", "Flow 6, Class A", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N7.app\\[3[5-9]\\].*", "Flow 7, Class B", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*N7.app\\[40\\].*", "Flow 8, Best Effort", name)) + df["name"] = df["name"].map(lambda name: re.sub(r".*(min|max)", "\\1", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N6.app\\[[0-4]\\].*", "Flow 4, Class A", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N6.app\\[[5-9]\\].*", "Flow 5, Class B", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N7.app\\[[0-9]\\].*", "Flow 1, CDT", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N7.app\\[1[0-9]\\].*", "Flow 2, Class A", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N7.app\\[2[0-9]\\].*", "Flow 3, Class B", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N7.app\\[3[0-4]\\].*", "Flow 6, Class A", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N7.app\\[3[5-9]\\].*", "Flow 7, Class B", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*N7.app\\[40\\].*", "Flow 8, Best Effort", name)) df = pd.pivot_table(df, index="module", columns="name", values="value", aggfunc="max") return df * 1000000 @@ -155,11 +155,11 @@ def compute_asynchronousshaper_core4inet_endtoend_delay_from_simulation_results( filter_expression = """type =~ scalar AND (name =~ meanBitLifeTimePerPacket:histogram:min OR name =~ meanBitLifeTimePerPacket:histogram:max OR name =~ meanBitLifeTimePerPacket:histogram:mean OR name =~ meanBitLifeTimePerPacket:histogram:stddev)""" df = read_result_files(inet_project.get_full_path("tests/validation/tsn/trafficshaping/asynchronousshaper/core4inet/results/*.sca"), filter_expression=filter_expression, include_fields_as_scalars=True) df = get_scalars(df) - df["name"] = df["name"].map(lambda name: re.sub(".*(min|max|mean|stddev)", "\\1", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[0\\].*", "Best effort", name)) - 
df["module"] = df["module"].map(lambda name: re.sub(".*app\\[1\\].*", "Medium", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[2\\].*", "High", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[3\\].*", "Critical", name)) + df["name"] = df["name"].map(lambda name: re.sub(r".*(min|max|mean|stddev)", "\\1", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[0\\].*", "Best effort", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[1\\].*", "Medium", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[2\\].*", "High", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[3\\].*", "Critical", name)) df = df.loc[df["module"]!="Best effort"] df = pd.pivot_table(df, index="module", columns="name", values="value") return df * 1000000 @@ -222,11 +222,11 @@ def compute_creditbasedshaper_endtoend_delay_from_simulation_results(**kwargs): filter_expression = """type =~ scalar AND (name =~ meanBitLifeTimePerPacket:histogram:min OR name =~ meanBitLifeTimePerPacket:histogram:max OR name =~ meanBitLifeTimePerPacket:histogram:mean OR name =~ meanBitLifeTimePerPacket:histogram:stddev)""" df = read_result_files(inet_project.get_full_path("tests/validation/tsn/trafficshaping/creditbasedshaper/results/*.sca"), filter_expression=filter_expression, include_fields_as_scalars=True) df = get_scalars(df) - df["name"] = df["name"].map(lambda name: re.sub(".*(min|max|mean|stddev)", "\\1", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[0\\].*", "Best effort", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[1\\].*", "Medium", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[2\\].*", "High", name)) - df["module"] = df["module"].map(lambda name: re.sub(".*app\\[3\\].*", "Critical", name)) + df["name"] = df["name"].map(lambda name: re.sub(r".*(min|max|mean|stddev)", "\\1", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[0\\].*", "Best effort", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[1\\].*", "Medium", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[2\\].*", "High", name)) + df["module"] = df["module"].map(lambda name: re.sub(r".*app\\[3\\].*", "Critical", name)) df = df.loc[df["module"]!="Best effort"] df = pd.pivot_table(df, index="module", columns="name", values="value") return df * 1000000