diff --git a/python/pdstools/adm/ADMDatamart.py b/python/pdstools/adm/ADMDatamart.py
index 8d0c6fb5..a7de33dd 100644
--- a/python/pdstools/adm/ADMDatamart.py
+++ b/python/pdstools/adm/ADMDatamart.py
@@ -1258,7 +1258,6 @@ def fillMissing(self) -> ADMDatamart:
     def generateReport(
         self,
         name: Optional[str] = None,
-        output_location: Path = Path("."),
         working_dir: Path = Path("."),
         *,
         modelid: Optional[str] = "",
@@ -1277,8 +1276,6 @@ def generateReport(
         ----------
         name : Optional[str], default = None
             The name of the report.
-        output_location : Path, default = Path(".")
-            The location where the report will be saved.
         working_dir : Path, default = Path(".")
             The working directory. Cached files will be written here.
         *
@@ -1325,9 +1322,31 @@ def get_report_files(modelid):
 
         def get_output_filename(name, report, modelid, output_type):
             if name is not None:
-                return f"{report}{name}{modelid}.{output_type}"
+                name = name.replace(" ", "_")
+            if report == "ModelReport":
+                if modelid is not None:
+                    if name is not None:
+                        return f"{report}_{name}_{modelid}.{output_type}"
+                    else:
+                        return f"{report}_{modelid}.{output_type}"
+                else:
+                    raise ValueError("ModelID cannot be None for a ModelReport.")
+            elif report == "HealthCheck":
+                if name is not None:
+                    return f"{report}_{name}.{output_type}"
+                else:
+                    return f"{report}.{output_type}"
             else:
-                return f"{report}{modelid}.{output_type}"
+                raise ValueError("Invalid report type.")
+
+        def check_output_file(working_dir, output_filename, verbose, delete_temp_files):
+            if not os.path.exists(working_dir / output_filename):
+                msg = "Error when generating healthcheck."
+                if not verbose and not kwargs.get("output_to_file", False):
+                    msg += "Set 'verbose' to True to see the full output"
+                if delete_temp_files:
+                    _delete_temp_files(working_dir)
+                raise ValueError(msg)
 
         def get_files(working_dir, cached_data):
             if not cached_data:
@@ -1376,44 +1395,8 @@ def run_bash_command(bashCommand, working_dir, **kwargs):
             )
             process.communicate()
 
-        def check_output_file(
-            working_dir, output_filename, verbose, delete_temp_files, files
-        ):
-            if not os.path.exists(working_dir / output_filename):
-                msg = "Error when generating healthcheck."
-                if not verbose and not kwargs.get("output_to_file", False):
-                    msg += "Set 'verbose' to True to see the full output"
-                if delete_temp_files:
-                    _delete_temp_files(working_dir)
-                raise ValueError(msg)
-
-        def check_output_location(output_location):
-            if not os.path.exists(output_location):
-                try:
-                    os.makedirs(output_location)
-                except OSError as e:
-                    raise OSError(
-                        "Creation of the directory %s failed. Please provide a valid path."
-                        % output_location
-                    ) from e
-
-        def get_filename(output_location, output_filename):
-            return f"{output_location}/{output_filename}"
-
-        def move_file_if_needed(working_dir, output_location, output_filename):
-            if output_location != working_dir:
-                filename = get_filename(output_location, output_filename)
-                if os.path.isfile(filename):
-                    counter = 1
-                    filename, ext = output_filename.rsplit(".", 1)
-                    while os.path.isfile(f"{filename} ({counter}).{ext}"):
-                        counter += 1
-                    filename = f"{filename} ({counter}).{ext}"
-                shutil.move(f"{working_dir}/{output_filename}", filename)
-
         # Main function logic
         healthcheck_file, report = get_report_files(modelid)
-        working_dir, output_location = Path(working_dir), Path(output_location)
         verbose = kwargs.get("verbose", self.verbose)
 
         if self.import_strategy == "lazy" and not allow_collect:
@@ -1433,16 +1416,11 @@ def move_file_if_needed(working_dir, output_location, output_filename):
         bashCommand = f"quarto render {healthcheck_file} --to {output_type} --output {output_filename} --execute-params params.yaml"
         run_bash_command(bashCommand, working_dir, **kwargs)
 
-        check_output_file(
-            working_dir, output_filename, verbose, delete_temp_files, files
-        )
-        check_output_location(output_location)
-        filename = get_filename(output_location, output_filename)
-        move_file_if_needed(working_dir, output_location, output_filename)
+        check_output_file(working_dir, output_filename, verbose, delete_temp_files)
 
         if delete_temp_files:
             _delete_temp_files(working_dir, files)
-        return filename
+        return f"{working_dir}/{output_filename}"
 
     def exportTables(self, file: Path = "Tables.xlsx", predictorBinning=False):
         """Exports all tables from `pdstools.adm.Tables` into one Excel file.
diff --git a/python/pdstools/app/pages/3- Reports.py b/python/pdstools/app/pages/3- Reports.py
index 5c444ce1..3c798ffa 100644
--- a/python/pdstools/app/pages/3- Reports.py
+++ b/python/pdstools/app/pages/3- Reports.py
@@ -46,7 +46,6 @@
             name=name,
             output_type=output_type,
             working_dir=working_dir,
-            output_location=working_dir,
             delete_temp_files=delete_temp_files,
             output_to_file=True,
             verbose=True,
@@ -173,14 +172,8 @@
                     outfile = (
                         st.session_state["dm"]
                         .applyGlobalQuery(st.session_state.get("filters", None))
-                        .applyGlobalQuery(
-                            pl.col("ModelID").is_in(
-                                st.session_state["selected_models"]
-                            )
-                        )
                         .generateReport(
                             name="",
-                            output_location=working_dir,
                             working_dir=working_dir,
                             modelid=modelid,
                             delete_temp_files=del_cache,
@@ -213,9 +206,11 @@
                        file_name=st.session_state["model_report_name"],
                    )
                    st.balloons()
+                    st.session_state["data_is_cached"] = False
                except Exception as e:
                    st.error(f"""An error occured: {e}""")
                    traceback_str = traceback.format_exc()
+                    st.session_state["data_is_cached"] = False
                    with open(working_dir / "log.txt", "a") as f:
                        f.write(traceback_str)
                    with open(working_dir / "log.txt", "rb") as f:
diff --git a/python/pdstools/reports/ModelReport.qmd b/python/pdstools/reports/ModelReport.qmd
index f608fe26..81bcee41 100644
--- a/python/pdstools/reports/ModelReport.qmd
+++ b/python/pdstools/reports/ModelReport.qmd
@@ -137,7 +137,6 @@ else:
 ```{python}
 channel_name = (
     datamart.last(strategy="lazy")
-    .filter(pl.col("ModelID") == modelid)
     .select(pl.format("{}/{}", "Direction", "Channel"))
     .unique()
     .collect()
@@ -145,7 +144,6 @@ channel_name = (
 )
 model_name_in_context = (
     datamart.last(strategy="lazy")
-    .filter(pl.col("ModelID") == modelid)
     .select(pl.format("{}/{}/{}/{}", "Issue", "Group", "Name", "Treatment"))
     .unique()
     .collect()
@@ -153,7 +151,6 @@ model_name_in_context = (
 )
 model_name = (
     datamart.last(strategy="lazy")
-    .filter(pl.col("ModelID") == modelid)
     .select(pl.format("{}/{}", "Name", "Treatment"))
     .unique()
     .collect()
diff --git a/python/pdstools/utils/streamlit_utils.py b/python/pdstools/utils/streamlit_utils.py
index bdcc670c..693ea47f 100644
--- a/python/pdstools/utils/streamlit_utils.py
+++ b/python/pdstools/utils/streamlit_utils.py
@@ -339,6 +339,7 @@ def process_files(file_paths: List[str], file_name: str) -> Tuple[bytes, str]:
     or the zip file's data as bytes and the zip file's name if there are multiple files.
     """
     if len(file_paths) == 1:
+        file_name = file_name.split("/")[-1] if "/" in file_name else file_name
        with open(file_paths[0], "rb") as file:
            return file.read(), file_name
    elif len(file_paths) > 1:
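
For reference, a minimal sketch (not part of the patch) of how the reworked generateReport is expected to be called after this change. The datamart setup via datasets.CDHSample() is only an illustrative stand-in for any ADMDatamart instance, and the assumption that omitting modelid produces the HealthCheck report follows from get_report_files above; the keyword arguments and the returned path are the ones visible in the diff.

    # Hypothetical usage sketch: the report is rendered into working_dir and
    # generateReport now returns that path directly (no output_location anymore).
    from pathlib import Path
    from pdstools import datasets

    dm = datasets.CDHSample()  # assumption: any ADMDatamart instance works here
    outfile = dm.generateReport(
        name="My Report",             # spaces become "_" in the output filename
        working_dir=Path("reports"),  # cached files and the rendered report both land here
        output_type="html",
        delete_temp_files=True,
    )
    # Expected: "reports/HealthCheck_My_Report.html"; with a modelid set, the
    # filename pattern is "ModelReport_<name>_<modelid>.html" instead.
    print(outfile)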