Skip to content

Commit

Permalink
cleaning and fixing
Browse files Browse the repository at this point in the history
  • Loading branch information
yusufuyanik1 committed Nov 21, 2023
1 parent d29c8c2 commit bb673fa
Show file tree
Hide file tree
Showing 4 changed files with 29 additions and 58 deletions.
74 changes: 26 additions & 48 deletions python/pdstools/adm/ADMDatamart.py
Original file line number Diff line number Diff line change
Expand Up @@ -1258,7 +1258,6 @@ def fillMissing(self) -> ADMDatamart:
def generateReport(
self,
name: Optional[str] = None,
output_location: Path = Path("."),
working_dir: Path = Path("."),
*,
modelid: Optional[str] = "",
Expand All @@ -1277,8 +1276,6 @@ def generateReport(
----------
name : Optional[str], default = None
The name of the report.
output_location : Path, default = Path(".")
The location where the report will be saved.
working_dir : Path, default = Path(".")
The working directory. Cached files will be written here.
*
Expand Down Expand Up @@ -1325,9 +1322,31 @@ def get_report_files(modelid):

def get_output_filename(name, report, modelid, output_type):
    """Build the output file name for a generated report.

    Parameters
    ----------
    name : Optional[str]
        Optional custom report name; spaces are replaced with
        underscores so the name is safe as a single CLI argument.
    report : str
        Report type: "ModelReport" or "HealthCheck".
    modelid : Optional[str]
        The model ID; required when report == "ModelReport".
    output_type : str
        File extension of the rendered output (e.g. "html").

    Raises
    ------
    ValueError
        If modelid is None for a ModelReport, or the report type
        is not recognized.
    """
    if name is not None:
        # Spaces would split the quarto --output argument downstream.
        name = name.replace(" ", "_")
    if report == "ModelReport":
        if modelid is None:
            raise ValueError("ModelID cannot be None for a ModelReport.")
        if name is not None:
            return f"{report}_{name}_{modelid}.{output_type}"
        return f"{report}_{modelid}.{output_type}"
    if report == "HealthCheck":
        if name is not None:
            return f"{report}_{name}.{output_type}"
        return f"{report}.{output_type}"
    raise ValueError("Invalid report type.")

def check_output_file(working_dir, output_filename, verbose, delete_temp_files):
    """Verify the rendered report exists in working_dir; raise otherwise.

    On failure, optionally removes temporary files before raising.
    NOTE(review): reads `kwargs` and `_delete_temp_files` from the
    enclosing scope — confirm both are in scope at the call site.
    """
    if os.path.exists(working_dir / output_filename):
        return
    message = "Error when generating healthcheck."
    # Only suggest verbose mode when neither verbose output nor an
    # output file would already show the failure details.
    if not (verbose or kwargs.get("output_to_file", False)):
        message += "Set 'verbose' to True to see the full output"
    if delete_temp_files:
        _delete_temp_files(working_dir)
    raise ValueError(message)

def get_files(working_dir, cached_data):
if not cached_data:
Expand Down Expand Up @@ -1376,44 +1395,8 @@ def run_bash_command(bashCommand, working_dir, **kwargs):
)
process.communicate()

def check_output_file(
    working_dir, output_filename, verbose, delete_temp_files, files
):
    # Verify the rendered report exists in working_dir; raise ValueError if not.
    # NOTE(review): the `files` parameter is accepted but never used in this body.
    # Reads `kwargs` and `_delete_temp_files` from the enclosing scope.
    if not os.path.exists(working_dir / output_filename):
        msg = "Error when generating healthcheck."
        # Only suggest verbose mode when no other diagnostics would be visible.
        if not verbose and not kwargs.get("output_to_file", False):
            msg += "Set 'verbose' to True to see the full output"
        if delete_temp_files:
            _delete_temp_files(working_dir)
        raise ValueError(msg)

def check_output_location(output_location):
    """Ensure the output directory exists, creating it if necessary.

    Parameters
    ----------
    output_location : path-like
        Directory the report should be written to.

    Raises
    ------
    OSError
        If the directory does not exist and cannot be created.
    """
    # exist_ok=True avoids the check-then-create race of the previous
    # `if not os.path.exists(...)` guard and is a no-op for existing dirs.
    try:
        os.makedirs(output_location, exist_ok=True)
    except OSError as e:
        raise OSError(
            "Creation of the directory %s failed. Please provide a valid path."
            % output_location
        ) from e

def get_filename(output_location, output_filename):
    """Return "<output_location>/<output_filename>" as a single path string."""
    return "{}/{}".format(output_location, output_filename)

def move_file_if_needed(working_dir, output_location, output_filename):
    """Move the generated report from working_dir into output_location.

    If a file with the same name already exists at the destination, the
    moved file is renamed to "<stem> (<n>).<ext>" with the smallest free
    n, so existing reports are never overwritten.

    Fixes: the collision branch previously rebuilt the target name from
    `output_filename` alone (and the scraped source shows corrupted
    "(unknown)" placeholders), which dropped the output_location prefix;
    the destination directory is now applied consistently.
    """
    if output_location == working_dir:
        return  # report already rendered in place; nothing to move
    destination = os.path.join(str(output_location), output_filename)
    if os.path.isfile(destination):
        stem, ext = output_filename.rsplit(".", 1)
        counter = 1
        # Probe "stem (n).ext" in the destination folder until a free name is found.
        while os.path.isfile(
            os.path.join(str(output_location), f"{stem} ({counter}).{ext}")
        ):
            counter += 1
        destination = os.path.join(str(output_location), f"{stem} ({counter}).{ext}")
    shutil.move(os.path.join(str(working_dir), output_filename), destination)

# Main function logic
healthcheck_file, report = get_report_files(modelid)
working_dir, output_location = Path(working_dir), Path(output_location)
verbose = kwargs.get("verbose", self.verbose)

if self.import_strategy == "lazy" and not allow_collect:
Expand All @@ -1433,16 +1416,11 @@ def move_file_if_needed(working_dir, output_location, output_filename):

bashCommand = f"quarto render {healthcheck_file} --to {output_type} --output {output_filename} --execute-params params.yaml"
run_bash_command(bashCommand, working_dir, **kwargs)
check_output_file(
working_dir, output_filename, verbose, delete_temp_files, files
)
check_output_location(output_location)
filename = get_filename(output_location, output_filename)
move_file_if_needed(working_dir, output_location, output_filename)
check_output_file(working_dir, output_filename, verbose, delete_temp_files)
if delete_temp_files:
_delete_temp_files(working_dir, files)

return filename
return f"{working_dir}/{output_filename}"

def exportTables(self, file: Path = "Tables.xlsx", predictorBinning=False):
"""Exports all tables from `pdstools.adm.Tables` into one Excel file.
Expand Down
9 changes: 2 additions & 7 deletions python/pdstools/app/pages/3- Reports.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@
name=name,
output_type=output_type,
working_dir=working_dir,
output_location=working_dir,
delete_temp_files=delete_temp_files,
output_to_file=True,
verbose=True,
Expand Down Expand Up @@ -173,14 +172,8 @@
outfile = (
st.session_state["dm"]
.applyGlobalQuery(st.session_state.get("filters", None))
.applyGlobalQuery(
pl.col("ModelID").is_in(
st.session_state["selected_models"]
)
)
.generateReport(
name="",
output_location=working_dir,
working_dir=working_dir,
modelid=modelid,
delete_temp_files=del_cache,
Expand Down Expand Up @@ -213,9 +206,11 @@
file_name=st.session_state["model_report_name"],
)
st.balloons()
st.session_state["data_is_cached"] = False
except Exception as e:
st.error(f"""An error occured: {e}""")
traceback_str = traceback.format_exc()
st.session_state["data_is_cached"] = False
with open(working_dir / "log.txt", "a") as f:
f.write(traceback_str)
with open(working_dir / "log.txt", "rb") as f:
Expand Down
3 changes: 0 additions & 3 deletions python/pdstools/reports/ModelReport.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -137,23 +137,20 @@ else:
```{python}
channel_name = (
datamart.last(strategy="lazy")
.filter(pl.col("ModelID") == modelid)
.select(pl.format("{}/{}", "Direction", "Channel"))
.unique()
.collect()
.item(0, 0)
)
model_name_in_context = (
datamart.last(strategy="lazy")
.filter(pl.col("ModelID") == modelid)
.select(pl.format("{}/{}/{}/{}", "Issue", "Group", "Name", "Treatment"))
.unique()
.collect()
.item(0, 0)
)
model_name = (
datamart.last(strategy="lazy")
.filter(pl.col("ModelID") == modelid)
.select(pl.format("{}/{}", "Name", "Treatment"))
.unique()
.collect()
Expand Down
1 change: 1 addition & 0 deletions python/pdstools/utils/streamlit_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,6 +339,7 @@ def process_files(file_paths: List[str], file_name: str) -> Tuple[bytes, str]:
or the zip file's data as bytes and the zip file's name if there are multiple files.
"""
if len(file_paths) == 1:
file_name = file_name.split("/")[-1] if "/" in file_name else file_name
with open(file_paths[0], "rb") as file:
return file.read(), file_name
elif len(file_paths) > 1:
Expand Down

0 comments on commit bb673fa

Please sign in to comment.