aggregated performance updated push
Ashawini Chudaman Thakur committed Jul 19, 2023
1 parent 758672b commit 4d588b9
Showing 1 changed file with 17 additions and 16 deletions.
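
The commit replaces the per-name running total with a small per-name record holding both an accumulated duration and an event count, and then reports the average duration per task name. A minimal standalone sketch of that aggregation pattern follows; the event names and durations are made up for illustration, and the .find("<") computation of index_of_open_bracket is an assumption, since the hunks below only show the variable being used.

# Sketch only: "events" stands in for the ctf_data["traceEvents"] entries built by pbt_to_ctf();
# names and durations are invented for illustration.
events = [
    {"name": "potrf_dpotrf<0>", "dur": 1.25},
    {"name": "potrf_dpotrf<1>", "dur": 0.75},
    {"name": "potrf_dtrsm<0, 1>", "dur": 2.00},
]

aggregated_durations = {}
for event_trace in events:
    # Strip the "<...>" argument suffix so every instance of a task type shares one key.
    # (Assumption: the real script computes index_of_open_bracket this way.)
    index_of_open_bracket = event_trace["name"].find("<")
    if index_of_open_bracket != -1:
        name = event_trace["name"][:index_of_open_bracket]
        duration = event_trace["dur"]
        if name in aggregated_durations:
            aggregated_durations[name]["duration"] += duration
            aggregated_durations[name]["count"] += 1
        else:
            # If the name doesn't exist yet, create a new entry
            aggregated_durations[name] = {"duration": duration, "count": 1}

# Average duration per task name, matching the new print(averages) line.
averages = {name: d["duration"] / d["count"] for name, d in aggregated_durations.items()}
print(averages)  # {'potrf_dpotrf': 1.0, 'potrf_dtrsm': 2.0}

Keeping the count alongside the total is what lets the new print(averages) line report a mean per task type instead of a single accumulated figure.
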
33 changes: 17 additions & 16 deletions bin/pbt_to_ctf.py
@@ -28,7 +28,7 @@ def read_pbt(pbt_files_list):

print('There are ' + str(len(trace.events)) + ' events in this trace', end=' ')
for e in range(len(trace.events)):
- print('id===', trace.events.id[e], ' node_id=', trace.events.node_id[e],' stream_id=',trace.events.stream_id[e], 'key=' ,trace.events.key[e],' type=',trace.events.type[e],' b=',trace.events.begin[e],' e=',trace.events.end[e])
+ print('id===', trace.events.id[e], ' node_id=', trace.events.node_id[e], ' stream_id=', trace.events.stream_id[e], 'key=' ,trace.events.key[e], ' type=', trace.events.type[e], ' b=', trace.events.begin[e], ' e=', trace.events.end[e])

import json
import re
@@ -41,18 +41,18 @@ def bool(str)
def pbt_to_ctf(pbt_files_list, ctf_filename, skip_parsec_events, skip_mpi_events):
ctf_data = {"traceEvents": []}
# Dictionary to store aggregated durations
- aggregated_durations = {}
+ aggregated_durations = {}

ptt_filename = pbt2ptt.convert(pbt_files_list, multiprocess=False)
trace = ptt.from_hdf(ptt_filename)

for e in range(len(trace.events)):
- # print('id=',trace.events.id[e],' node_id=',trace.events.node_id[e],' stream_id=',trace.events.stream_id[e],'key=',trace.events.key[e],' type=',trace.events.type[e],' b=',trace.events.begin[e],' e=',trace.events.end[e])
+ # print('id=', trace.events.id[e], ' node_id=', trace.events.node_id[e], ' stream_id=', trace.events.stream_id[e], 'key=', trace.events.key[e], ' type=', trace.events.type[e], ' b=', trace.events.begin[e], ' e=', trace.events.end[e])
# print('\n')

- if(skip_parsec_events == True and trace.event_names[trace.events.type[e]].startswith("PARSEC")):
+ if skip_parsec_events == True and trace.event_names[trace.events.type[e]].startswith("PARSEC"):
continue
- if(skip_mpi_events == True and trace.event_names[trace.events.type[e]].startswith("MPI")):
+ if skip_mpi_events == True and trace.event_names[trace.events.type[e]].startswith("MPI"):
continue

ctf_event = {}
@@ -61,9 +61,9 @@ def pbt_to_ctf(pbt_files_list, ctf_filename, skip_parsec_events, skip_mpi_events
ctf_event["dur"] = 0.001 * (trace.events.end[e] - trace.events.begin[e]) # when we started, in ms
ctf_event["name"] = trace.event_names[trace.events.type[e]]

- if trace.events.key[e] != None:
+ if trace.events.key[e] is not None:
ctf_event["args"] = trace.events.key[e].decode('utf-8').rstrip('\x00')
ctf_event["name"] = trace.event_names[trace.events.type[e]]+"<"+ctf_event["args"]+">"
ctf_event["name"] = trace.event_names[trace.events.type[e]] + "<" + ctf_event["args"] + ">"

ctf_event["pid"] = trace.events.node_id[e]
tid = trace.streams.th_id[trace.events.stream_id[e]]
@@ -78,16 +78,17 @@ def pbt_to_ctf(pbt_files_list, ctf_filename, skip_parsec_events, skip_mpi_events
if index_of_open_bracket != -1:
name = event_trace["name"][:index_of_open_bracket]
duration = event_trace["dur"]
- seconds = duration / 1000


if name in aggregated_durations:
- aggregated_durations[name] += duration
+ aggregated_durations[name]["duration"] += duration
+ aggregated_durations[name]["count"] += 1
else:
- aggregated_durations[name] = duration
+ # If name doesn't exist, create a new entry
+ aggregated_durations[name] = {"duration": duration, "count": 1}

- # Print aggregated durations
- for name, duration in aggregated_durations.items():
- print(f"{name}: {duration} ms, equivalent to: {seconds} seconds")
+ # Calculate the averages for each name
+ averages = {name: data["duration"] / data["count"] for name, data in aggregated_durations.items()}
+ print(averages)

with open(ctf_filename, "w") as chrome_trace:
json.dump(ctf_data, chrome_trace)
@@ -105,10 +105,10 @@ def pbt_to_ctf(pbt_files_list, ctf_filename, skip_parsec_events, skip_mpi_events
skip_mpi_events = bool(sys.argv[4])

# iterate over all files within the directory that start with sys.argv[1]
- pbt_files_list=[]
+ pbt_files_list = []
dirname = os.path.dirname(pbt_file_prefix)
for file in os.listdir(dirname):
- file_fullname = os.path.join(dirname,file)
+ file_fullname = os.path.join(dirname, file)
if file_fullname.startswith(pbt_file_prefix) and ".prof" in file_fullname and file_fullname != ctf_file_name:
print("found file ", file_fullname)
pbt_files_list.append(file_fullname)
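
For context, the file the script writes is a standard Chrome Trace Format document: one JSON object whose "traceEvents" list holds a record per event, using the ts/dur/name/args/pid/tid fields visible in the hunks above. Below is a minimal sketch with made-up values; the field comments paraphrase the conversion, and anything not shown in this diff (including the output file name used here) is an assumption.

import json

# Illustrative only: one hand-written entry in the shape pbt_to_ctf() produces.
ctf_data = {
    "traceEvents": [
        {
            "ts": 10.0,                 # event begin time (the diff shows dur = 0.001 * (end - begin))
            "dur": 1.25,                # event duration
            "name": "potrf_dpotrf<0>",  # event_names[type] plus the decoded key in <...>
            "args": "0",                # decoded key, when the event carries one
            "pid": 0,                   # node_id
            "tid": 3,                   # stream/thread id from trace.streams.th_id
        }
    ]
}

with open("example_ctf.json", "w") as chrome_trace:
    json.dump(ctf_data, chrome_trace)
# The resulting JSON can be loaded in chrome://tracing or the Perfetto UI.

On the command line, the sys.argv indices visible above suggest an invocation of the form python pbt_to_ctf.py <pbt_file_prefix> <ctf_file_name> <skip_parsec_events> <skip_mpi_events>, though only argv[1] and argv[4] appear directly in this diff.
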
