
Commit

Merge pull request #1836 from lf-lang/fedsd-fix-pandas2.0
Fedsd compatibility with pandas 2.0
lhstrh authored Jun 10, 2023
2 parents 8382d5a + c749a39 commit 26728e7
Showing 1 changed file with 25 additions and 30 deletions.
55 changes: 25 additions & 30 deletions util/tracing/visualization/fedsd.py
@@ -50,15 +50,10 @@
parser.add_argument('-f','--federates', nargs='+', action='append',
help='List of the federates csv trace files.')


''' Clock synchronization error '''
''' FIXME: There should be a value for each communicating pair '''
clock_sync_error = 0

''' Bound on the network latency '''
''' FIXME: There should be a value for each communicating pair '''
network_latency = 100000000 # That is 100us

# Events matching at the sender and receiver ends depend on whether they are tagged
# (the elapsed logical time and microstep have to be the same) or not.
# Set of non-tagged events (messages)
non_tagged_messages = {'FED_ID', 'ACK', 'REJECT', 'ADR_RQ', 'ADR_AD', 'MSG', 'P2P_MSG'}

def load_and_process_csv_file(csv_file) :
'''
@@ -134,18 +129,13 @@ def load_and_process_csv_file(csv_file) :
x_coor[fed_id] = (padding * 2) + (spacing * (len(actors)-1))
fed_df['x1'] = x_coor[fed_id]
# Append into trace_df
trace_df = trace_df.append(fed_df, sort=False, ignore_index=True)
trace_df = pd.concat([trace_df, fed_df])
fed_df = fed_df[0:0]

# Sort all traces by physical time and then reset the index
trace_df = trace_df.sort_values(by=['physical_time'])
trace_df = trace_df.reset_index(drop=True)

# FIXME: For now, we need to remove the rows with negative physical time values...
# Until the reason behind such values is investigated. The negative physical
# time is when federates are still in the process of joining
# trace_df = trace_df[trace_df['physical_time'] >= 0]

# Add the Y column and initialize it with the padding value
trace_df['y1'] = math.ceil(padding * 3 / 2) # Or set a small shift

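The pandas 2.0 incompatibility addressed above is the removal of DataFrame.append, which the script used to accumulate each federate's rows into the combined trace; pd.concat is the replacement. A minimal sketch of the pattern, assuming pandas >= 2.0 (the columns shown are only a fragment of the real trace schema):

import pandas as pd

trace_df = pd.DataFrame()
fed_df = pd.DataFrame({'self_id': [0], 'physical_time': [100]})

# pandas < 2.0: trace_df = trace_df.append(fed_df, sort=False, ignore_index=True)
# pandas >= 2.0: DataFrame.append was removed, so concatenate instead.
trace_df = pd.concat([trace_df, fed_df])

# As in the patched script, sort by physical time and rebuild the index afterwards,
# which makes passing ignore_index to concat unnecessary here.
trace_df = trace_df.sort_values(by=['physical_time'])
trace_df = trace_df.reset_index(drop=True)
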
@@ -197,25 +187,33 @@ def load_and_process_csv_file(csv_file) :
inout = trace_df.at[index, 'inout']

# Match tracepoints
matching_df = trace_df[\
(trace_df['inout'] != inout) & \
(trace_df['self_id'] == partner_id) & \
(trace_df['partner_id'] == self_id) & \
(trace_df['arrow'] == 'pending') & \
(trace_df['event'] == event) & \
(trace_df['logical_time'] == logical_time) & \
(trace_df['microstep'] == microstep) \
]
# Depends on whether the event is tagged or not
if (trace_df.at[index,'event'] not in non_tagged_messages):
matching_df = trace_df[\
(trace_df['inout'] != inout) & \
(trace_df['self_id'] == partner_id) & \
(trace_df['partner_id'] == self_id) & \
(trace_df['arrow'] == 'pending') & \
(trace_df['event'] == event) & \
(trace_df['logical_time'] == logical_time) & \
(trace_df['microstep'] == microstep) \
]
else :
matching_df = trace_df[\
(trace_df['inout'] != inout) & \
(trace_df['self_id'] == partner_id) & \
(trace_df['partner_id'] == self_id) & \
(trace_df['arrow'] == 'pending') & \
(trace_df['event'] == event)
]

if (matching_df.empty) :
# If no matching receiver, then set the arrow to 'dot',
# meaning that only a dot will be rendered
trace_df.loc[index, 'arrow'] = 'dot'
else:
# If there is one or more matching rows, then consider
# the first one, since it is an out -> in arrow, and
# since it is the closest in time
# FIXME: What other possible choices to consider?
# the first one
if (inout == 'out'):
matching_index = matching_df.index[0]
matching_row = matching_df.loc[matching_index]
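
The hunk above splits tracepoint matching by message kind: tagged events must also agree on elapsed logical time and microstep, while the control messages listed in non_tagged_messages (FED_ID, ACK, REJECT, ...) are paired on endpoints and event name alone. A hedged sketch of the same boolean-mask pattern on a toy two-row trace (the event name TAGGED_MSG and the federate ids are illustrative):

import pandas as pd

non_tagged_messages = {'FED_ID', 'ACK', 'REJECT', 'ADR_RQ', 'ADR_AD', 'MSG', 'P2P_MSG'}

# Toy trace: the same message seen at the sender (out) and the receiver (in).
trace_df = pd.DataFrame({
    'event':        ['TAGGED_MSG', 'TAGGED_MSG'],
    'inout':        ['out', 'in'],
    'self_id':      [0, 1],
    'partner_id':   [1, 0],
    'arrow':        ['pending', 'pending'],
    'logical_time': [5, 5],
    'microstep':    [0, 0],
})

index = 0
row = trace_df.loc[index]
mask = (trace_df['inout'] != row['inout']) \
     & (trace_df['self_id'] == row['partner_id']) \
     & (trace_df['partner_id'] == row['self_id']) \
     & (trace_df['arrow'] == 'pending') \
     & (trace_df['event'] == row['event'])
if row['event'] not in non_tagged_messages:
    # Tagged events must carry the same (logical_time, microstep) tag as well.
    mask = mask & (trace_df['logical_time'] == row['logical_time']) \
                & (trace_df['microstep'] == row['microstep'])
matching_df = trace_df[mask]
print(matching_df.index.tolist())  # [1]
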
@@ -277,9 +275,6 @@ def load_and_process_csv_file(csv_file) :

if (row['event'] in {'FED_ID', 'ACK', 'REJECT', 'ADR_RQ', 'ADR_AD', 'MSG', 'P2P_MSG'}):
label = row['event']
elif (row['logical_time'] == -1678240241788173894) :
# FIXME: This isn't right. NEVER == -9223372036854775808.
label = row['event'] + '(NEVER)'
else:
label = row['event'] + '(' + f'{int(row["logical_time"]):,}' + ', ' + str(row['microstep']) + ')'

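The last hunk removes an elif that tested logical_time against a wrong magic number; per the removed FIXME, the NEVER tag is -9223372036854775808 (the 64-bit minimum). An illustrative helper, not part of this commit, showing how the label formatting could handle that sentinel alongside the cases the script keeps:

NEVER = -9223372036854775808  # int64 minimum, as noted in the removed FIXME

UNTAGGED = {'FED_ID', 'ACK', 'REJECT', 'ADR_RQ', 'ADR_AD', 'MSG', 'P2P_MSG'}

def format_label(event, logical_time, microstep):
    # Control messages carry no tag, so only the event name is shown.
    if event in UNTAGGED:
        return event
    # Hypothetical NEVER handling; the commit itself simply drops the broken branch.
    if logical_time == NEVER:
        return event + '(NEVER)'
    return event + '(' + f'{int(logical_time):,}' + ', ' + str(microstep) + ')'

print(format_label('TAGGED_MSG', 5000000, 0))  # TAGGED_MSG(5,000,000, 0)
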
