From aeec478d9795bb35671f5a6b3d6a5e68d920e549 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Fri, 17 Feb 2023 11:43:41 +0100 Subject: [PATCH 01/61] Start on tracing federate communication --- util/tracing/trace_to_csv.c | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index a2bcfd91a2..f373b6c45d 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -125,14 +125,14 @@ size_t read_and_write_trace() { if (trace[i].physical_time > latest_time) { latest_time = trace[i].physical_time; } - if (summary_stats[object_instance] == NULL) { + if (object_instance >= 0 && summary_stats[object_instance] == NULL) { summary_stats[object_instance] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); } if (trigger_instance >= 0 && summary_stats[trigger_instance] == NULL) { summary_stats[trigger_instance] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); } - summary_stats_t* stats; + summary_stats_t* stats = NULL; interval_t exec_time; reaction_stats_t* rstats; int index; @@ -244,10 +244,16 @@ size_t read_and_write_trace() { } } break; + case federate_NET: + case federate_LTC: + // FIXME: No summary stats collected? + break; } // Common stats across event types. - stats->occurrences++; - stats->event_type = trace[i].event_type; + if (stats != NULL) { + stats->occurrences++; + stats->event_type = trace[i].event_type; + } } return trace_length; } From fa209b93d8fd6220a34c35b5ed35a78c85f7abbb Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Fri, 17 Feb 2023 12:01:49 +0100 Subject: [PATCH 02/61] Report total count of events in summary file --- util/tracing/trace_to_csv.c | 53 +++++++++++++++++++++++++------------ 1 file changed, 36 insertions(+), 17 deletions(-) diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index f373b6c45d..633d35f290 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -69,7 +69,7 @@ typedef struct reaction_stats_t { */ typedef struct summary_stats_t { trace_event_t event_type; // Use reaction_ends for reactions. - char* description; // Description in the reaction table (e.g. reactor name). + const char* description; // Description in the reaction table (e.g. reactor name). int occurrences; // Number of occurrences of this description. int num_reactions_seen; reaction_stats_t reactions[MAX_NUM_REACTIONS]; @@ -125,11 +125,11 @@ size_t read_and_write_trace() { if (trace[i].physical_time > latest_time) { latest_time = trace[i].physical_time; } - if (object_instance >= 0 && summary_stats[object_instance] == NULL) { - summary_stats[object_instance] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); + if (object_instance >= 0 && summary_stats[NUM_EVENT_TYPES + object_instance] == NULL) { + summary_stats[NUM_EVENT_TYPES + object_instance] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); } - if (trigger_instance >= 0 && summary_stats[trigger_instance] == NULL) { - summary_stats[trigger_instance] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); + if (trigger_instance >= 0 && summary_stats[NUM_EVENT_TYPES + trigger_instance] == NULL) { + summary_stats[NUM_EVENT_TYPES + trigger_instance] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); } summary_stats_t* stats = NULL; @@ -137,6 +137,14 @@ size_t read_and_write_trace() { reaction_stats_t* rstats; int index; + // Count of event type. 
+ if (summary_stats[trace[i].event_type] == NULL) { + summary_stats[trace[i].event_type] = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); + } + summary_stats[trace[i].event_type]->event_type = trace[i].event_type; + summary_stats[trace[i].event_type]->description = trace_event_names[trace[i].event_type]; + summary_stats[trace[i].event_type]->occurrences++; + switch(trace[i].event_type) { case reaction_starts: case reaction_ends: @@ -146,7 +154,7 @@ size_t read_and_write_trace() { fprintf(stderr, "WARNING: Too many reactions. Not all will be shown in summary file.\n"); continue; } - stats = summary_stats[object_instance]; + stats = summary_stats[NUM_EVENT_TYPES + object_instance]; stats->description = reactor_name; if (trace[i].reaction_number >= stats->num_reactions_seen) { stats->num_reactions_seen = trace[i].reaction_number + 1; @@ -172,19 +180,19 @@ size_t read_and_write_trace() { // No trigger. Do not report. continue; } - stats = summary_stats[trigger_instance]; + stats = summary_stats[NUM_EVENT_TYPES + trigger_instance]; stats->description = trigger_name; break; case user_event: // Although these are not exec times and not reactions, // commandeer the first entry in the reactions array to track values. - stats = summary_stats[object_instance]; + stats = summary_stats[NUM_EVENT_TYPES + object_instance]; stats->description = reactor_name; break; case user_value: // Although these are not exec times and not reactions, // commandeer the first entry in the reactions array to track values. - stats = summary_stats[object_instance]; + stats = summary_stats[NUM_EVENT_TYPES + object_instance]; stats->description = reactor_name; rstats = &stats->reactions[0]; rstats->occurrences++; @@ -216,10 +224,10 @@ size_t read_and_write_trace() { fprintf(stderr, "WARNING: Too many workers. Not all will be shown in summary file.\n"); continue; } - stats = summary_stats[object_table_size + index]; + stats = summary_stats[NUM_EVENT_TYPES + object_table_size + index]; if (stats == NULL) { stats = (summary_stats_t*)calloc(1, sizeof(summary_stats_t)); - summary_stats[object_table_size + index] = stats; + summary_stats[NUM_EVENT_TYPES + object_table_size + index] = stats; } // num_reactions_seen here will be used to store the number of // entries in the reactions array, which is twice the number of workers. @@ -246,7 +254,7 @@ size_t read_and_write_trace() { break; case federate_NET: case federate_LTC: - // FIXME: No summary stats collected? + case NUM_EVENT_TYPES: break; } // Common stats across event types. @@ -267,11 +275,22 @@ void write_summary_file() { fprintf(summary_file, "End time:, %lld\n", latest_time); fprintf(summary_file, "Total time:, %lld\n", latest_time - start_time); + fprintf(summary_file, "\nTotal Event Occurrences\n"); + for (int i = 0; i < NUM_EVENT_TYPES; i++) { + summary_stats_t* stats = summary_stats[i]; + if (stats != NULL) { + fprintf(summary_file, "%s, %d\n", + stats->description, + stats->occurrences + ); + } + } + // First pass looks for reaction invocations. // First print a header. 
fprintf(summary_file, "\nReaction Executions\n"); fprintf(summary_file, "Reactor, Reaction, Occurrences, Total Time, Pct Total Time, Avg Time, Max Time, Min Time\n"); - for (int i = 0; i < table_size; i++) { + for (int i = NUM_EVENT_TYPES; i < table_size; i++) { summary_stats_t* stats = summary_stats[i]; if (stats != NULL && stats->num_reactions_seen > 0) { for (int j = 0; j < stats->num_reactions_seen; j++) { @@ -294,7 +313,7 @@ void write_summary_file() { // Next pass looks for calls to schedule. bool first = true; - for (int i = 0; i < table_size; i++) { + for (int i = NUM_EVENT_TYPES; i < table_size; i++) { summary_stats_t* stats = summary_stats[i]; if (stats != NULL && stats->event_type == schedule_called && stats->occurrences > 0) { if (first) { @@ -308,7 +327,7 @@ void write_summary_file() { // Next pass looks for user-defined events. first = true; - for (int i = 0; i < table_size; i++) { + for (int i = NUM_EVENT_TYPES; i < table_size; i++) { summary_stats_t* stats = summary_stats[i]; if (stats != NULL && (stats->event_type == user_event || stats->event_type == user_value) @@ -335,7 +354,7 @@ void write_summary_file() { // Next pass looks for wait events. first = true; - for (int i = 0; i < table_size; i++) { + for (int i = NUM_EVENT_TYPES; i < table_size; i++) { summary_stats_t* stats = summary_stats[i]; if (stats != NULL && ( stats->event_type == worker_wait_ends @@ -379,7 +398,7 @@ int main(int argc, char* argv[]) { if (read_header() >= 0) { // Allocate an array for summary statistics. - table_size = object_table_size + (MAX_NUM_WORKERS * 2); + table_size = NUM_EVENT_TYPES + object_table_size + (MAX_NUM_WORKERS * 2); summary_stats = (summary_stats_t**)calloc(table_size, sizeof(summary_stats_t*)); // Write a header line into the CSV file. From b124a9d1434b137a8e028e525df71d374d8b6724 Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Fri, 17 Feb 2023 12:05:16 +0100 Subject: [PATCH 03/61] Fixed warning message --- util/tracing/trace_to_chrome.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/tracing/trace_to_chrome.c b/util/tracing/trace_to_chrome.c index d39f53171e..33b911a6ca 100644 --- a/util/tracing/trace_to_chrome.c +++ b/util/tracing/trace_to_chrome.c @@ -182,7 +182,7 @@ size_t read_and_write_trace() { phase = "E"; break; default: - fprintf(stderr, "WARNING: Unrecognized event type %d: %s", + fprintf(stderr, "WARNING: Unrecognized event type %d: %s\n", trace[i].event_type, trace_event_names[trace[i].event_type]); pid = PID_FOR_UNKNOWN_EVENT; phase = "i"; From aa5263738669d32aab4720318a855c33c3552dff Mon Sep 17 00:00:00 2001 From: Byeonggil-Jun Date: Fri, 17 Feb 2023 16:52:20 -0800 Subject: [PATCH 04/61] Add cases for TAG and PTAG --- util/tracing/trace_to_csv.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index 633d35f290..35ad770c8a 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -253,6 +253,8 @@ size_t read_and_write_trace() { } break; case federate_NET: + case federate_TAG: + case federate_PTAG: case federate_LTC: case NUM_EVENT_TYPES: break; From 17b19b9e423f213e3ae96cd72a2f3f23f6f77103 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 21 Feb 2023 14:50:46 -0800 Subject: [PATCH 05/61] Add canvas for RTI tracing in trace_to_csv and reactor-c submodule --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/trace_to_csv.c | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 2ec5deff12..d02e568045 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 2ec5deff12daf66ce106892083443fc85385d035 +Subproject commit d02e568045a3385f616cb8c6e8a82add89ba5711 diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index 35ad770c8a..bcfae63f27 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -256,6 +256,17 @@ size_t read_and_write_trace() { case federate_TAG: case federate_PTAG: case federate_LTC: + case rti_receive_TIMESTAMP: + case rti_receive_ADDRESS_QUERY: + case rti_receive_ADDRESS_ADVERTISEMENT: + case rti_receive_TAGGED_MESSAGE: + case rti_receive_RESIGN: + case rti_receive_NEXT_EVENT_TAG: + case rti_receive_LOGICAL_TAG_COMPLETE: + case rti_receive_STOP_REQUEST: + case rti_receive_STOP_REQUEST_REPLY: + case rti_receive_PORT_ABSENT: + case rti_receive_unidentified: case NUM_EVENT_TYPES: break; } From 4ef5fe254eb77f2c0a754d80a1e8b348c5d5abf6 Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Thu, 23 Feb 2023 10:18:47 +0100 Subject: [PATCH 06/61] Renamed reaction_number to id_number --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/trace_to_chrome.c | 12 ++++++------ util/tracing/trace_to_csv.c | 14 +++++++------- util/tracing/trace_to_influxdb.c | 4 ++-- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index d02e568045..e4bc739982 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit d02e568045a3385f616cb8c6e8a82add89ba5711 +Subproject commit e4bc739982b06059d653614b3bfe8093748807ea diff --git a/util/tracing/trace_to_chrome.c b/util/tracing/trace_to_chrome.c index 33b911a6ca..ef16e57eb9 100644 --- a/util/tracing/trace_to_chrome.c +++ b/util/tracing/trace_to_chrome.c @@ -70,9 +70,9 @@ size_t read_and_write_trace() { // Write each line. for (int i = 0; i < trace_length; i++) { char* reaction_name = "\"UNKNOWN\""; - if (trace[i].reaction_number >= 0) { + if (trace[i].id_number >= 0) { reaction_name = (char*)malloc(4); - snprintf(reaction_name, 4, "%d", trace[i].reaction_number); + snprintf(reaction_name, 4, "%d", trace[i].id_number); } // printf("DEBUG: Reactor's self struct pointer: %p\n", trace[i].pointer); int reactor_index; @@ -217,13 +217,13 @@ size_t read_and_write_trace() { pid = reactor_index + 1; reaction_name = (char*)malloc(4); char name[13]; - snprintf(name, 13, "reaction %d", trace[i].reaction_number); + snprintf(name, 13, "reaction %d", trace[i].id_number); // NOTE: If the reactor has more than 1024 timers and actions, then // there will be a collision of thread IDs here. - thread_id = 1024 + trace[i].reaction_number; - if (trace[i].reaction_number > max_reaction_number) { - max_reaction_number = trace[i].reaction_number; + thread_id = 1024 + trace[i].id_number; + if (trace[i].id_number > max_reaction_number) { + max_reaction_number = trace[i].id_number; } fprintf(output_file, "{" diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index bcfae63f27..39361643b5 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -95,9 +95,9 @@ size_t read_and_write_trace() { // Write each line. for (int i = 0; i < trace_length; i++) { char* reaction_name = "none"; - if (trace[i].reaction_number >= 0) { + if (trace[i].id_number >= 0) { reaction_name = (char*)malloc(4); - snprintf(reaction_name, 4, "%d", trace[i].reaction_number); + snprintf(reaction_name, 4, "%d", trace[i].id_number); } // printf("DEBUG: reactor self struct pointer: %p\n", trace[i].pointer); int object_instance = -1; @@ -150,16 +150,16 @@ size_t read_and_write_trace() { case reaction_ends: // This code relies on the mutual exclusion of reactions in a reactor // and the ordering of reaction_starts and reaction_ends events. - if (trace[i].reaction_number >= MAX_NUM_REACTIONS) { + if (trace[i].id_number >= MAX_NUM_REACTIONS) { fprintf(stderr, "WARNING: Too many reactions. 
Not all will be shown in summary file.\n"); continue; } stats = summary_stats[NUM_EVENT_TYPES + object_instance]; stats->description = reactor_name; - if (trace[i].reaction_number >= stats->num_reactions_seen) { - stats->num_reactions_seen = trace[i].reaction_number + 1; + if (trace[i].id_number >= stats->num_reactions_seen) { + stats->num_reactions_seen = trace[i].id_number + 1; } - rstats = &stats->reactions[trace[i].reaction_number]; + rstats = &stats->reactions[trace[i].id_number]; if (trace[i].event_type == reaction_starts) { rstats->latest_start_time = trace[i].physical_time; } else { @@ -415,7 +415,7 @@ int main(int argc, char* argv[]) { summary_stats = (summary_stats_t**)calloc(table_size, sizeof(summary_stats_t*)); // Write a header line into the CSV file. - fprintf(output_file, "Event, Reactor, Reaction, Worker, Elapsed Logical Time, Microstep, Elapsed Physical Time, Trigger, Extra Delay\n"); + fprintf(output_file, "Event, Reactor, ID, Worker, Elapsed Logical Time, Microstep, Elapsed Physical Time, Trigger, Extra Delay\n"); while (read_and_write_trace() != 0) {}; write_summary_file(); diff --git a/util/tracing/trace_to_influxdb.c b/util/tracing/trace_to_influxdb.c index d8281abd01..d664c3de39 100644 --- a/util/tracing/trace_to_influxdb.c +++ b/util/tracing/trace_to_influxdb.c @@ -152,9 +152,9 @@ size_t read_and_write_trace() { // Write each line. for (int i = 0; i < trace_length; i++) { char* reaction_name = "none"; - if (trace[i].reaction_number >= 0) { + if (trace[i].id_number >= 0) { reaction_name = (char*)malloc(4); - snprintf(reaction_name, 4, "%d", trace[i].reaction_number); + snprintf(reaction_name, 4, "%d", trace[i].id_number); } // printf("DEBUG: reactor self struct pointer: %p\n", trace[i].pointer); int object_instance = -1; From f4e01d802cb55898ea92b1a2e6976a7831945586 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Feb 2023 11:45:10 +0100 Subject: [PATCH 07/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index e4bc739982..d96abd7be1 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit e4bc739982b06059d653614b3bfe8093748807ea +Subproject commit d96abd7be166d49a73c955b0b47312aecea90e83 From 8d140050470b7fc53da25beff6e4d471a64172e8 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Feb 2023 12:02:15 +0100 Subject: [PATCH 08/61] Aligned reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index d96abd7be1..8e82cd9377 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit d96abd7be166d49a73c955b0b47312aecea90e83 +Subproject commit 8e82cd9377b526fb2936333ac2682b24c3670b51 From ef09cb951415d0ae4702840708a359914135d9a6 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Feb 2023 12:24:28 +0100 Subject: [PATCH 09/61] Fixed bogus comment --- org.lflang/src/org/lflang/TargetProperty.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/org.lflang/src/org/lflang/TargetProperty.java b/org.lflang/src/org/lflang/TargetProperty.java index 782b368ce6..2dd33a4c5b 100644 --- a/org.lflang/src/org/lflang/TargetProperty.java +++ b/org.lflang/src/org/lflang/TargetProperty.java @@ -580,8 +580,7 @@ public enum TargetProperty { }), /** - * Directive to generate a Dockerfile. 
This is either a boolean, - * true or false, or a dictionary of options. + * Directive to enable tracing. */ TRACING("tracing", UnionType.TRACING_UNION, Arrays.asList(Target.C, Target.CCPP, Target.CPP, Target.Python), From 9b047035a6a4544f82fdeaa1b4ce1bc3b561b971 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Feb 2023 12:25:02 +0100 Subject: [PATCH 10/61] Added attempt to pass -t argument to the RTI... Doesn't work --- org.lflang/src/org/lflang/federated/launcher/FedLauncher.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/org.lflang/src/org/lflang/federated/launcher/FedLauncher.java b/org.lflang/src/org/lflang/federated/launcher/FedLauncher.java index 48a7149729..8230dfc5c8 100644 --- a/org.lflang/src/org/lflang/federated/launcher/FedLauncher.java +++ b/org.lflang/src/org/lflang/federated/launcher/FedLauncher.java @@ -329,6 +329,9 @@ private String getRtiCommand(List federates, boolean isRemote) if (targetConfig.auth) { commands.add(" -a \\"); } + if (targetConfig.tracing != null) { + commands.add(" -t \\"); + } commands.addAll(List.of( " -n "+federates.size()+" \\", " -c "+targetConfig.clockSync.toString()+" \\" From 3baa0a0d79033b6648e87160cdb223e28b162417 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Feb 2023 14:45:40 +0100 Subject: [PATCH 11/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 8e82cd9377..ea4afb2c86 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 8e82cd9377b526fb2936333ac2682b24c3670b51 +Subproject commit ea4afb2c861d9c3241cfc5c82a513b726b64edac From ef7a4da6c9bbb49d30beeb02967bb802cfc5c46e Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Feb 2023 16:45:36 +0100 Subject: [PATCH 12/61] Refactored file handling to support opening multiple files --- util/tracing/makefile | 1 + util/tracing/trace_to_chrome.c | 28 +++++-- util/tracing/trace_to_csv.c | 27 ++++++- util/tracing/trace_to_influxdb.c | 6 +- util/tracing/trace_util.c | 127 ++++++++++++------------------- util/tracing/trace_util.h | 27 ++++--- 6 files changed, 121 insertions(+), 95 deletions(-) diff --git a/util/tracing/makefile b/util/tracing/makefile index eb21e42e62..9e008f916f 100644 --- a/util/tracing/makefile +++ b/util/tracing/makefile @@ -8,6 +8,7 @@ CFLAGS=-I../../org.lflang/src/lib/c/reactor-c/include/core/ \ -I../../org.lflang/src/lib/c/reactor-c/include/core/platform \ -I../../org.lflang/src/lib/c/reactor-c/include/core/utils \ -DLF_UNTHREADED=1 \ + -g \ -Wall DEPS= LIBS=-lcurl diff --git a/util/tracing/trace_to_chrome.c b/util/tracing/trace_to_chrome.c index ef16e57eb9..4615adc8a7 100644 --- a/util/tracing/trace_to_chrome.c +++ b/util/tracing/trace_to_chrome.c @@ -43,6 +43,12 @@ THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /** Maximum thread ID seen. */ int max_thread_id = 0; +/** File containing the trace binary data. */ +FILE* trace_file = NULL; + +/** File for writing the output data. */ +FILE* output_file = NULL; + /** * Print a usage message. */ @@ -62,9 +68,11 @@ bool physical_time_only = false; /** * Read a trace in the specified file and write it to the specified json file. + * @param trace_file An open trace file. + * @param output_file An open output .json file. * @return The number of records read or 0 upon seeing an EOF. 
*/ -size_t read_and_write_trace() { +size_t read_and_write_trace(FILE* trace_file, FILE* output_file) { int trace_length = read_trace(trace_file); if (trace_length == 0) return 0; // Write each line. @@ -253,8 +261,9 @@ size_t read_and_write_trace() { /** * Write metadata events, which provide names in the renderer. + * @param output_file An open output .json file. */ -void write_metadata_events() { +void write_metadata_events(FILE* output_file) { // Thread 0 is the main thread. fprintf(output_file, "{" "\"name\": \"thread_name\", " @@ -416,13 +425,22 @@ int main(int argc, char* argv[]) { usage(); exit(0); } - open_files(filename, "json"); + + // Open the trace file. + trace_file = open_file(filename, "r"); + + // Construct the name of the csv output file and open it. + char* root = root_name(filename); + char json_filename[strlen(root) + 6]; + strcpy(json_filename, root); + strcat(json_filename, ".json"); + output_file = open_file(json_filename, "w"); if (read_header(trace_file) >= 0) { // Write the opening bracket into the json file. fprintf(output_file, "{ \"traceEvents\": [\n"); - while (read_and_write_trace() != 0) {}; - write_metadata_events(); + while (read_and_write_trace(trace_file, output_file) != 0) {}; + write_metadata_events(output_file); fprintf(output_file, "]}\n"); } } diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index 39361643b5..26104633e9 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -37,6 +37,15 @@ THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #define MAX_NUM_REACTIONS 64 // Maximum number of reactions reported in summary stats. #define MAX_NUM_WORKERS 64 +/** File containing the trace binary data. */ +FILE* trace_file = NULL; + +/** File for writing the output data. */ +FILE* output_file = NULL; + +/** File for writing summary statistics. */ +FILE* summary_file = NULL; + /** Size of the stats table is object_table_size plus twice MAX_NUM_WORKERS. */ int table_size; @@ -407,7 +416,23 @@ int main(int argc, char* argv[]) { usage(); exit(0); } - open_files(argv[1], "csv"); + // Open the trace file. + trace_file = open_file(argv[1], "r"); + + // Construct the name of the csv output file and open it. + char* root = root_name(argv[1]); + char csv_filename[strlen(root) + 5]; + strcpy(csv_filename, root); + strcat(csv_filename, ".csv"); + output_file = open_file(csv_filename, "w"); + + // Construct the name of the summary output file and open it. + char summary_filename[strlen(root) + 13]; + strcpy(summary_filename, root); + strcat(summary_filename, "_summary.csv"); + summary_file = open_file(summary_filename, "w"); + + free(root); if (read_header() >= 0) { // Allocate an array for summary statistics. diff --git a/util/tracing/trace_to_influxdb.c b/util/tracing/trace_to_influxdb.c index d664c3de39..9fdff85020 100644 --- a/util/tracing/trace_to_influxdb.c +++ b/util/tracing/trace_to_influxdb.c @@ -117,6 +117,9 @@ THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #define MAX_NUM_REACTIONS 64 // Maximum number of reactions reported in summary stats. #define MAX_NUM_WORKERS 64 +/** File containing the trace binary data. */ +FILE* trace_file = NULL; + /** Struct identifying the influx client. */ influx_client_t influx_client; influx_v2_client_t influx_v2_client; @@ -259,7 +262,8 @@ int main(int argc, char* argv[]) { exit(1); } - open_files(filename, NULL); + // Open the trace file. 
+ trace_file = open_file(filename, "r"); if (read_header() >= 0) { size_t num_records = 0, result; diff --git a/util/tracing/trace_util.c b/util/tracing/trace_util.c index f688b560d4..0f97b3bdb9 100644 --- a/util/tracing/trace_util.c +++ b/util/tracing/trace_util.c @@ -37,15 +37,6 @@ THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /** Buffer for reading object descriptions. Size limit is BUFFER_SIZE bytes. */ char buffer[BUFFER_SIZE]; -/** File containing the trace binary data. */ -FILE* trace_file = NULL; - -/** File for writing the output data. */ -FILE* output_file = NULL; - -/** File for writing summary statistics. */ -FILE* summary_file = NULL; - /** Buffer for reading trace records. */ trace_record_t trace[TRACE_BUFFER_CAPACITY]; @@ -60,6 +51,13 @@ char* top_level = NULL; object_description_t* object_table; int object_table_size = 0; +typedef struct open_file_t open_file_t; +typedef struct open_file_t { + FILE* file; + open_file_t* next; +} open_file_t; +open_file_t* _open_files = NULL; + /** * Function to be invoked upon exiting. */ @@ -68,82 +66,57 @@ void termination() { for (int i = 0; i < object_table_size; i++) { free(object_table[i].description); } - if (trace_file != NULL) { - fclose(trace_file); - } - if (output_file != NULL) { - fclose(output_file); - } - if (summary_file != NULL) { - fclose(summary_file); + while (_open_files != NULL) { + fclose(_open_files->file); + open_file_t* tmp = _open_files->next; + free(_open_files); + _open_files = tmp; } printf("Done!\n"); } -/** - * Open the trace file and the output file using the given filename. - * This leaves the FILE* pointers in the global variables trace_file and output_file. - * If the extension if "csv", then it also opens a summary_file. - * The filename argument can include path information. - * It can include the ".lft" extension or not. - * The output file will have the same path and name except that the - * extension will be given by the second argument. - * The summary_file, if opened, will have the filename with "_summary.csv" appended. - * @param filename The file name. - * @param output_file_extension The extension to put on the output file name (e.g. "csv"). - * @return A pointer to the file. - */ -void open_files(char* filename, char* output_file_extension) { - // Open the input file for reading. - size_t length = strlen(filename); - if (length > 4 && strcmp(&filename[length - 4], ".lft") == 0) { - // The filename includes the .lft extension. - length -= 4; - } - char trace_file_name[length + 4]; - strncpy(trace_file_name, filename, length); - trace_file_name[length] = 0; - strcat(trace_file_name, ".lft"); - trace_file = fopen(trace_file_name, "r"); - if (trace_file == NULL) { - fprintf(stderr, "No trace file named %s.\n", trace_file_name); +const char PATH_SEPARATOR = +#ifdef _WIN32 + '\\'; +#else + '/'; +#endif + +char* root_name(const char* path) { + if (path == NULL) return NULL; + + // Remove any path. + char* last_separator = strrchr(path, PATH_SEPARATOR); + if (last_separator != NULL) path = last_separator + 1; + + // Allocate and copy name without extension. + char* last_period = strrchr(path, '.'); + size_t length = (last_period == NULL) ? 
+ strlen(path) : last_period - path; + char* result = (char*)malloc(length + 1); + if (result == NULL) return NULL; + strncpy(result, path, length); + result[length] = '\0'; + + return result; +} + +FILE* open_file(const char* path, const char* mode) { + FILE* result = fopen(path, mode); + if (result == NULL) { + fprintf(stderr, "No file named %s.\n", path); usage(); exit(2); } - - // Open the output file for writing. - if (output_file_extension) { - char output_file_name[length + strlen(output_file_extension) + 1]; - strncpy(output_file_name, filename, length); - output_file_name[length] = 0; - strcat(output_file_name, "."); - strcat(output_file_name, output_file_extension); - output_file = fopen(output_file_name, "w"); - if (output_file == NULL) { - fprintf(stderr, "Could not create output file named %s.\n", output_file_name); - usage(); - exit(2); - } - - if (strcmp("csv", output_file_extension) == 0) { - // Also open a summary_file. - char *suffix = "_summary.csv"; - char summary_file_name[length + strlen(suffix) + 1]; - strncpy(summary_file_name, filename, length); - summary_file_name[length] = 0; - strcat(summary_file_name, suffix); - summary_file = fopen(summary_file_name, "w"); - if (summary_file == NULL) { - fprintf(stderr, "Could not create summary file named %s.\n", summary_file_name); - usage(); - exit(2); - } - } - } - - if (atexit(termination) != 0) { - fprintf(stderr, "WARNING: Failed to register termination function!"); + open_file_t* record = (open_file_t*)malloc(sizeof(open_file_t)); + if (record == NULL) { + fprintf(stderr, "Out of memory.\n"); + exit(3); } + record->file = result; + record->next = _open_files; + _open_files = record; + return result; } /** diff --git a/util/tracing/trace_util.h b/util/tracing/trace_util.h index 56eb9fe3e8..ba69498726 100644 --- a/util/tracing/trace_util.h +++ b/util/tracing/trace_util.h @@ -73,18 +73,23 @@ extern int object_table_size; extern char* top_level; /** - * Open the trace file and the output file using the given filename. - * This leaves the FILE* pointers in the global variables trace_file and output_file. - * If the extension if "csv", then it also opens a summary_file. - * The filename argument can include path information. - * It can include the ".lft" extension or not. - * The output file will have the same path and name except that the - * extension will be given by the second argument. - * The summary_file, if opened, will have the filename with "_summary.csv" appended. - * @param filename The file name. - * @param output_file_extension The extension to put on the output file name (e.g. "csv"). + * @brief Return the root file name from the given path. + * Given a path to a file, this function returns a dynamically + * allocated string (which you must free) that points to the root + * filename without the preceding path and without the file extension. + * @param path The path including the full filename. + * @return The root name of the file or NULL for failure. */ -void open_files(char* filename, char* output_file_extension); +char* root_name(const char* path); + +/** + * @brief Open the specified file for reading or writing. + * This function records the file for closing at termination. + * @param path The path to the file. + * @param mode "r" for reading and "w" for writing. + * @return A pointer to the open file or NULL for failure. + */ +FILE* open_file(const char* path, const char* mode); /** * Get the description of the object pointed to by the specified pointer. 
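For reference, a minimal usage sketch of the two helpers introduced in the patch above (root_name and open_file), mirroring what trace_to_csv.c now does; the wrapper name open_csv_output and the ".csv" extension here are illustrative only, not part of the patch:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include "trace_util.h"   // declares root_name() and open_file() added above

    // Derive "<root>.csv" from a trace file path such as "dir/foo.lft" and open it for writing.
    static FILE* open_csv_output(const char* trace_path) {
        char* root = root_name(trace_path);      // "foo"; dynamically allocated, freed below
        if (root == NULL) return NULL;
        char csv_name[strlen(root) + 5];         // room for ".csv" plus the terminating '\0'
        strcpy(csv_name, root);
        strcat(csv_name, ".csv");
        free(root);
        return open_file(csv_name, "w");         // recorded so it is closed at termination
    }
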
From fab6dc53c55ff6296c5eaff860b60d34bf703b1f Mon Sep 17 00:00:00 2001 From: Marten Lohstroh Date: Thu, 23 Feb 2023 15:40:44 -0800 Subject: [PATCH 13/61] Inherit tracing property from main context --- .../src/org/lflang/federated/generator/FedGenerator.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/org.lflang/src/org/lflang/federated/generator/FedGenerator.java b/org.lflang/src/org/lflang/federated/generator/FedGenerator.java index 093ea2b760..b54e461b1f 100644 --- a/org.lflang/src/org/lflang/federated/generator/FedGenerator.java +++ b/org.lflang/src/org/lflang/federated/generator/FedGenerator.java @@ -175,8 +175,9 @@ public boolean doGenerate(Resource resource, LFGeneratorContext context) throws final List services = new ArrayList(); // 1. create a Dockerfile for each federate subContexts.forEach((subContext) -> { - // Inherit Docker properties from main context + // Inherit certain properties from main context subContext.getTargetConfig().dockerOptions = context.getTargetConfig().dockerOptions; + subContext.getTargetConfig().tracing = context.getTargetConfig().tracing; var dockerGenerator = dockerGeneratorFactory(subContext); var dockerData = dockerGenerator.generateDockerData(); try { From 60f278950b36608282f51d538307e5d1549280c0 Mon Sep 17 00:00:00 2001 From: Marten Lohstroh Date: Thu, 23 Feb 2023 15:58:15 -0800 Subject: [PATCH 14/61] Revert "Inherit tracing property from main context" This reverts commit fab6dc53c55ff6296c5eaff860b60d34bf703b1f. --- .../src/org/lflang/federated/generator/FedGenerator.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/org.lflang/src/org/lflang/federated/generator/FedGenerator.java b/org.lflang/src/org/lflang/federated/generator/FedGenerator.java index b54e461b1f..093ea2b760 100644 --- a/org.lflang/src/org/lflang/federated/generator/FedGenerator.java +++ b/org.lflang/src/org/lflang/federated/generator/FedGenerator.java @@ -175,9 +175,8 @@ public boolean doGenerate(Resource resource, LFGeneratorContext context) throws final List services = new ArrayList(); // 1. create a Dockerfile for each federate subContexts.forEach((subContext) -> { - // Inherit certain properties from main context + // Inherit Docker properties from main context subContext.getTargetConfig().dockerOptions = context.getTargetConfig().dockerOptions; - subContext.getTargetConfig().tracing = context.getTargetConfig().tracing; var dockerGenerator = dockerGeneratorFactory(subContext); var dockerData = dockerGenerator.generateDockerData(); try { From bae32dff403b570c5297cdef7e26ed29b302b059 Mon Sep 17 00:00:00 2001 From: Marten Lohstroh Date: Thu, 23 Feb 2023 16:37:26 -0800 Subject: [PATCH 15/61] By default, let main file (re)set target properties specified in generated federates --- org.lflang/src/org/lflang/TargetProperty.java | 3 +-- .../src/org/lflang/federated/generator/FedGenerator.java | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/org.lflang/src/org/lflang/TargetProperty.java b/org.lflang/src/org/lflang/TargetProperty.java index 2dd33a4c5b..e8c4b4246d 100644 --- a/org.lflang/src/org/lflang/TargetProperty.java +++ b/org.lflang/src/org/lflang/TargetProperty.java @@ -631,7 +631,6 @@ public enum TargetProperty { } }), - /** * Directive to let the runtime export its internal dependency graph. 
* @@ -875,7 +874,7 @@ private interface PropertyGetter { this.supportedBy = supportedBy; this.getter = getter; this.setter = setter; - this.updater = (config, value, err) -> { /* Ignore the update by default */ }; + this.updater = setter; // (Re)set by default } /** diff --git a/org.lflang/src/org/lflang/federated/generator/FedGenerator.java b/org.lflang/src/org/lflang/federated/generator/FedGenerator.java index 093ea2b760..d707b16e80 100644 --- a/org.lflang/src/org/lflang/federated/generator/FedGenerator.java +++ b/org.lflang/src/org/lflang/federated/generator/FedGenerator.java @@ -175,7 +175,7 @@ public boolean doGenerate(Resource resource, LFGeneratorContext context) throws final List services = new ArrayList(); // 1. create a Dockerfile for each federate subContexts.forEach((subContext) -> { - // Inherit Docker properties from main context + // Inherit Docker options from main context subContext.getTargetConfig().dockerOptions = context.getTargetConfig().dockerOptions; var dockerGenerator = dockerGeneratorFactory(subContext); var dockerData = dockerGenerator.generateDockerData(); From b401807cfa69bfd3794013818a822e1ff82d5b01 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Thu, 23 Feb 2023 23:23:22 -0800 Subject: [PATCH 16/61] 1st draft of wip prototype for communication visualization --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing_viz/README.md | 25 ++++ util/tracing_viz/sd_gen.py | 234 +++++++++++++++++++++++++++++++++ 3 files changed, 260 insertions(+), 1 deletion(-) create mode 100644 util/tracing_viz/README.md create mode 100644 util/tracing_viz/sd_gen.py diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index d02e568045..3da6c7637b 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit d02e568045a3385f616cb8c6e8a82add89ba5711 +Subproject commit 3da6c7637b13a1c55b8b1dceb7000099da763dab diff --git a/util/tracing_viz/README.md b/util/tracing_viz/README.md new file mode 100644 index 0000000000..82861e9b2a --- /dev/null +++ b/util/tracing_viz/README.md @@ -0,0 +1,25 @@ +# Trace sequence diagram visualiser + +This is a 1st iteration of a prototyping tool for constructing a sequence diagram +out of the traces. +It operates over the csv files generated by `trace_to_csv`. + +# Running + +Once the `.lft` files collected and tranformed into `csv` files, run: +``` +$ python3 sd_gen.py -r -f ... +``` + +The output is an html file with the svg in it. + +# Current problems + +- The collected traces are not complete. They need to be checked for correcteness as well. +- All arrows are horizontal and can be duplicated. Need further processing to derive the connections +for that. 
+- The scale needs exploration + + + + diff --git a/util/tracing_viz/sd_gen.py b/util/tracing_viz/sd_gen.py new file mode 100644 index 0000000000..888f8ca691 --- /dev/null +++ b/util/tracing_viz/sd_gen.py @@ -0,0 +1,234 @@ +#!/usr/bin/env python3 +import argparse # For arguments parsing +import pandas as pd # For csv manipulation +from os.path import exists +import math + +# Define the arguments to pass in the command line +parser = argparse.ArgumentParser(description='Set of the csv trace files to render.') +parser.add_argument('-r','--rti', type=str, default="rti.csv", + help='RTI csv trace file.') +parser.add_argument('-f','--federates', nargs='+', action='append', + help='List of the federates csv trace files.') + + +def prune_event_name(event_name) : + ''' + Prunes the event name, so that to get nice to render string on top of + the arrows. + + Args: + * event_name: String with the event name + Returns: + * pruned event name + ''' + + if ('RTI receives ' in event_name) : + tmp_str = event_name.split('RTI receives ')[1] + tmp_str = tmp_str.split(' from federate')[0] + elif ('RTI sends ' in event_name) : + tmp_str = event_name.split('RTI sends ')[1] + tmp_str = tmp_str.split(' to federate')[0] + elif ('Federate receives ' in event_name) : + tmp_str = event_name.split('Federate receives ')[1] + tmp_str = tmp_str.split(' from RTI')[0] + elif ('Federate sends ' in event_name) : + tmp_str = event_name.split('Federate sends ')[1] + tmp_str = tmp_str.split(' to RTI')[0] + + return tmp_str + + +def svg_string_draw_line(x1, y1, x2, y2): + ''' + Constructs the svg html string to draw a line from (x1, y1) to (x2, y2) + ''' + str_line = '\t\n' + return str_line + + +def svg_string_draw_arrow(x1, y1, x2, y2): + ''' + Constructs the svg html string to draw a line from (x1, y1) to (x2, y2) + ''' + str_line1 = svg_string_draw_line(x1, y1, x2, y2) + str_line2 = '' + if (x1 > x2) : + str_line2 = '\t\n' + else : + str_line2 = '\t\n' + + return str_line1 + str_line2 + +def svg_string_comment(string): + ''' + Constructs the svg html string to write a comment into an svg file + ''' + str_line = '\n\t\n' + return str_line + + + + + +if __name__ == '__main__': + args = parser.parse_args() + + # Check if the files exist + if (not exists(args.rti)): + print('Error: No RTI csv tarce file!') + exit(0) + + ############################################################################ + #### RTI trace processing + ############################################################################ + + # Load RTI tracepoints, rename the columns and clean non useful data + trace_df = pd.read_csv(args.rti) + trace_df.columns = ['event', 'r', 'fed_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] + trace_df = trace_df.drop(columns=['r', 'w', 'm', 't', 'ed']) + + # Remove all the lines that do not contain communication information + # which boils up to having 'RTI' in the 'event' column + trace_df = trace_df[trace_df['event'].str.contains('RTI') == True] + + # Add an inout column to set the arrow direction + trace_df['inout'] = trace_df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') + + # Prune event names + trace_df['event'] = trace_df['event'].apply(lambda e: prune_event_name(e)) + + # Set that these are the RTI information + trace_df['rti'] = True + + # Count the number of actors + actors_nbr = 1 + + ############################################################################ + #### Federates trace processing + ############################################################################ + # Loop over the 
given list of federates trace files + if (args.federates) : + for fed_trace in args.federates[0]: + print(fed_trace) + if (not exists(fed_trace)): + print('Warning: Trace file ' + fed_trace + ' does not exist! Will resume though') + continue + + # Proceed as done with the RTI + fed_df = pd.read_csv(fed_trace) + fed_df.columns = ['event', 'r', 'fed_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] + fed_df = fed_df.drop(columns=['r', 'w', 'm', 't', 'ed']) + fed_df = fed_df[fed_df['event'].str.contains('RTI') == True] + fed_df['inout'] = fed_df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') + fed_df['event'] = fed_df['event'].apply(lambda e: prune_event_name(e)) + fed_df['rti'] = False + actors_nbr = actors_nbr + 1 + + # Append into trace_df + trace_df = trace_df.append(fed_df, sort=False, ignore_index=True) + + # Sort all traces by physical time and then reset the index + trace_df = trace_df.sort_values(by=['physical_time']) + trace_df = trace_df.reset_index(drop=True) + + # FIXME: For now, we need to remove the rows with negative physical time values... + # Until the reason behinf such values is investigated + trace_df = trace_df[trace_df['physical_time'] >= 0] + + # Add the Y column and initialize it with 0 + trace_df['y'] = 50 # Or set a small shift + + ############################################################################ + #### Process the traces in order to create the 'Y' coordinates + ############################################################################ + ppt = 0 # Previous physical time + cpt = 0 # Current physical time + py = 0 # Previous y + min = 10 # Will probably be set manually + scale = 1 # Will probably be set manually + for index, row in trace_df.iterrows(): + if (index != 2) : + cpt = int(row['physical_time']) + # print('cpt = '+str(cpt)+' and ppt = '+ppt) + # From the email: + # Y = T_previous + min + log10(1 + (T - T_previous)*scale) + # But i'd rather think it should be: + py = math.ceil(py + min + (1 + math.log10(cpt - ppt) * scale)) + trace_df.at[index, 'y'] = py + + ppt = int(row['physical_time']) + py = trace_df.at[index, 'y'] + + ############################################################################ + #### Compute the 'X' coordinates + ############################################################################ + spacing = 200 # Spacing between actors + padding = 50 + svg_width = padding + (actors_nbr - 1) * spacing + padding + svg_height = padding + int(trace_df.iloc[-1]['y']) + x_rti = 50 + x_fed = [] + for i in range(0, actors_nbr-1) : + x_fed.append(padding + (spacing * (i+1))) + + # Write all the x coordinates + trace_df['x'] = trace_df[['rti','fed_id']].apply(lambda x: x_rti if x['rti'] == True else x_fed[int(x['fed_id'])], axis=1) + + # + # FIXME: Should add processing to match the communications... + # Currently, everything is duplicated!!! 
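    # A quick numeric check of the 'Y' spacing rule applied above, using the same
    # defaults (min = 10, scale = 1); the physical-time gaps below are made-up
    # values in nanoseconds, purely for illustration:
    #
    #     y = 0
    #     for gap in [10, 1_000, 1_000_000, 1_000_000_000]:
    #         y = math.ceil(y + min + (1 + math.log10(gap) * scale))
    #
    # The successive increments are 12, 14, 17 and 20, so a gap that is 10^8
    # times larger only pushes the next arrow about 8 units further down.
    # This log compression is what keeps microsecond-scale and second-scale
    # events readable on the same diagram.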
+ # + + ############################################################################ + #### Write to svg file + ############################################################################ + with open('trace_svg.html', 'w', encoding='utf-8') as f: + # Print header + f.write('\n') + f.write('\n') + f.write('\n\n') + + f.write('\n') + + # Print the circles and the names + # RTI + f.write(svg_string_comment('RTI Actor and line')) + f.write(svg_string_draw_line(x_rti, math.ceil(padding/2), x_rti, svg_height)) + f.write('\t\n') + f.write('\tRTI\n') + # Federates + for i in range(0, actors_nbr-1): + f.write(svg_string_comment('Federate '+str(i)+' Actor and line')) + f.write(svg_string_draw_line(x_fed[i], math.ceil(padding/2), x_fed[i], svg_height)) + f.write('\t\n') + f.write('\t'+str(i)+'\n') + + # Now, we need to iterate over the traces to draw the lines + # + # FIXME: Here, we draw every single message, w/o checking the connection + # and the arrival time. This can be done as pre-processing. + # This means that most arrows are duplicated :-/ + # + f.write(svg_string_comment('Draw interactions')) + for index, row in trace_df.iterrows(): + if (row['rti'] == True): + if ('out' in row['inout']): # RTI -> Federate + f.write(svg_string_draw_arrow(x_rti, int(row['y']), x_fed[int(row['fed_id'])], int(row['y']))) + else: # Federate -> RTI + f.write(svg_string_draw_arrow(x_fed[int(row['fed_id'])], int(row['y']), x_rti, int(row['y']))) + else: + if ('out' in row['inout']): # Federate -> RTI + f.write(svg_string_draw_arrow(x_fed[int(row['fed_id'])], int(row['y']), x_rti, int(row['y']))) + else: # RTI -> Federate + f.write(svg_string_draw_arrow(x_rti, int(row['y']), x_fed[int(row['fed_id'])], int(row['y']))) + + f.write('\n\n') + + # Print footer + f.write('\n') + f.write('\n') + + + # Write to a csv file + trace_df.to_csv('all.csv', index=False) \ No newline at end of file From 7464e2e3faa533a05e2d24f0632dd6da937a59d9 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Fri, 24 Feb 2023 11:58:33 +0100 Subject: [PATCH 17/61] Reorganized how to invoke visualization utility --- lib/scripts/launch-fedsd.sh | 58 +++++++++++++++++++ util/tracing/README.md | 6 ++ util/tracing/makefile | 2 + .../visualization}/README.md | 0 .../visualization/fedsd.py} | 0 5 files changed, 66 insertions(+) create mode 100755 lib/scripts/launch-fedsd.sh rename util/{tracing_viz => tracing/visualization}/README.md (100%) rename util/{tracing_viz/sd_gen.py => tracing/visualization/fedsd.py} (100%) diff --git a/lib/scripts/launch-fedsd.sh b/lib/scripts/launch-fedsd.sh new file mode 100755 index 0000000000..9d570608e4 --- /dev/null +++ b/lib/scripts/launch-fedsd.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +#============================================================================ +# Description: Visualize federated trace data for RTI-federate interactions. +# Authors: Chadlia Jerad +# Edward A. Lee +# Usage: Usage: fedsd -r [rti.csv] -f [fed.csv ...] +#============================================================================ + +#============================================================================ +# Preamble +#============================================================================ + +# Copied from build.sh FIXME: How to avoid copying + +# Find the directory in which this script resides in a way that is compatible +# with MacOS, which has a `readlink` implementation that does not support the +# necessary `-f` flag to canonicalize by following every symlink in every +# component of the given name recursively. 
+# This solution, adapted from an example written by Geoff Nixon, is POSIX- +# compliant and robust to symbolic links. If a chain of more than 1000 links +# is encountered, we return. +find_dir() ( + start_dir=$PWD + cd "$(dirname "$1")" + link=$(readlink "$(basename "$1")") + count=0 + while [ "${link}" ]; do + if [[ "${count}" -lt 1000 ]]; then + cd "$(dirname "${link}")" + link=$(readlink "$(basename "$1")") + ((count++)) + else + return + fi + done + real_path="$PWD/$(basename "$1")" + cd "${start_dir}" + echo `dirname "${real_path}"` +) + +# Report fatal error and exit. +function fatal_error() { + 1>&2 echo -e "\e[1mfedsd: \e[31mfatal error: \e[0m$1" + exit 1 +} + +abs_path="$(find_dir "$0")" + +if [[ "${abs_path}" ]]; then + base=`dirname $(dirname ${abs_path})` +else + fatal_error "Unable to determine absolute path to $0." +fi + +# FIXME: Check that python3 is in the path. + +python3 "${base}/util/tracing/visualization/fedsd.py" "$@" diff --git a/util/tracing/README.md b/util/tracing/README.md index d189260ef7..28d8db23d7 100644 --- a/util/tracing/README.md +++ b/util/tracing/README.md @@ -3,12 +3,18 @@ This directory contains the source code for utilities that are standalone executables for post-processing tracing data created by the tracing function in Lingua Franca. +Utilities for visualizing the data are contained in the [visualization](visualization/README.md) +directory. + * trace\_to\_csv: Creates a comma-separated values text file from a binary trace file. The resulting file is suitable for analyzing in spreadsheet programs such as Excel. * trace\_to\_chrome: Creates a JSON file suitable for importing into Chrome's trace visualizer. Point Chrome to chrome://tracing/ and load the resulting file. +* trace\_to\_influxdb: A preliminary implementation that takes a binary trace file + and uploads its data into [InfluxDB](https://en.wikipedia.org/wiki/InfluxDB). + ## Installing ``` diff --git a/util/tracing/makefile b/util/tracing/makefile index 9e008f916f..5258c236dd 100644 --- a/util/tracing/makefile +++ b/util/tracing/makefile @@ -29,6 +29,8 @@ install: trace_to_csv trace_to_chrome trace_to_influxdb mv trace_to_csv ../../bin mv trace_to_chrome ../../bin mv trace_to_influxdb ../../bin + ln -f -s ../../lib/scripts/launch-fedsd.sh ../../bin/fedsd + chmod +x ../../bin/fedsd clean: rm -f *.o diff --git a/util/tracing_viz/README.md b/util/tracing/visualization/README.md similarity index 100% rename from util/tracing_viz/README.md rename to util/tracing/visualization/README.md diff --git a/util/tracing_viz/sd_gen.py b/util/tracing/visualization/fedsd.py similarity index 100% rename from util/tracing_viz/sd_gen.py rename to util/tracing/visualization/fedsd.py From 531c5537e70f2f823f24798262e934cc3db5b595 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Fri, 24 Feb 2023 14:15:10 -0800 Subject: [PATCH 18/61] Minor bug and documentation fixes. 
--- util/tracing/visualization/fedsd.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 888f8ca691..b063539d7c 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -22,8 +22,10 @@ def prune_event_name(event_name) : Returns: * pruned event name ''' - - if ('RTI receives ' in event_name) : + tmp_str = event_name + if ('RTI accepts joining federate' in event_name) : + tmp_str = "JOIN" + elif ('RTI receives ' in event_name) : tmp_str = event_name.split('RTI receives ')[1] tmp_str = tmp_str.split(' from federate')[0] elif ('RTI sends ' in event_name) : @@ -98,8 +100,9 @@ def svg_string_comment(string): # Prune event names trace_df['event'] = trace_df['event'].apply(lambda e: prune_event_name(e)) - # Set that these are the RTI information + # Set that these are the RTI information, by setting trace_df['rti'] = True + # print(trace_df) # Count the number of actors actors_nbr = 1 @@ -123,6 +126,7 @@ def svg_string_comment(string): fed_df['inout'] = fed_df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') fed_df['event'] = fed_df['event'].apply(lambda e: prune_event_name(e)) fed_df['rti'] = False + # print(fed_df) actors_nbr = actors_nbr + 1 # Append into trace_df @@ -133,7 +137,8 @@ def svg_string_comment(string): trace_df = trace_df.reset_index(drop=True) # FIXME: For now, we need to remove the rows with negative physical time values... - # Until the reason behinf such values is investigated + # Until the reason behinf such values is investigated. The negative physical + # time is when federates are still in the process of joining trace_df = trace_df[trace_df['physical_time'] >= 0] # Add the Y column and initialize it with 0 @@ -145,15 +150,15 @@ def svg_string_comment(string): ppt = 0 # Previous physical time cpt = 0 # Current physical time py = 0 # Previous y - min = 10 # Will probably be set manually - scale = 1 # Will probably be set manually + min = 10 # Will probably be set manually + scale = 1 # Will probably be set manually for index, row in trace_df.iterrows(): if (index != 2) : cpt = int(row['physical_time']) # print('cpt = '+str(cpt)+' and ppt = '+ppt) # From the email: # Y = T_previous + min + log10(1 + (T - T_previous)*scale) - # But i'd rather think it should be: + # But rather think it should be: py = math.ceil(py + min + (1 + math.log10(cpt - ppt) * scale)) trace_df.at[index, 'y'] = py @@ -230,5 +235,5 @@ def svg_string_comment(string): f.write('\n') - # Write to a csv file + # Write to a csv file, just to double check trace_df.to_csv('all.csv', index=False) \ No newline at end of file From 0f21340e73e5f5b4f588d8f41af1796e245cc34d Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Sat, 25 Feb 2023 10:20:50 +0100 Subject: [PATCH 19/61] Consolidated cases into default --- util/tracing/trace_to_csv.c | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index 26104633e9..04a2efb551 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -261,22 +261,8 @@ size_t read_and_write_trace() { } } break; - case federate_NET: - case federate_TAG: - case federate_PTAG: - case federate_LTC: - case rti_receive_TIMESTAMP: - case rti_receive_ADDRESS_QUERY: - case rti_receive_ADDRESS_ADVERTISEMENT: - case rti_receive_TAGGED_MESSAGE: - case rti_receive_RESIGN: - case rti_receive_NEXT_EVENT_TAG: - case rti_receive_LOGICAL_TAG_COMPLETE: - case rti_receive_STOP_REQUEST: - case rti_receive_STOP_REQUEST_REPLY: - case rti_receive_PORT_ABSENT: - case rti_receive_unidentified: - case NUM_EVENT_TYPES: + default: + // No special summary statistics for the rest. break; } // Common stats across event types. From 0f5a3725f933563e405caedb7a91d5c3b305362c Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Sat, 25 Feb 2023 10:21:06 +0100 Subject: [PATCH 20/61] Fixed makefile for executable for Python utilities --- util/tracing/makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/util/tracing/makefile b/util/tracing/makefile index 5258c236dd..407b153235 100644 --- a/util/tracing/makefile +++ b/util/tracing/makefile @@ -8,7 +8,6 @@ CFLAGS=-I../../org.lflang/src/lib/c/reactor-c/include/core/ \ -I../../org.lflang/src/lib/c/reactor-c/include/core/platform \ -I../../org.lflang/src/lib/c/reactor-c/include/core/utils \ -DLF_UNTHREADED=1 \ - -g \ -Wall DEPS= LIBS=-lcurl @@ -29,7 +28,7 @@ install: trace_to_csv trace_to_chrome trace_to_influxdb mv trace_to_csv ../../bin mv trace_to_chrome ../../bin mv trace_to_influxdb ../../bin - ln -f -s ../../lib/scripts/launch-fedsd.sh ../../bin/fedsd + ln -f -s ../lib/scripts/launch-fedsd.sh ../../bin/fedsd chmod +x ../../bin/fedsd clean: From 867d3ce5503aeaccdf17e183f0138b3c6a3838e2 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Mon, 27 Feb 2023 13:53:15 -0800 Subject: [PATCH 21/61] fedsd refactoring, to support tracepoints matching and towards intra-federates communication. This version accounts for network latency and clock synchronization --- util/tracing/visualization/.gitignore | 1 + util/tracing/visualization/fedsd.py | 348 ++++++++++++--------- util/tracing/visualization/fedsd_helper.py | 135 ++++++++ 3 files changed, 343 insertions(+), 141 deletions(-) create mode 100644 util/tracing/visualization/.gitignore create mode 100644 util/tracing/visualization/fedsd_helper.py diff --git a/util/tracing/visualization/.gitignore b/util/tracing/visualization/.gitignore new file mode 100644 index 0000000000..ba0430d26c --- /dev/null +++ b/util/tracing/visualization/.gitignore @@ -0,0 +1 @@ +__pycache__/ \ No newline at end of file diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index b063539d7c..ddac3ed3da 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -1,8 +1,24 @@ +''' +Define arrows: + (x1, y1) ==> (x2, y2), when unique result (this arrow will be tilted) + (x1, y1) --> (x2, y2), when a possible result (could be not tilted)? 
+If not arrow, then triangle with text + +In the dataframe, each arrow will be marked as: + - 'a': draw a non-dashed arrow + - 'd': draw dashed arrow + - 't': draw the triangle only + - 'm': marked, not to be drawn + - 'p': pending +''' + + #!/usr/bin/env python3 import argparse # For arguments parsing import pandas as pd # For csv manipulation from os.path import exists import math +import fedsd_helper as fhlp # Define the arguments to pass in the command line parser = argparse.ArgumentParser(description='Set of the csv trace files to render.') @@ -12,100 +28,81 @@ help='List of the federates csv trace files.') -def prune_event_name(event_name) : - ''' - Prunes the event name, so that to get nice to render string on top of - the arrows. +''' Clock synchronization error ''' +clock_sync_error = 0 - Args: - * event_name: String with the event name - Returns: - * pruned event name - ''' - tmp_str = event_name - if ('RTI accepts joining federate' in event_name) : - tmp_str = "JOIN" - elif ('RTI receives ' in event_name) : - tmp_str = event_name.split('RTI receives ')[1] - tmp_str = tmp_str.split(' from federate')[0] - elif ('RTI sends ' in event_name) : - tmp_str = event_name.split('RTI sends ')[1] - tmp_str = tmp_str.split(' to federate')[0] - elif ('Federate receives ' in event_name) : - tmp_str = event_name.split('Federate receives ')[1] - tmp_str = tmp_str.split(' from RTI')[0] - elif ('Federate sends ' in event_name) : - tmp_str = event_name.split('Federate sends ')[1] - tmp_str = tmp_str.split(' to RTI')[0] - - return tmp_str - - -def svg_string_draw_line(x1, y1, x2, y2): - ''' - Constructs the svg html string to draw a line from (x1, y1) to (x2, y2) - ''' - str_line = '\t\n' - return str_line +''' Bound on the network latency ''' +network_latency = 150000 # That is 100us -def svg_string_draw_arrow(x1, y1, x2, y2): +def load_and_process_csv_file(csv_file, rti) : ''' - Constructs the svg html string to draw a line from (x1, y1) to (x2, y2) - ''' - str_line1 = svg_string_draw_line(x1, y1, x2, y2) - str_line2 = '' - if (x1 > x2) : - str_line2 = '\t\n' - else : - str_line2 = '\t\n' - - return str_line1 + str_line2 + Loads and processes the csv entries, based on the type of the actor (if RTI + or federate). -def svg_string_comment(string): - ''' - Constructs the svg html string to write a comment into an svg file + Args: + * csv_file: String file name + * rti: Bool True if it the RTI, False otherwise + Returns: + * The processed dataframe. ''' - str_line = '\n\t\n' - return str_line + # Load RTI tracepoints, rename the columns and clean non useful data + df = pd.read_csv(csv_file) + print + if (rti == True): + df.columns = ['event', 'r', 'partner_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] + # Set that these are the RTI information + df['self_id'] = -1 + # df['partner_id'] = int(df['partner_id']) + else: + df.columns = ['event', 'r', 'self_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] + # Set that these are the RTI information + # FIXME: Here, we assume that the coordination in centralized. + # To be updated for the decentralized case... 
+ df['partner_id'] = -1 + # df['self_id'] = int(df['partner_id']) + + # Remove non-needed information + df = df.drop(columns=['r', 'w', 'm', 't', 'ed']) + # Remove all the lines that do not contain communication information + # which boils up to having 'RTI' in the 'event' column + df = df[df['event'].str.contains('RTI') == True] + df = df.astype({'self_id': 'int', 'partner_id': 'int'}) + # Add an inout column to set the arrow direction + df['inout'] = df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') + # Prune event names + df['event'] = df['event'].apply(lambda e: fhlp.prune_event_name[e]) + # print(df) + return df if __name__ == '__main__': args = parser.parse_args() - # Check if the files exist + # Check if the RTI trace file exists if (not exists(args.rti)): print('Error: No RTI csv tarce file!') + # FIXME: Exit? exit(0) + # The RTI and each of the federates have a fixed x coordinate. They will be + # saved in a dict + x_coor = {} + actors = [] + padding = 50 + spacing = 200 # Spacing between actors + ############################################################################ #### RTI trace processing ############################################################################ - - # Load RTI tracepoints, rename the columns and clean non useful data - trace_df = pd.read_csv(args.rti) - trace_df.columns = ['event', 'r', 'fed_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] - trace_df = trace_df.drop(columns=['r', 'w', 'm', 't', 'ed']) - - # Remove all the lines that do not contain communication information - # which boils up to having 'RTI' in the 'event' column - trace_df = trace_df[trace_df['event'].str.contains('RTI') == True] - - # Add an inout column to set the arrow direction - trace_df['inout'] = trace_df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') - - # Prune event names - trace_df['event'] = trace_df['event'].apply(lambda e: prune_event_name(e)) - - # Set that these are the RTI information, by setting - trace_df['rti'] = True - # print(trace_df) - - # Count the number of actors - actors_nbr = 1 + trace_df = load_and_process_csv_file(args.rti, True) + x_coor[-1] = padding + actors.append(-1) + # Temporary use + trace_df['x1'] = x_coor[-1] ############################################################################ #### Federates trace processing @@ -113,25 +110,22 @@ def svg_string_comment(string): # Loop over the given list of federates trace files if (args.federates) : for fed_trace in args.federates[0]: - print(fed_trace) if (not exists(fed_trace)): print('Warning: Trace file ' + fed_trace + ' does not exist! 
Will resume though') continue - - # Proceed as done with the RTI - fed_df = pd.read_csv(fed_trace) - fed_df.columns = ['event', 'r', 'fed_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] - fed_df = fed_df.drop(columns=['r', 'w', 'm', 't', 'ed']) - fed_df = fed_df[fed_df['event'].str.contains('RTI') == True] - fed_df['inout'] = fed_df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') - fed_df['event'] = fed_df['event'].apply(lambda e: prune_event_name(e)) - fed_df['rti'] = False - # print(fed_df) - actors_nbr = actors_nbr + 1 - - # Append into trace_df - trace_df = trace_df.append(fed_df, sort=False, ignore_index=True) - + fed_df = load_and_process_csv_file(fed_trace, False) + if (not fed_df.empty): + # Get the federate id number + fed_id = fed_df.iloc[-1]['self_id'] + # Add to the list of sequence diagram actors + actors.append(fed_id) + # Derive the x coordinate of the actor + x_coor[fed_id] = padding + (spacing * (len(actors)-1)) + fed_df['x1'] = x_coor[fed_id] + # Append into trace_df + trace_df = trace_df.append(fed_df, sort=False, ignore_index=True) + fed_df = fed_df[0:0] + # Sort all traces by physical time and then reset the index trace_df = trace_df.sort_values(by=['physical_time']) trace_df = trace_df.reset_index(drop=True) @@ -141,53 +135,128 @@ def svg_string_comment(string): # time is when federates are still in the process of joining trace_df = trace_df[trace_df['physical_time'] >= 0] - # Add the Y column and initialize it with 0 - trace_df['y'] = 50 # Or set a small shift + # Add the Y column and initialize it with the padding value + trace_df['y1'] = math.ceil(padding * 3 / 2) # Or set a small shift ############################################################################ - #### Process the traces in order to create the 'Y' coordinates + #### Compute the 'y1' coordinates ############################################################################ ppt = 0 # Previous physical time cpt = 0 # Current physical time py = 0 # Previous y min = 10 # Will probably be set manually scale = 1 # Will probably be set manually + first_pass = True for index, row in trace_df.iterrows(): - if (index != 2) : + if (not first_pass) : cpt = int(row['physical_time']) # print('cpt = '+str(cpt)+' and ppt = '+ppt) # From the email: # Y = T_previous + min + log10(1 + (T - T_previous)*scale) # But rather think it should be: py = math.ceil(py + min + (1 + math.log10(cpt - ppt) * scale)) - trace_df.at[index, 'y'] = py + trace_df.at[index, 'y1'] = py ppt = int(row['physical_time']) - py = trace_df.at[index, 'y'] + py = trace_df.at[index, 'y1'] + first_pass = False ############################################################################ - #### Compute the 'X' coordinates + #### Derive arrows that match sided communications ############################################################################ - spacing = 200 # Spacing between actors - padding = 50 - svg_width = padding + (actors_nbr - 1) * spacing + padding - svg_height = padding + int(trace_df.iloc[-1]['y']) - x_rti = 50 - x_fed = [] - for i in range(0, actors_nbr-1) : - x_fed.append(padding + (spacing * (i+1))) - - # Write all the x coordinates - trace_df['x'] = trace_df[['rti','fed_id']].apply(lambda x: x_rti if x['rti'] == True else x_fed[int(x['fed_id'])], axis=1) + # Intialize all rows as pending to be matched + trace_df['arrow'] = 'p' + trace_df['x2'] = -1 + trace_df['y2'] = -1 - # - # FIXME: Should add processing to match the communications... - # Currently, everything is duplicated!!! 
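# A standalone sketch of the vertical-spacing rule above, not taken from the
# patch itself: after sorting by physical time, each tracepoint is placed a
# minimum number of pixels below the previous one plus a term that grows only
# logarithmically with the elapsed physical time, so long idle gaps do not
# stretch the diagram. Here y0 = ceil(padding * 3 / 2) = 75 matches the
# initialization above, `min` is renamed `min_spacing` to avoid shadowing the
# builtin, and strictly increasing timestamps are assumed (math.log10 raises a
# domain error on a zero delta).
import math

def compress_y(physical_times, y0=75, min_spacing=10, scale=1):
    """Map sorted physical times (ns) to y coordinates with log-compressed gaps."""
    ys = [y0]
    for prev, curr in zip(physical_times, physical_times[1:]):
        delta = curr - prev  # elapsed physical time; assumed > 0 in this sketch
        ys.append(math.ceil(ys[-1] + min_spacing + (1 + math.log10(delta) * scale)))
    return ys

# A 1 us gap and then a 1 ms gap produce nearly the same spacing: [75, 89, 106].
print(compress_y([0, 1_000, 1_001_000]))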
- # + match = {} + # Iterate and check possible sides + for index, row in trace_df.iterrows(): + if ('p' in row['arrow']) : + physical_time = row['physical_time'] + self_id = int(row['self_id']) + partner_id = int(row['partner_id']) + event = row['event'] + + # Depending on the direction, compute the possible time interval + # and choose the row + if ('out' in row['inout']): + # Compute the possible timestamps interval at the receiver side + physical_time_start = physical_time - clock_sync_error + physical_time_end = physical_time + clock_sync_error + network_latency + + # Match with 'in' tracepoints + matching_df = trace_df[\ + (trace_df['physical_time'] >= physical_time_start) & \ + (trace_df['physical_time'] <= physical_time_end) & \ + (trace_df['inout'] == 'in') & \ + (trace_df['self_id'] == partner_id) & \ + (trace_df['partner_id'] == self_id) & \ + (trace_df['arrow'] == 'p') + ] + + if (matching_df.empty) : + # If no matching receiver, than set the arrow to 't', + # meaning that only a triangle will be rendered + trace_df.loc[index, 'arrow'] = 't' + else: + # If there is one or more matching rows, then consider + # the first one, since it is an out -> in arrow, and + # since it is the closet in time + # FIXME: What other possible choices to consider? + matching_index = matching_df.index[0] + matching_row = matching_df.loc[matching_index] + # Mark it, so not to consider it anymore + trace_df.at[matching_index, 'arrow'] = 'm' + trace_df.at[index, 'x2'] = matching_row['x1'] + trace_df.at[index, 'y2'] = matching_row['y1'] + if (len(matching_df.index) == 1) : + trace_df.at[index, 'arrow'] = 'a' + else : + trace_df.at[index, 'arrow'] = 'd' + else: # 'in' in row['inout'] + # Compute the possible timestamps interval at the receiver side + physical_time_start = physical_time - network_latency - clock_sync_error + physical_time_end = physical_time + clock_sync_error + + # Match with 'out' tracepoints + matching_df = trace_df[\ + (trace_df['physical_time'] >= physical_time_start) & \ + (trace_df['physical_time'] <= physical_time_end) & \ + (trace_df['inout'] == 'out') & \ + (trace_df['self_id'] == partner_id) & \ + (trace_df['partner_id'] == self_id) & \ + (trace_df['arrow'] == 'p') + ] + + if (matching_df.empty) : + # If no matching receiver, than set the arrow to 't', + # meaning that only a triangle will be rendered + trace_df.loc[index, 'arrow'] = 't' + else : + # If there is one or more matching rows, then consider + # the first one, since it is an out -> in arrow, and + # since it is the closet in time + # FIXME: What other possible choices to consider? 
+ matching_index = matching_df.index[-1] + matching_row = matching_df.loc[matching_index] + # Mark it, so not to consider it anymore + trace_df.at[matching_index, 'arrow'] = 'm' + trace_df.at[index, 'x2'] = trace_df.at[index, 'x1'] + trace_df.at[index, 'y2'] = trace_df.at[index, 'y1'] + trace_df.at[index, 'x1'] = matching_row['x1'] + trace_df.at[index, 'y1'] = matching_row['y1'] + if (len(matching_df.index) == 1) : + trace_df.at[index, 'arrow'] = 'a' + else : + trace_df.at[index, 'arrow'] = 'd' ############################################################################ #### Write to svg file ############################################################################ + svg_width = padding + (len(actors) - 1) * spacing + padding + 200 + svg_height = padding + trace_df.iloc[-1]['y1'] + with open('trace_svg.html', 'w', encoding='utf-8') as f: # Print header f.write('\n') @@ -197,43 +266,40 @@ def svg_string_comment(string): f.write('\n') # Print the circles and the names - # RTI - f.write(svg_string_comment('RTI Actor and line')) - f.write(svg_string_draw_line(x_rti, math.ceil(padding/2), x_rti, svg_height)) - f.write('\t\n') - f.write('\tRTI\n') - # Federates - for i in range(0, actors_nbr-1): - f.write(svg_string_comment('Federate '+str(i)+' Actor and line')) - f.write(svg_string_draw_line(x_fed[i], math.ceil(padding/2), x_fed[i], svg_height)) - f.write('\t\n') - f.write('\t'+str(i)+'\n') + for key in x_coor: + if (key == -1): + f.write(fhlp.svg_string_comment('RTI Actor and line')) + title = 'RTI' + center = 15 + else: + f.write(fhlp.svg_string_comment('Federate '+str(key)+' Actor and line')) + title = str(key) + center = 5 + f.write(fhlp.svg_string_draw_line(x_coor[key], math.ceil(padding/2), x_coor[key], svg_height, False)) + f.write('\t\n') + f.write('\t'+title+'\n') # Now, we need to iterate over the traces to draw the lines - # - # FIXME: Here, we draw every single message, w/o checking the connection - # and the arrival time. This can be done as pre-processing. - # This means that most arrows are duplicated :-/ - # - f.write(svg_string_comment('Draw interactions')) + f.write(fhlp.svg_string_comment('Draw interactions')) for index, row in trace_df.iterrows(): - if (row['rti'] == True): - if ('out' in row['inout']): # RTI -> Federate - f.write(svg_string_draw_arrow(x_rti, int(row['y']), x_fed[int(row['fed_id'])], int(row['y']))) - else: # Federate -> RTI - f.write(svg_string_draw_arrow(x_fed[int(row['fed_id'])], int(row['y']), x_rti, int(row['y']))) - else: - if ('out' in row['inout']): # Federate -> RTI - f.write(svg_string_draw_arrow(x_fed[int(row['fed_id'])], int(row['y']), x_rti, int(row['y']))) - else: # RTI -> Federate - f.write(svg_string_draw_arrow(x_rti, int(row['y']), x_fed[int(row['fed_id'])], int(row['y']))) - - f.write('\n\n') + # FIXME: Whose physical and logical time? 
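# A condensed, runnable sketch of the send/receive matching above, on a toy
# trace; the numbers are assumptions for illustration only. With a clock
# synchronization error e and a network latency bound L, a message sent at
# physical time t can only be received within [t - e, t + e + L], and the
# candidate rows must have the opposite direction, swapped self/partner ids,
# and still be pending. Later patches in this series also require an equal
# event name, logical time and microstep.
import pandas as pd

e, L = 0, 100_000  # clock sync error and network latency bound (ns), assumed

toy = pd.DataFrame(
    [['NET', 'out',  1, -1,  1_000, 'pending'],
     ['NET', 'in',  -1,  1, 41_000, 'pending']],
    columns=['event', 'inout', 'self_id', 'partner_id', 'physical_time', 'arrow'])

row = toy.loc[0]  # the 'out' tracepoint to be matched
lo, hi = row.physical_time - e, row.physical_time + e + L
mask = ((toy.physical_time >= lo) & (toy.physical_time <= hi)
        & (toy.inout != row.inout)
        & (toy.self_id == row.partner_id)
        & (toy.partner_id == row.self_id)
        & (toy.arrow == 'pending'))
print(toy[mask])  # selects the matching 'in' tracepoint at index 1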
+ label = row['event'] + ' @PT=' + str(row['physical_time']) + ' @LT=' + str(row['logical_time']) + if (row['arrow'] == 'a'): + f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False)) + elif (row['arrow'] == 'd'): + f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True)) + elif (row['arrow'] == 't'): + if (row['inout'] == 'in'): + x1 = row['x1'] - 1 + else : + x1 = row['x1'] + 1 + f.write(fhlp.svg_string_draw_triangle(x1, row['x1'], row['y1'])) + + f.write('\n\n\n') # Print footer f.write('\n') f.write('\n') - # Write to a csv file, just to double check trace_df.to_csv('all.csv', index=False) \ No newline at end of file diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py new file mode 100644 index 0000000000..a247890240 --- /dev/null +++ b/util/tracing/visualization/fedsd_helper.py @@ -0,0 +1,135 @@ +import math + +# Disctionary for pruning event names. Usefule for tracepoint matching and +# communication rendering +prune_event_name = { + "Reaction starts": "REACTION_STARTS", + "Reaction ends": "REACTION_ENDS", + "Schedule called": "SCH_CALL", + "User-defined event": "UDE", + "User-defined valued event": "UDVE", + "Worker wait starts": "WWS", + "Worker wait ends": "WWE", + "Scheduler advancing time starts": "SCH_ADVTS", + "Scheduler advancing time ends": "SCH_ADVTE", + "Federate sends NET to RTI": "NET", + "Federate receives TAG from RTI": "TAG", + "Federate receives PTAG from RTI": "PTAG", + "Federate sends LTC to RTI": "LTC", + "RTI receives TIMESTAMP from federate": "TIMESTAMP", + "RTI receives ADDRESS_QUERY from federate": "ADDRESS_QUERY", + "RTI receives ADDRESS_ADVERTISEMENT from federate": "ADDRESS_AD", + "RTI receives TAGGED_MESSAGE from federate": "TAG", + "RTI receives RESIGN from federate": "RESIGN", + "RTI receives NEXT_EVENT_TAG from federate": "NET", + "RTI receives LOGICAL_TAG_COMPLETE from federate": "LTC", + "RTI receives STOP_REQUEST from federate": "STOP_REQUEST", + "RTI receives STOP_REQUEST_REPLY from federate": "STOP_REQUEST_REPLY", + "RTI receives PORT_ABSENT from federate": "PORT_ABSENT", + "RTI receives unidentified message from federate": "UNIDENTIFIED", + "RTI sends PTAG to federate": "PTAG", + "RTI sends TAG to federate": "TAG", + "RTI sends reject to federate": "REJECT", + "RTI sends STOP REQUEST to federate": "STOP_RQ", + "RTI accepts joining federate": "JOIN" +} + +prune_event_name.setdefault(" ", "UNIDENTIFIED") + +################################################################################ +### Routines to write to csv file +################################################################################ + +def svg_string_draw_line(x1, y1, x2, y2, dashed): + ''' + Constructs the svg html string to draw a line from (x1, y1) to (x2, y2). The + line can be continous or dashed. 
+ + Args: + * x1: Int X coordinate of the source point + * y1: Int Y coordinate of the source point + * x2: Int X coordinate of the sink point + * y2: Int Y coordinate of the sink point + * dashed: Bool True if the line is dashed, continous otherwise + Returns: + * String: the svg string of the line + ''' + str_line = '\t\n' + return str_line + + +def svg_string_draw_triangle(x1, x2, y2) : + ''' + Constructs the svg html string to draw the arrow end + + Args: + * x1: Int X coordinate of the source point + * x2: Int X coordinate of the sink point + * y2: Int Y coordinate of the sink point + Returns: + * String: the svg string of the triangle + ''' + str_line = '' + if (x1 > x2) : + str_line = '\t\n' + else : + str_line = '\t\n' + return str_line + + +def svg_string_draw_label(x1, y1, x2, y2, label) : + ''' + Computes the rotation angle of the text and then constructs the svg string. + + Args: + * x1: Int X coordinate of the source point + * y1: Int Y coordinate of the source point + * x2: Int X coordinate of the sink point + * y2: Int Y coordinate of the sink point + * label: Bool True if the line is dashed, continous otherwise + Returns: + * String: the svg string of the text + ''' + # FIXME: Need further improvement, based of the position of the arrows + # FIXME: Rotation value is not that accurate. + rotation = math.ceil(math.atan((x2-x1)/(y2-y1))) + #print('rot = '+str(rotation)+' x1='+str(x1)+' y1='+str(y1)+' x2='+str(x2)+' y2='+str(y2)) + str_line = '\t'+label+'\n' + return str_line + + +def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed): + ''' + Constructs the svg html string to draw the arrow from (x1, y1) to (x2, y2). + The arrow end is constructed, together with the label + + Args: + * x1: Int X coordinate of the source point + * y1: Int Y coordinate of the source point + * x2: Int X coordinate of the sink point + * y2: Int Y coordinate of the sink point + * label: String Label to draw on top of the arrow + * dashed: Bool True if the line is dashed, continous otherwise + Returns: + * String: the svg string of the arrow + ''' + str_line1 = svg_string_draw_line(x1, y1, x2, y2, dashed) + str_line2 = svg_string_draw_triangle(x1, x2, y2) + str_line3 = svg_string_draw_label(x1, y1, x2, y2, label) + return str_line1 + str_line2 + str_line3 + + +def svg_string_comment(comment): + ''' + Constructs the svg html string to write a comment into an svg file. 
+ + Args: + * comment: String Comment to add + Returns: + * String: the svg string of the comment + ''' + str_line = '\n\t\n' + return str_line From fb1476409ddaa5b26cf0019a65fcdcf716400aa8 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Mon, 27 Feb 2023 17:21:26 -0800 Subject: [PATCH 22/61] Fix labels rotation and better rendering --- util/tracing/visualization/fedsd.py | 6 ++--- util/tracing/visualization/fedsd_helper.py | 27 ++++++++++++++++++++-- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index ddac3ed3da..d9e8b75fa2 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -290,10 +290,10 @@ def load_and_process_csv_file(csv_file, rti) : f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True)) elif (row['arrow'] == 't'): if (row['inout'] == 'in'): - x1 = row['x1'] - 1 + label = "(in)" + label else : - x1 = row['x1'] + 1 - f.write(fhlp.svg_string_draw_triangle(x1, row['x1'], row['y1'])) + label = "(out)" + label + f.write(fhlp.svg_string_draw_dot(row['x1'], row['y1'], label)) f.write('\n\n\n') diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index a247890240..09b579266b 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -95,9 +95,15 @@ def svg_string_draw_label(x1, y1, x2, y2, label) : ''' # FIXME: Need further improvement, based of the position of the arrows # FIXME: Rotation value is not that accurate. - rotation = math.ceil(math.atan((x2-x1)/(y2-y1))) + if (x2 < x1) : + rotation = - math.ceil(math.atan((x2-x1)/(y2-y1)) * 180 / 3.14) - 90 + str_line = '\t'+label+'\n' + else : + rotation = - math.ceil(math.atan((x1-x2)/(y1-y2)) * 180 / 3.14) + 90 + x = math.ceil((x2 + x1) / 2) + y = math.ceil((y1 + y2) / 2) - 5 + str_line = '\t'+label+'\n' #print('rot = '+str(rotation)+' x1='+str(x1)+' y1='+str(y1)+' x2='+str(x2)+' y2='+str(y2)) - str_line = '\t'+label+'\n' return str_line @@ -133,3 +139,20 @@ def svg_string_comment(comment): ''' str_line = '\n\t\n' return str_line + + +def svg_string_draw_dot(x, y, label) : + ''' + Constructs the svg html string to draw the arrow end + + Args: + * x: Int X coordinate of the dot + * y: Int Y coordinate of the dot + * label: String + Returns: + * String: the svg string of the triangle + ''' + str_line = '' + str_line = '\t\n' + str_line = str_line + '\t'+label+'\n' + return str_line \ No newline at end of file From 50cefb96274b1148032941e9fbc06d6b691162a0 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 28 Feb 2023 12:13:17 -0800 Subject: [PATCH 23/61] Fixing matching arrows + Better naming + Add FIXMEs for future todos --- util/tracing/visualization/fedsd.py | 68 +++++++++++++--------- util/tracing/visualization/fedsd_helper.py | 4 +- 2 files changed, 42 insertions(+), 30 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index d9e8b75fa2..e28b83c1e6 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -5,11 +5,11 @@ If not arrow, then triangle with text In the dataframe, each arrow will be marked as: - - 'a': draw a non-dashed arrow - - 'd': draw dashed arrow - - 't': draw the triangle only - - 'm': marked, not to be drawn - - 'p': pending + - 'arrow': draw a non-dashed arrow + - 'dashedarrow': draw dashed arrow + - 'dot': draw the triangle only + - 'marked': marked, not to be drawn + 
- 'pending': pending ''' @@ -29,10 +29,12 @@ ''' Clock synchronization error ''' +''' FIXME: There should be a value for each communicating pair ''' clock_sync_error = 0 ''' Bound on the network latency ''' -network_latency = 150000 # That is 100us +''' FIXME: There should be a value for each communicating pair ''' +network_latency = 250000 # That is 100us def load_and_process_csv_file(csv_file, rti) : @@ -75,7 +77,6 @@ def load_and_process_csv_file(csv_file, rti) : # Prune event names df['event'] = df['event'].apply(lambda e: fhlp.prune_event_name[e]) - # print(df) return df @@ -165,14 +166,23 @@ def load_and_process_csv_file(csv_file, rti) : #### Derive arrows that match sided communications ############################################################################ # Intialize all rows as pending to be matched - trace_df['arrow'] = 'p' + trace_df['arrow'] = 'pending' trace_df['x2'] = -1 trace_df['y2'] = -1 - match = {} + # Because pandas library prevents writing the dataframe when iterating, but + # the row at the cueent index, the turnaround is to save the indexes to be + # modified and then check within the iterations + indexes_to_mark = [] # Iterate and check possible sides for index, row in trace_df.iterrows(): - if ('p' in row['arrow']) : + # Check is the index is to be marked with 'marked' + if (index in indexes_to_mark): + trace_df.at[index, 'arrow'] = 'marked' + continue + + # If not, and if it is a pending tracepoint, proceed to look for a match + if (row['arrow'] == 'pending') : physical_time = row['physical_time'] self_id = int(row['self_id']) partner_id = int(row['partner_id']) @@ -192,13 +202,13 @@ def load_and_process_csv_file(csv_file, rti) : (trace_df['inout'] == 'in') & \ (trace_df['self_id'] == partner_id) & \ (trace_df['partner_id'] == self_id) & \ - (trace_df['arrow'] == 'p') + (trace_df['arrow'] == 'pending') ] if (matching_df.empty) : - # If no matching receiver, than set the arrow to 't', + # If no matching receiver, than set the arrow to 'dot', # meaning that only a triangle will be rendered - trace_df.loc[index, 'arrow'] = 't' + trace_df.loc[index, 'arrow'] = 'dot' else: # If there is one or more matching rows, then consider # the first one, since it is an out -> in arrow, and @@ -207,13 +217,14 @@ def load_and_process_csv_file(csv_file, rti) : matching_index = matching_df.index[0] matching_row = matching_df.loc[matching_index] # Mark it, so not to consider it anymore - trace_df.at[matching_index, 'arrow'] = 'm' + # trace_df.at[matching_index, 'arrow'] = 'marked' + indexes_to_mark.append(matching_index) trace_df.at[index, 'x2'] = matching_row['x1'] trace_df.at[index, 'y2'] = matching_row['y1'] if (len(matching_df.index) == 1) : - trace_df.at[index, 'arrow'] = 'a' + trace_df.at[index, 'arrow'] = 'arrow' else : - trace_df.at[index, 'arrow'] = 'd' + trace_df.at[index, 'arrow'] = 'dashedarrow' else: # 'in' in row['inout'] # Compute the possible timestamps interval at the receiver side physical_time_start = physical_time - network_latency - clock_sync_error @@ -226,13 +237,13 @@ def load_and_process_csv_file(csv_file, rti) : (trace_df['inout'] == 'out') & \ (trace_df['self_id'] == partner_id) & \ (trace_df['partner_id'] == self_id) & \ - (trace_df['arrow'] == 'p') + (trace_df['arrow'] == 'pending') ] if (matching_df.empty) : - # If no matching receiver, than set the arrow to 't', + # If no matching receiver, than set the arrow to 'dot', # meaning that only a triangle will be rendered - trace_df.loc[index, 'arrow'] = 't' + trace_df.loc[index, 'arrow'] = 'dot' else : 
# If there is one or more matching rows, then consider # the first one, since it is an out -> in arrow, and @@ -241,15 +252,16 @@ def load_and_process_csv_file(csv_file, rti) : matching_index = matching_df.index[-1] matching_row = matching_df.loc[matching_index] # Mark it, so not to consider it anymore - trace_df.at[matching_index, 'arrow'] = 'm' + # trace_df.at[matching_index, 'arrow'] = 'marked' + indexes_to_mark.append(matching_index) trace_df.at[index, 'x2'] = trace_df.at[index, 'x1'] trace_df.at[index, 'y2'] = trace_df.at[index, 'y1'] trace_df.at[index, 'x1'] = matching_row['x1'] trace_df.at[index, 'y1'] = matching_row['y1'] if (len(matching_df.index) == 1) : - trace_df.at[index, 'arrow'] = 'a' + trace_df.at[index, 'arrow'] = 'arrow' else : - trace_df.at[index, 'arrow'] = 'd' + trace_df.at[index, 'arrow'] = 'dashedarrow' ############################################################################ #### Write to svg file @@ -284,15 +296,15 @@ def load_and_process_csv_file(csv_file, rti) : for index, row in trace_df.iterrows(): # FIXME: Whose physical and logical time? label = row['event'] + ' @PT=' + str(row['physical_time']) + ' @LT=' + str(row['logical_time']) - if (row['arrow'] == 'a'): + if (row['arrow'] == 'arrow'): f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False)) - elif (row['arrow'] == 'd'): + elif (row['arrow'] == 'dashedarrow'): f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True)) - elif (row['arrow'] == 't'): + elif (row['arrow'] == 'dot'): if (row['inout'] == 'in'): - label = "(in)" + label + label = "(in) from " + str(row['partner_id']) + ' ' + label else : - label = "(out)" + label + label = "(out) to " + str(row['partner_id']) + ' ' + label f.write(fhlp.svg_string_draw_dot(row['x1'], row['y1'], label)) f.write('\n\n\n') @@ -302,4 +314,4 @@ def load_and_process_csv_file(csv_file, rti) : f.write('\n') # Write to a csv file, just to double check - trace_df.to_csv('all.csv', index=False) \ No newline at end of file + trace_df.to_csv('all.csv', index=True) \ No newline at end of file diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index 09b579266b..ccf1114d7f 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -61,7 +61,7 @@ def svg_string_draw_line(x1, y1, x2, y2, dashed): return str_line -def svg_string_draw_triangle(x1, x2, y2) : +def svg_string_draw_arrow_head(x1, x2, y2) : ''' Constructs the svg html string to draw the arrow end @@ -123,7 +123,7 @@ def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed): * String: the svg string of the arrow ''' str_line1 = svg_string_draw_line(x1, y1, x2, y2, dashed) - str_line2 = svg_string_draw_triangle(x1, x2, y2) + str_line2 = svg_string_draw_arrow_head(x1, x2, y2) str_line3 = svg_string_draw_label(x1, y1, x2, y2, label) return str_line1 + str_line2 + str_line3 From ef6eb444f5d07b6c7b8a04c8f2488482de406634 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 28 Feb 2023 13:35:45 -0800 Subject: [PATCH 24/61] A better fix for the previous matching bug --- util/tracing/visualization/fedsd.py | 33 ++++++++++------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index e28b83c1e6..4dd5c134ba 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -170,27 +170,18 @@ def load_and_process_csv_file(csv_file, rti) 
: trace_df['x2'] = -1 trace_df['y2'] = -1 - # Because pandas library prevents writing the dataframe when iterating, but - # the row at the cueent index, the turnaround is to save the indexes to be - # modified and then check within the iterations - indexes_to_mark = [] # Iterate and check possible sides - for index, row in trace_df.iterrows(): - # Check is the index is to be marked with 'marked' - if (index in indexes_to_mark): - trace_df.at[index, 'arrow'] = 'marked' - continue - - # If not, and if it is a pending tracepoint, proceed to look for a match - if (row['arrow'] == 'pending') : - physical_time = row['physical_time'] - self_id = int(row['self_id']) - partner_id = int(row['partner_id']) - event = row['event'] + for index in trace_df.index: + # If the tracepoint is pending, proceed to look for a match + if (trace_df.at[index,'arrow'] == 'pending') : + physical_time = trace_df.at[index,'physical_time'] + self_id = int(trace_df.at[index,'self_id']) + partner_id = int(trace_df.at[index,'partner_id']) + event = trace_df.at[index,'event'] # Depending on the direction, compute the possible time interval # and choose the row - if ('out' in row['inout']): + if ('out' in trace_df.at[index,'inout']): # Compute the possible timestamps interval at the receiver side physical_time_start = physical_time - clock_sync_error physical_time_end = physical_time + clock_sync_error + network_latency @@ -217,15 +208,14 @@ def load_and_process_csv_file(csv_file, rti) : matching_index = matching_df.index[0] matching_row = matching_df.loc[matching_index] # Mark it, so not to consider it anymore - # trace_df.at[matching_index, 'arrow'] = 'marked' - indexes_to_mark.append(matching_index) + trace_df.at[matching_index, 'arrow'] = 'marked' trace_df.at[index, 'x2'] = matching_row['x1'] trace_df.at[index, 'y2'] = matching_row['y1'] if (len(matching_df.index) == 1) : trace_df.at[index, 'arrow'] = 'arrow' else : trace_df.at[index, 'arrow'] = 'dashedarrow' - else: # 'in' in row['inout'] + else: # 'in' in trace_df.at[index,'inout'] # Compute the possible timestamps interval at the receiver side physical_time_start = physical_time - network_latency - clock_sync_error physical_time_end = physical_time + clock_sync_error @@ -252,8 +242,7 @@ def load_and_process_csv_file(csv_file, rti) : matching_index = matching_df.index[-1] matching_row = matching_df.loc[matching_index] # Mark it, so not to consider it anymore - # trace_df.at[matching_index, 'arrow'] = 'marked' - indexes_to_mark.append(matching_index) + trace_df.at[matching_index, 'arrow'] = 'marked' trace_df.at[index, 'x2'] = trace_df.at[index, 'x1'] trace_df.at[index, 'y2'] = trace_df.at[index, 'y1'] trace_df.at[index, 'x1'] = matching_row['x1'] From 037aa407398566aaafcf164edf672b70548a0b81 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Fri, 3 Mar 2023 11:06:55 -0800 Subject: [PATCH 25/61] Improve matching mechanism by accounting for part of the payload + better trace position in reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/visualization/fedsd.py | 106 ++++++++++++---------------- 2 files changed, 46 insertions(+), 62 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 852638896d..c0fd5fbbd8 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 852638896ded7fc2c0eba027de1080ab3d204b93 +Subproject commit c0fd5fbbd8b01434b29ebc0ff3915e7c7e44e0f3 diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 
4dd5c134ba..c97bbbf60a 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -34,7 +34,7 @@ ''' Bound on the network latency ''' ''' FIXME: There should be a value for each communicating pair ''' -network_latency = 250000 # That is 100us +network_latency = 100000000 # That is 100us def load_and_process_csv_file(csv_file, rti) : @@ -52,12 +52,12 @@ def load_and_process_csv_file(csv_file, rti) : df = pd.read_csv(csv_file) print if (rti == True): - df.columns = ['event', 'r', 'partner_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] + df.columns = ['event', 'r', 'partner_id', 'w', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information df['self_id'] = -1 # df['partner_id'] = int(df['partner_id']) else: - df.columns = ['event', 'r', 'self_id', 'w', 'logical_time', 'm', 'physical_time', 't', 'ed'] + df.columns = ['event', 'r', 'self_id', 'w', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information # FIXME: Here, we assume that the coordination in centralized. # To be updated for the decentralized case... @@ -65,7 +65,7 @@ def load_and_process_csv_file(csv_file, rti) : # df['self_id'] = int(df['partner_id']) # Remove non-needed information - df = df.drop(columns=['r', 'w', 'm', 't', 'ed']) + df = df.drop(columns=['r', 'w', 't', 'ed']) # Remove all the lines that do not contain communication information # which boils up to having 'RTI' in the 'event' column @@ -178,79 +178,63 @@ def load_and_process_csv_file(csv_file, rti) : self_id = int(trace_df.at[index,'self_id']) partner_id = int(trace_df.at[index,'partner_id']) event = trace_df.at[index,'event'] + logical_time = trace_df.at[index, 'logical_time'] + microstep = trace_df.at[index, 'microstep'] + inout = trace_df.at[index, 'inout'] + # Depending on the direction, compute the possible time interval # and choose the row - if ('out' in trace_df.at[index,'inout']): + if (inout == 'out'): # Compute the possible timestamps interval at the receiver side physical_time_start = physical_time - clock_sync_error physical_time_end = physical_time + clock_sync_error + network_latency - - # Match with 'in' tracepoints - matching_df = trace_df[\ - (trace_df['physical_time'] >= physical_time_start) & \ - (trace_df['physical_time'] <= physical_time_end) & \ - (trace_df['inout'] == 'in') & \ - (trace_df['self_id'] == partner_id) & \ - (trace_df['partner_id'] == self_id) & \ - (trace_df['arrow'] == 'pending') - ] - - if (matching_df.empty) : - # If no matching receiver, than set the arrow to 'dot', - # meaning that only a triangle will be rendered - trace_df.loc[index, 'arrow'] = 'dot' - else: - # If there is one or more matching rows, then consider - # the first one, since it is an out -> in arrow, and - # since it is the closet in time - # FIXME: What other possible choices to consider? 
+ else: + physical_time_start = physical_time - network_latency - clock_sync_error + physical_time_end = physical_time + clock_sync_error + + # Match tracepoints + matching_df = trace_df[\ + (trace_df['physical_time'] >= physical_time_start) & \ + (trace_df['physical_time'] <= physical_time_end) & \ + (trace_df['inout'] != inout) & \ + (trace_df['self_id'] == partner_id) & \ + (trace_df['partner_id'] == self_id) & \ + (trace_df['arrow'] == 'pending') & \ + (trace_df['event'] == event) & \ + (trace_df['logical_time'] == logical_time) & \ + (trace_df['microstep'] == microstep) \ + ] + + if (matching_df.empty) : + # If no matching receiver, than set the arrow to 'dot', + # meaning that only a dot will be rendered + trace_df.loc[index, 'arrow'] = 'dot' + else: + # If there is one or more matching rows, then consider + # the first one, since it is an out -> in arrow, and + # since it is the closet in time + # FIXME: What other possible choices to consider? + if (inout == 'out'): matching_index = matching_df.index[0] matching_row = matching_df.loc[matching_index] - # Mark it, so not to consider it anymore - trace_df.at[matching_index, 'arrow'] = 'marked' trace_df.at[index, 'x2'] = matching_row['x1'] trace_df.at[index, 'y2'] = matching_row['y1'] - if (len(matching_df.index) == 1) : - trace_df.at[index, 'arrow'] = 'arrow' - else : - trace_df.at[index, 'arrow'] = 'dashedarrow' - else: # 'in' in trace_df.at[index,'inout'] - # Compute the possible timestamps interval at the receiver side - physical_time_start = physical_time - network_latency - clock_sync_error - physical_time_end = physical_time + clock_sync_error - - # Match with 'out' tracepoints - matching_df = trace_df[\ - (trace_df['physical_time'] >= physical_time_start) & \ - (trace_df['physical_time'] <= physical_time_end) & \ - (trace_df['inout'] == 'out') & \ - (trace_df['self_id'] == partner_id) & \ - (trace_df['partner_id'] == self_id) & \ - (trace_df['arrow'] == 'pending') - ] - - if (matching_df.empty) : - # If no matching receiver, than set the arrow to 'dot', - # meaning that only a triangle will be rendered - trace_df.loc[index, 'arrow'] = 'dot' - else : - # If there is one or more matching rows, then consider - # the first one, since it is an out -> in arrow, and - # since it is the closet in time - # FIXME: What other possible choices to consider? + else: matching_index = matching_df.index[-1] matching_row = matching_df.loc[matching_index] - # Mark it, so not to consider it anymore - trace_df.at[matching_index, 'arrow'] = 'marked' trace_df.at[index, 'x2'] = trace_df.at[index, 'x1'] trace_df.at[index, 'y2'] = trace_df.at[index, 'y1'] trace_df.at[index, 'x1'] = matching_row['x1'] trace_df.at[index, 'y1'] = matching_row['y1'] - if (len(matching_df.index) == 1) : - trace_df.at[index, 'arrow'] = 'arrow' - else : - trace_df.at[index, 'arrow'] = 'dashedarrow' + + # Mark it, so not to consider it anymore + trace_df.at[matching_index, 'arrow'] = 'marked' + + if (len(matching_df.index) == 1) : + trace_df.at[index, 'arrow'] = 'arrow' + else : + trace_df.at[index, 'arrow'] = 'dashedarrow' ############################################################################ #### Write to svg file From ef3050017e28d0617bccab2b1c05b0882248023a Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Sun, 5 Mar 2023 16:22:15 +0100 Subject: [PATCH 26/61] Aligned reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index c0fd5fbbd8..3b5e70711b 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit c0fd5fbbd8b01434b29ebc0ff3915e7c7e44e0f3 +Subproject commit 3b5e70711b25b64b2d08348d5e148f43f0379829 From 5285c4c203aa58b10bda15c148b0f55e7b34b887 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Sun, 5 Mar 2023 08:47:42 -0800 Subject: [PATCH 27/61] Better matching between events. More improvements are WiP. --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/visualization/fedsd_helper.py | 48 +++++++++++----------- 2 files changed, 24 insertions(+), 26 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index c0fd5fbbd8..b79cdc9347 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit c0fd5fbbd8b01434b29ebc0ff3915e7c7e44e0f3 +Subproject commit b79cdc93470d9e498fcee49119a84a3656e21a5f diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index ccf1114d7f..abfe3efa9c 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -3,35 +3,33 @@ # Disctionary for pruning event names. Usefule for tracepoint matching and # communication rendering prune_event_name = { - "Reaction starts": "REACTION_STARTS", - "Reaction ends": "REACTION_ENDS", - "Schedule called": "SCH_CALL", - "User-defined event": "UDE", - "User-defined valued event": "UDVE", - "Worker wait starts": "WWS", - "Worker wait ends": "WWE", - "Scheduler advancing time starts": "SCH_ADVTS", - "Scheduler advancing time ends": "SCH_ADVTE", + "Federate sends TIMESTAMP to RTI": "TIMESTAMP", "Federate sends NET to RTI": "NET", - "Federate receives TAG from RTI": "TAG", - "Federate receives PTAG from RTI": "PTAG", "Federate sends LTC to RTI": "LTC", - "RTI receives TIMESTAMP from federate": "TIMESTAMP", - "RTI receives ADDRESS_QUERY from federate": "ADDRESS_QUERY", - "RTI receives ADDRESS_ADVERTISEMENT from federate": "ADDRESS_AD", - "RTI receives TAGGED_MESSAGE from federate": "TAG", - "RTI receives RESIGN from federate": "RESIGN", - "RTI receives NEXT_EVENT_TAG from federate": "NET", - "RTI receives LOGICAL_TAG_COMPLETE from federate": "LTC", - "RTI receives STOP_REQUEST from federate": "STOP_REQUEST", - "RTI receives STOP_REQUEST_REPLY from federate": "STOP_REQUEST_REPLY", - "RTI receives PORT_ABSENT from federate": "PORT_ABSENT", - "RTI receives unidentified message from federate": "UNIDENTIFIED", + "Federate sends STOP_REQ to RTI": "STOP_REQ", + "Federate sends STOP_REQ_REP to RTI": "STOP_REQ_REP", + "Federate receives ACK from RTI": "ACK", + "Federate receives REJECT from RTI": "REJECT", + "Federate receives TIMESTAMP from RTI": "TIMESTAMP", + "Federate receives PTAG from RTI": "PTAG", + "Federate receives TAG from RTI": "TAG", + "Federate receives STOP_REQ from RTI": "STOP_REQ", + "Federate receives STOP_GRN from RTI": "STOP_GRN", + "Federate sends FED_ID to federate": "FED_ID", + "Federate receives FED_ID from federate": "FED_ID", + "RTI sends ACK to federate": "ACK", + "RTI sends REJECT to federate": "REJECT", + "RTI sends TIMESTAMP to federate": "TIMESTAMP", "RTI sends PTAG to federate": "PTAG", "RTI sends TAG to federate": "TAG", - "RTI sends reject to 
federate": "REJECT", - "RTI sends STOP REQUEST to federate": "STOP_RQ", - "RTI accepts joining federate": "JOIN" + "RTI sends STOP_REQ to federate": "STOP_REQ", + "RTI sends STOP_GRN to federate": "STOP_GRN", + "RTI sends JOIN to federate": "JOIN", + "RTI receives TIMESTAMP from federate": "TIMESTAMP", + "RTI receives NET from federate": "NET", + "RTI receives LTC from federate": "LTC", + "RTI receives STOP_REQ from federate": "STOP_REQ", + "RTI receives STOP_REQ_REP from federate": "STOP_REQ_REP" } prune_event_name.setdefault(" ", "UNIDENTIFIED") From 7bd29f766936eac1b308d228dd4391f9c7a3ae71 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Sun, 5 Mar 2023 17:53:20 +0100 Subject: [PATCH 28/61] Minor changes --- util/tracing/visualization/fedsd.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index c97bbbf60a..91811a95ec 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -85,16 +85,15 @@ def load_and_process_csv_file(csv_file, rti) : # Check if the RTI trace file exists if (not exists(args.rti)): - print('Error: No RTI csv tarce file!') - # FIXME: Exit? - exit(0) + print('Error: No RTI csv trace file! Specify with -r argument.') + exit(1) # The RTI and each of the federates have a fixed x coordinate. They will be # saved in a dict x_coor = {} actors = [] padding = 50 - spacing = 200 # Spacing between actors + spacing = 200 # Spacing between federates ############################################################################ #### RTI trace processing @@ -145,7 +144,7 @@ def load_and_process_csv_file(csv_file, rti) : ppt = 0 # Previous physical time cpt = 0 # Current physical time py = 0 # Previous y - min = 10 # Will probably be set manually + min = 10 # Minimum spacing between events when time has not advanced. scale = 1 # Will probably be set manually first_pass = True for index, row in trace_df.iterrows(): From da26cb54903f3ba2cf75ecd28e1ab7e34f2a436a Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Mon, 6 Mar 2023 19:23:28 +0100 Subject: [PATCH 29/61] Exit if trace files don't exist --- util/tracing/trace_to_csv.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index 04a2efb551..9df605f6d2 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -404,6 +404,7 @@ int main(int argc, char* argv[]) { } // Open the trace file. trace_file = open_file(argv[1], "r"); + if (trace_file == NULL) exit(1); // Construct the name of the csv output file and open it. char* root = root_name(argv[1]); @@ -411,12 +412,14 @@ int main(int argc, char* argv[]) { strcpy(csv_filename, root); strcat(csv_filename, ".csv"); output_file = open_file(csv_filename, "w"); + if (output_file == NULL) exit(1); // Construct the name of the summary output file and open it. char summary_filename[strlen(root) + 13]; strcpy(summary_filename, root); strcat(summary_filename, "_summary.csv"); summary_file = open_file(summary_filename, "w"); + if (summary_file == NULL) exit(1); free(root); From 0e03b674e55a909c3ca9384021a5ea2490559780 Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Mon, 6 Mar 2023 19:24:06 +0100 Subject: [PATCH 30/61] Exit rather than segfaulting when reading trace file fails --- util/tracing/trace_util.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/util/tracing/trace_util.h b/util/tracing/trace_util.h index ba69498726..dab1f5e989 100644 --- a/util/tracing/trace_util.h +++ b/util/tracing/trace_util.h @@ -36,10 +36,10 @@ THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /** Macro to use when access to trace file fails. */ #define _LF_TRACE_FAILURE(trace_file) \ do { \ - fprintf(stderr, "WARNING: Access to trace file failed.\n"); \ + fprintf(stderr, "ERROR: Access to trace file failed.\n"); \ fclose(trace_file); \ trace_file = NULL; \ - return -1; \ + exit(1); \ } while(0) /** Buffer for reading object descriptions. Size limit is BUFFER_SIZE bytes. */ From 9c5ca19e72c94b35e666d185a2ab9e86572652d1 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Mon, 6 Mar 2023 15:05:14 -0800 Subject: [PATCH 31/61] Aligning reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index b79cdc9347..ecca71ef19 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit b79cdc93470d9e498fcee49119a84a3656e21a5f +Subproject commit ecca71ef19af91293816c19a8cd23a97c288fee8 From e9b6e35d88545e88ab72f6e9b2cd87467772a805 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Mon, 6 Mar 2023 23:17:43 -0800 Subject: [PATCH 32/61] Make fedsd aware of the changes in tracing mechanis under reactor-c. Align reactor-c. --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/visualization/fedsd.py | 15 +++--- util/tracing/visualization/fedsd_helper.py | 54 +++++++++++----------- 3 files changed, 35 insertions(+), 36 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index ecca71ef19..86487379fa 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit ecca71ef19af91293816c19a8cd23a97c288fee8 +Subproject commit 86487379fa52237995f4b4c391cc60da51009421 diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 91811a95ec..dc0daa81fe 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -55,25 +55,24 @@ def load_and_process_csv_file(csv_file, rti) : df.columns = ['event', 'r', 'partner_id', 'w', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information df['self_id'] = -1 - # df['partner_id'] = int(df['partner_id']) + # Remove non-needed information + df = df.drop(columns=['r', 'w', 't', 'ed']) else: - df.columns = ['event', 'r', 'self_id', 'w', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] + df.columns = ['event', 'r', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information # FIXME: Here, we assume that the coordination in centralized. # To be updated for the decentralized case... 
df['partner_id'] = -1 - # df['self_id'] = int(df['partner_id']) - - # Remove non-needed information - df = df.drop(columns=['r', 'w', 't', 'ed']) + # Remove non-needed information + df = df.drop(columns=['r', 't', 'ed']) # Remove all the lines that do not contain communication information # which boils up to having 'RTI' in the 'event' column - df = df[df['event'].str.contains('RTI') == True] + df = df[df['event'].str.contains('Sending|Receiving') == True] df = df.astype({'self_id': 'int', 'partner_id': 'int'}) # Add an inout column to set the arrow direction - df['inout'] = df['event'].apply(lambda e: 'in' if 'receives' in e else 'out') + df['inout'] = df['event'].apply(lambda e: 'in' if 'Receiving' in e else 'out') # Prune event names df['event'] = df['event'].apply(lambda e: fhlp.prune_event_name[e]) diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index abfe3efa9c..be8840ef9e 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -3,33 +3,33 @@ # Disctionary for pruning event names. Usefule for tracepoint matching and # communication rendering prune_event_name = { - "Federate sends TIMESTAMP to RTI": "TIMESTAMP", - "Federate sends NET to RTI": "NET", - "Federate sends LTC to RTI": "LTC", - "Federate sends STOP_REQ to RTI": "STOP_REQ", - "Federate sends STOP_REQ_REP to RTI": "STOP_REQ_REP", - "Federate receives ACK from RTI": "ACK", - "Federate receives REJECT from RTI": "REJECT", - "Federate receives TIMESTAMP from RTI": "TIMESTAMP", - "Federate receives PTAG from RTI": "PTAG", - "Federate receives TAG from RTI": "TAG", - "Federate receives STOP_REQ from RTI": "STOP_REQ", - "Federate receives STOP_GRN from RTI": "STOP_GRN", - "Federate sends FED_ID to federate": "FED_ID", - "Federate receives FED_ID from federate": "FED_ID", - "RTI sends ACK to federate": "ACK", - "RTI sends REJECT to federate": "REJECT", - "RTI sends TIMESTAMP to federate": "TIMESTAMP", - "RTI sends PTAG to federate": "PTAG", - "RTI sends TAG to federate": "TAG", - "RTI sends STOP_REQ to federate": "STOP_REQ", - "RTI sends STOP_GRN to federate": "STOP_GRN", - "RTI sends JOIN to federate": "JOIN", - "RTI receives TIMESTAMP from federate": "TIMESTAMP", - "RTI receives NET from federate": "NET", - "RTI receives LTC from federate": "LTC", - "RTI receives STOP_REQ from federate": "STOP_REQ", - "RTI receives STOP_REQ_REP from federate": "STOP_REQ_REP" + "Sending ACK": "ACK", + "Sending TIMESTAMP": "TIMESTAMP", + "Sending NET": "NET", + "Sending LTC": "LTC", + "Sending STOP_REQ": "STOP_REQ", + "Sending STOP_REQ_REP": "STOP_REQ_REP", + "Sending FED_ID": "FED_ID", + "Sending PTAG": "PTAG", + "Sending TAG": "TAG", + "Sending STOP_GRN": "STOP_GRN", + "Sending JOIN": "JOIN", + "Sending REJECT": "REJECT", + "Sending RESIGN": "RESIGN", + "Receiving ACK": "ACK", + "Receiving TIMESTAMP": "TIMESTAMP", + "Receiving NET": "NET", + "Receiving LTC": "LTC", + "Receiving STOP_REQ": "STOP_REQ", + "Receiving STOP_REQ_REP": "STOP_REQ_REP", + "Receiving FED_ID": "FED_ID", + "Receiving PTAG": "PTAG", + "Receiving TAG": "TAG", + "Receiving STOP_GRN": "STOP_GRN", + "Receiving REJECT": "REJECT", + "Receiving RESIGN": "RESIGN", + "Receiving PORT_ABS": "PORT_ABS", + "Receiving UNIDENTIFIED": "UNIDENTIFIED" } prune_event_name.setdefault(" ", "UNIDENTIFIED") From 5c9cfc4ae51f7ac3fd5eed26ebd946597a5729e8 Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Tue, 7 Mar 2023 15:19:33 +0100 Subject: [PATCH 33/61] Adjusted to refactored tracing fields and aligned reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/trace_to_chrome.c | 22 +++++++++++++--------- util/tracing/trace_to_csv.c | 23 +++++++++-------------- util/tracing/trace_to_influxdb.c | 10 +++++++--- 4 files changed, 30 insertions(+), 27 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 86487379fa..5eb44e045d 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 86487379fa52237995f4b4c391cc60da51009421 +Subproject commit 5eb44e045d0f5ab8e2a5f55576f56f472de4c734 diff --git a/util/tracing/trace_to_chrome.c b/util/tracing/trace_to_chrome.c index 4615adc8a7..92eeee71dd 100644 --- a/util/tracing/trace_to_chrome.c +++ b/util/tracing/trace_to_chrome.c @@ -78,9 +78,13 @@ size_t read_and_write_trace(FILE* trace_file, FILE* output_file) { // Write each line. for (int i = 0; i < trace_length; i++) { char* reaction_name = "\"UNKNOWN\""; - if (trace[i].id_number >= 0) { + + // Ignore federated trace events. + if (trace[i].event_type > federated) continue; + + if (trace[i].dst_id >= 0) { reaction_name = (char*)malloc(4); - snprintf(reaction_name, 4, "%d", trace[i].id_number); + snprintf(reaction_name, 4, "%d", trace[i].dst_id); } // printf("DEBUG: Reactor's self struct pointer: %p\n", trace[i].pointer); int reactor_index; @@ -121,7 +125,7 @@ size_t read_and_write_trace(FILE* trace_file, FILE* output_file) { } // Default thread id is the worker number. - int thread_id = trace[i].worker; + int thread_id = trace[i].src_id; char* args; asprintf(&args, "{" @@ -214,8 +218,8 @@ size_t read_and_write_trace(FILE* trace_file, FILE* output_file) { ); free(args); - if (trace[i].worker > max_thread_id) { - max_thread_id = trace[i].worker; + if (trace[i].src_id > max_thread_id) { + max_thread_id = trace[i].src_id; } // If the event is reaction_starts and physical_time_only is not set, // then also generate an instantaneous @@ -225,13 +229,13 @@ size_t read_and_write_trace(FILE* trace_file, FILE* output_file) { pid = reactor_index + 1; reaction_name = (char*)malloc(4); char name[13]; - snprintf(name, 13, "reaction %d", trace[i].id_number); + snprintf(name, 13, "reaction %d", trace[i].dst_id); // NOTE: If the reactor has more than 1024 timers and actions, then // there will be a collision of thread IDs here. - thread_id = 1024 + trace[i].id_number; - if (trace[i].id_number > max_reaction_number) { - max_reaction_number = trace[i].id_number; + thread_id = 1024 + trace[i].dst_id; + if (trace[i].dst_id > max_reaction_number) { + max_reaction_number = trace[i].dst_id; } fprintf(output_file, "{" diff --git a/util/tracing/trace_to_csv.c b/util/tracing/trace_to_csv.c index 9df605f6d2..707cc5f43b 100644 --- a/util/tracing/trace_to_csv.c +++ b/util/tracing/trace_to_csv.c @@ -103,11 +103,6 @@ size_t read_and_write_trace() { if (trace_length == 0) return 0; // Write each line. 
for (int i = 0; i < trace_length; i++) { - char* reaction_name = "none"; - if (trace[i].id_number >= 0) { - reaction_name = (char*)malloc(4); - snprintf(reaction_name, 4, "%d", trace[i].id_number); - } // printf("DEBUG: reactor self struct pointer: %p\n", trace[i].pointer); int object_instance = -1; char* reactor_name = get_object_description(trace[i].pointer, &object_instance); @@ -119,11 +114,11 @@ size_t read_and_write_trace() { if (trigger_name == NULL) { trigger_name = "NO TRIGGER"; } - fprintf(output_file, "%s, %s, %s, %d, %lld, %d, %lld, %s, %lld\n", + fprintf(output_file, "%s, %s, %d, %d, %lld, %d, %lld, %s, %lld\n", trace_event_names[trace[i].event_type], reactor_name, - reaction_name, - trace[i].worker, + trace[i].src_id, + trace[i].dst_id, trace[i].logical_time - start_time, trace[i].microstep, trace[i].physical_time - start_time, @@ -159,16 +154,16 @@ size_t read_and_write_trace() { case reaction_ends: // This code relies on the mutual exclusion of reactions in a reactor // and the ordering of reaction_starts and reaction_ends events. - if (trace[i].id_number >= MAX_NUM_REACTIONS) { + if (trace[i].dst_id >= MAX_NUM_REACTIONS) { fprintf(stderr, "WARNING: Too many reactions. Not all will be shown in summary file.\n"); continue; } stats = summary_stats[NUM_EVENT_TYPES + object_instance]; stats->description = reactor_name; - if (trace[i].id_number >= stats->num_reactions_seen) { - stats->num_reactions_seen = trace[i].id_number + 1; + if (trace[i].dst_id >= stats->num_reactions_seen) { + stats->num_reactions_seen = trace[i].dst_id + 1; } - rstats = &stats->reactions[trace[i].id_number]; + rstats = &stats->reactions[trace[i].dst_id]; if (trace[i].event_type == reaction_starts) { rstats->latest_start_time = trace[i].physical_time; } else { @@ -222,7 +217,7 @@ size_t read_and_write_trace() { // Use the reactions array to store data. // There will be two entries per worker, one for waits on the // reaction queue and one for waits while advancing time. - index = trace[i].worker * 2; + index = trace[i].src_id * 2; // Even numbered indices are used for waits on reaction queue. // Odd numbered indices for waits for time advancement. if (trace[i].event_type == scheduler_advancing_time_starts @@ -429,7 +424,7 @@ int main(int argc, char* argv[]) { summary_stats = (summary_stats_t**)calloc(table_size, sizeof(summary_stats_t*)); // Write a header line into the CSV file. - fprintf(output_file, "Event, Reactor, ID, Worker, Elapsed Logical Time, Microstep, Elapsed Physical Time, Trigger, Extra Delay\n"); + fprintf(output_file, "Event, Reactor, Source, Destination, Elapsed Logical Time, Microstep, Elapsed Physical Time, Trigger, Extra Delay\n"); while (read_and_write_trace() != 0) {}; write_summary_file(); diff --git a/util/tracing/trace_to_influxdb.c b/util/tracing/trace_to_influxdb.c index 9fdff85020..a99ae003ec 100644 --- a/util/tracing/trace_to_influxdb.c +++ b/util/tracing/trace_to_influxdb.c @@ -154,10 +154,14 @@ size_t read_and_write_trace() { if (trace_length == 0) return 0; // Write each line. for (int i = 0; i < trace_length; i++) { + + // Ignore federated traces. 
+ if (trace[i].event_type > federated) continue; + char* reaction_name = "none"; - if (trace[i].id_number >= 0) { + if (trace[i].dst_id >= 0) { reaction_name = (char*)malloc(4); - snprintf(reaction_name, 4, "%d", trace[i].id_number); + snprintf(reaction_name, 4, "%d", trace[i].dst_id); } // printf("DEBUG: reactor self struct pointer: %p\n", trace[i].pointer); int object_instance = -1; @@ -179,7 +183,7 @@ size_t read_and_write_trace() { INFLUX_MEAS(trace_event_names[trace[i].event_type]), INFLUX_TAG("Reactor", reactor_name), INFLUX_TAG("Reaction", reaction_name), - INFLUX_F_INT("Worker", trace[i].worker), + INFLUX_F_INT("Worker", trace[i].src_id), INFLUX_F_INT("Logical Time", trace[i].logical_time), INFLUX_F_INT("Microstep", trace[i].microstep), INFLUX_F_STR("Trigger Name", trigger_name), From 6feab54923559106726fb989003ee0c32ee616d6 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Tue, 7 Mar 2023 15:20:04 +0100 Subject: [PATCH 34/61] Display time values at start and end of arrows and adapted to adjusted tracing fields --- util/tracing/visualization/fedsd.py | 28 +++++++++++++------ util/tracing/visualization/fedsd_helper.py | 32 +++++++++++++++++++--- 2 files changed, 48 insertions(+), 12 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index dc0daa81fe..dbafcf864c 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -52,19 +52,19 @@ def load_and_process_csv_file(csv_file, rti) : df = pd.read_csv(csv_file) print if (rti == True): - df.columns = ['event', 'r', 'partner_id', 'w', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] + df.columns = ['event', 'reactor', 'rti_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information df['self_id'] = -1 # Remove non-needed information - df = df.drop(columns=['r', 'w', 't', 'ed']) + df = df.drop(columns=['reactor', 't', 'ed']) else: - df.columns = ['event', 'r', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] + df.columns = ['event', 'reactor', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information # FIXME: Here, we assume that the coordination in centralized. # To be updated for the decentralized case... df['partner_id'] = -1 # Remove non-needed information - df = df.drop(columns=['r', 't', 'ed']) + df = df.drop(columns=['reactor', 't', 'ed']) # Remove all the lines that do not contain communication information # which boils up to having 'RTI' in the 'event' column @@ -184,7 +184,7 @@ def load_and_process_csv_file(csv_file, rti) : # Depending on the direction, compute the possible time interval # and choose the row if (inout == 'out'): - # Compute the possible timestamps interval at the receiver side + # Compute the possible physical time interval at the receiver side physical_time_start = physical_time - clock_sync_error physical_time_end = physical_time + clock_sync_error + network_latency else: @@ -265,18 +265,30 @@ def load_and_process_csv_file(csv_file, rti) : # Now, we need to iterate over the traces to draw the lines f.write(fhlp.svg_string_comment('Draw interactions')) for index, row in trace_df.iterrows(): - # FIXME: Whose physical and logical time? - label = row['event'] + ' @PT=' + str(row['physical_time']) + ' @LT=' + str(row['logical_time']) + # For time labels, display them on the left for the RTI, right for everthing else. 
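# A small sketch of the time-label formatting used just below: physical times
# are recorded in nanoseconds and rendered in microseconds with thousands
# separators. The divisor parameter is an assumption, added only to show how
# the hardwired /1000 noted in the FIXME could be generalized; the patch itself
# always divides by 1000 and anchors RTI labels with "end", federate labels
# with "start".
def format_time_label(physical_time_ns, divisor=1_000):
    """Return e.g. '12,345' for 12_345_678 ns with the default microsecond divisor."""
    return f'{int(physical_time_ns / divisor):,}'

print(format_time_label(12_345_678))  # -> 12,345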
+ anchor = 'start' + if (row['self_id'] < 0): + anchor = 'end' + + # formatted physical time. + # FIXME: Using microseconds is hardwired here. + physical_time = f'{int(row["physical_time"]/1000):,}' + + label = row['event'] + ' @LT=' + str(row['logical_time']) if (row['arrow'] == 'arrow'): f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False)) + f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) elif (row['arrow'] == 'dashedarrow'): f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True)) + f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) elif (row['arrow'] == 'dot'): if (row['inout'] == 'in'): label = "(in) from " + str(row['partner_id']) + ' ' + label else : label = "(out) to " + str(row['partner_id']) + ' ' + label - f.write(fhlp.svg_string_draw_dot(row['x1'], row['y1'], label)) + f.write(fhlp.svg_string_draw_dot(row['x1'], row['y1'], physical_time + ': ' + label)) + elif (row['arrow'] == 'marked'): + f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) f.write('\n\n\n') diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index be8840ef9e..6adf4d19b5 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -125,6 +125,30 @@ def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed): str_line3 = svg_string_draw_label(x1, y1, x2, y2, label) return str_line1 + str_line2 + str_line3 +def svg_string_draw_side_label(x, y, label, anchor="start") : + ''' + Put a label to the right of the x, y point, + unless x is small, in which case put it to the left. + + Args: + * x: Int X coordinate of the source point + * y: Int Y coordinate of the source point + * label: Label to put by the point. + * anchor: One of "start", "middle", or "end" to specify the text-anchor. + Returns: + * String: the svg string of the text + ''' + offset = 5 + if (anchor == 'end'): + offset = -5 + elif (anchor == 'middle'): + offset = 0 + str_line = '\t'+label+'\n' + + return str_line def svg_string_comment(comment): ''' @@ -141,16 +165,16 @@ def svg_string_comment(comment): def svg_string_draw_dot(x, y, label) : ''' - Constructs the svg html string to draw the arrow end + Constructs the svg html string to draw at a dot. Args: * x: Int X coordinate of the dot * y: Int Y coordinate of the dot - * label: String + * label1: String to draw Returns: * String: the svg string of the triangle ''' str_line = '' - str_line = '\t\n' - str_line = str_line + '\t'+label+'\n' + str_line = '\t\n' + str_line = str_line + '\t'+label+'\n' return str_line \ No newline at end of file From bc19dd6b7b462c0e5098ef11275def69b01868f8 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 7 Mar 2023 14:00:46 -0800 Subject: [PATCH 35/61] Fix of the extreme rare case when two tracepoints have exactly the same physical time + More genralization use of the federate and RTI traces. 
--- util/tracing/visualization/fedsd.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index dbafcf864c..bc2c73ee0d 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -52,9 +52,9 @@ def load_and_process_csv_file(csv_file, rti) : df = pd.read_csv(csv_file) print if (rti == True): - df.columns = ['event', 'reactor', 'rti_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] + df.columns = ['event', 'reactor', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] # Set that these are the RTI information - df['self_id'] = -1 + # df['self_id'] = -1 # Remove non-needed information df = df.drop(columns=['reactor', 't', 'ed']) else: @@ -62,7 +62,7 @@ def load_and_process_csv_file(csv_file, rti) : # Set that these are the RTI information # FIXME: Here, we assume that the coordination in centralized. # To be updated for the decentralized case... - df['partner_id'] = -1 + # df['partner_id'] = -1 # Remove non-needed information df = df.drop(columns=['reactor', 't', 'ed']) @@ -98,7 +98,7 @@ def load_and_process_csv_file(csv_file, rti) : #### RTI trace processing ############################################################################ trace_df = load_and_process_csv_file(args.rti, True) - x_coor[-1] = padding + x_coor[-1] = padding * 5 actors.append(-1) # Temporary use trace_df['x1'] = x_coor[-1] @@ -119,7 +119,7 @@ def load_and_process_csv_file(csv_file, rti) : # Add to the list of sequence diagram actors actors.append(fed_id) # Derive the x coordinate of the actor - x_coor[fed_id] = padding + (spacing * (len(actors)-1)) + x_coor[fed_id] = (padding * 5) + (spacing * (len(actors)-1)) fed_df['x1'] = x_coor[fed_id] # Append into trace_df trace_df = trace_df.append(fed_df, sort=False, ignore_index=True) @@ -146,17 +146,19 @@ def load_and_process_csv_file(csv_file, rti) : min = 10 # Minimum spacing between events when time has not advanced. 
scale = 1 # Will probably be set manually first_pass = True + print(trace_df) for index, row in trace_df.iterrows(): if (not first_pass) : - cpt = int(row['physical_time']) - # print('cpt = '+str(cpt)+' and ppt = '+ppt) + cpt = row['physical_time'] + print('cpt = '+str(cpt)+' and ppt = '+str(ppt)) # From the email: # Y = T_previous + min + log10(1 + (T - T_previous)*scale) # But rather think it should be: - py = math.ceil(py + min + (1 + math.log10(cpt - ppt) * scale)) + if (cpt != ppt) : + py = math.ceil(py + min + (1 + math.log10(cpt - ppt) * scale)) trace_df.at[index, 'y1'] = py - ppt = int(row['physical_time']) + ppt = row['physical_time'] py = trace_df.at[index, 'y1'] first_pass = False @@ -173,8 +175,8 @@ def load_and_process_csv_file(csv_file, rti) : # If the tracepoint is pending, proceed to look for a match if (trace_df.at[index,'arrow'] == 'pending') : physical_time = trace_df.at[index,'physical_time'] - self_id = int(trace_df.at[index,'self_id']) - partner_id = int(trace_df.at[index,'partner_id']) + self_id = trace_df.at[index,'self_id'] + partner_id = trace_df.at[index,'partner_id'] event = trace_df.at[index,'event'] logical_time = trace_df.at[index, 'logical_time'] microstep = trace_df.at[index, 'microstep'] From 284121193ccf1cc3d77fa3018da57ed1ea56d480 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 7 Mar 2023 17:13:58 -0800 Subject: [PATCH 36/61] Enlarge left and right padding in the svg figure, so that time is not clipped --- util/tracing/visualization/fedsd.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index bc2c73ee0d..bb44927f31 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -98,7 +98,7 @@ def load_and_process_csv_file(csv_file, rti) : #### RTI trace processing ############################################################################ trace_df = load_and_process_csv_file(args.rti, True) - x_coor[-1] = padding * 5 + x_coor[-1] = padding * 2 actors.append(-1) # Temporary use trace_df['x1'] = x_coor[-1] @@ -119,7 +119,7 @@ def load_and_process_csv_file(csv_file, rti) : # Add to the list of sequence diagram actors actors.append(fed_id) # Derive the x coordinate of the actor - x_coor[fed_id] = (padding * 5) + (spacing * (len(actors)-1)) + x_coor[fed_id] = (padding * 2) + (spacing * (len(actors)-1)) fed_df['x1'] = x_coor[fed_id] # Append into trace_df trace_df = trace_df.append(fed_df, sort=False, ignore_index=True) @@ -146,11 +146,10 @@ def load_and_process_csv_file(csv_file, rti) : min = 10 # Minimum spacing between events when time has not advanced. 
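# Illustrative sketch of the vertical-spacing rule applied in the surrounding
# hunks: a row that shares its physical timestamp with the previous row keeps
# the previous y coordinate, and otherwise y advances by a minimum gap plus a
# log10-compressed term so that large real-time gaps do not stretch the
# diagram. The function and argument names below (next_y, prev_y, min_gap) are
# invented for illustration; min_gap also avoids reusing the script's `min`
# name, which shadows the Python builtin.
import math

def next_y(prev_y, prev_time, cur_time, min_gap=10, scale=1):
    # Physical time did not advance: stay at the same row height.
    if cur_time == prev_time:
        return prev_y
    # Otherwise move down by the minimum gap plus a compressed time delta.
    return math.ceil(prev_y + min_gap + (1 + math.log10(cur_time - prev_time) * scale))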
scale = 1 # Will probably be set manually first_pass = True - print(trace_df) for index, row in trace_df.iterrows(): if (not first_pass) : cpt = row['physical_time'] - print('cpt = '+str(cpt)+' and ppt = '+str(ppt)) + # print('cpt = '+str(cpt)+' and ppt = '+str(ppt)) # From the email: # Y = T_previous + min + log10(1 + (T - T_previous)*scale) # But rather think it should be: @@ -239,7 +238,7 @@ def load_and_process_csv_file(csv_file, rti) : ############################################################################ #### Write to svg file ############################################################################ - svg_width = padding + (len(actors) - 1) * spacing + padding + 200 + svg_width = padding * 2 + (len(actors) - 1) * spacing + padding * 2 + 200 svg_height = padding + trace_df.iloc[-1]['y1'] with open('trace_svg.html', 'w', encoding='utf-8') as f: From 530297589d9987029f404d9683d7e4842b327140 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 7 Mar 2023 18:08:06 -0800 Subject: [PATCH 37/61] Simplifying fedsd and adding support for more tracepoints --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/visualization/fedsd.py | 33 ++++++++-------------- util/tracing/visualization/fedsd_helper.py | 7 ++++- 3 files changed, 18 insertions(+), 24 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 5eb44e045d..f45ab4284e 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 5eb44e045d0f5ab8e2a5f55576f56f472de4c734 +Subproject commit f45ab4284e93db1a3a861504df998448df9f687b diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index bb44927f31..da6dd421f7 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -37,34 +37,20 @@ network_latency = 100000000 # That is 100us -def load_and_process_csv_file(csv_file, rti) : +def load_and_process_csv_file(csv_file) : ''' Loads and processes the csv entries, based on the type of the actor (if RTI or federate). Args: * csv_file: String file name - * rti: Bool True if it the RTI, False otherwise Returns: * The processed dataframe. ''' - # Load RTI tracepoints, rename the columns and clean non useful data + # Load tracepoints, rename the columns and clean non useful data df = pd.read_csv(csv_file) - print - if (rti == True): - df.columns = ['event', 'reactor', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] - # Set that these are the RTI information - # df['self_id'] = -1 - # Remove non-needed information - df = df.drop(columns=['reactor', 't', 'ed']) - else: - df.columns = ['event', 'reactor', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] - # Set that these are the RTI information - # FIXME: Here, we assume that the coordination in centralized. - # To be updated for the decentralized case... 
- # df['partner_id'] = -1 - # Remove non-needed information - df = df.drop(columns=['reactor', 't', 'ed']) + df.columns = ['event', 'reactor', 'self_id', 'partner_id', 'logical_time', 'microstep', 'physical_time', 't', 'ed'] + df = df.drop(columns=['reactor', 't', 'ed']) # Remove all the lines that do not contain communication information # which boils up to having 'RTI' in the 'event' column @@ -97,7 +83,7 @@ def load_and_process_csv_file(csv_file, rti) : ############################################################################ #### RTI trace processing ############################################################################ - trace_df = load_and_process_csv_file(args.rti, True) + trace_df = load_and_process_csv_file(args.rti) x_coor[-1] = padding * 2 actors.append(-1) # Temporary use @@ -112,7 +98,7 @@ def load_and_process_csv_file(csv_file, rti) : if (not exists(fed_trace)): print('Warning: Trace file ' + fed_trace + ' does not exist! Will resume though') continue - fed_df = load_and_process_csv_file(fed_trace, False) + fed_df = load_and_process_csv_file(fed_trace) if (not fed_df.empty): # Get the federate id number fed_id = fed_df.iloc[-1]['self_id'] @@ -132,7 +118,7 @@ def load_and_process_csv_file(csv_file, rti) : # FIXME: For now, we need to remove the rows with negative physical time values... # Until the reason behinf such values is investigated. The negative physical # time is when federates are still in the process of joining - trace_df = trace_df[trace_df['physical_time'] >= 0] + # trace_df = trace_df[trace_df['physical_time'] >= 0] # Add the Y column and initialize it with the padding value trace_df['y1'] = math.ceil(padding * 3 / 2) # Or set a small shift @@ -275,7 +261,10 @@ def load_and_process_csv_file(csv_file, rti) : # FIXME: Using microseconds is hardwired here. 
physical_time = f'{int(row["physical_time"]/1000):,}' - label = row['event'] + ' @LT=' + str(row['logical_time']) + if (row['logical_time'] == -1678240241788173894) : + label = row['event'] + ' @LT=+oo' + else: + label = row['event'] + ' @LT=' + str(row['logical_time']) if (row['arrow'] == 'arrow'): f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False)) f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index 6adf4d19b5..c7232fe48c 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -16,6 +16,8 @@ "Sending JOIN": "JOIN", "Sending REJECT": "REJECT", "Sending RESIGN": "RESIGN", + "Sending PORT_ABS": "PORT_ABS", + "Sending CLOSE_REQ": "CLOSE_REQ", "Receiving ACK": "ACK", "Receiving TIMESTAMP": "TIMESTAMP", "Receiving NET": "NET", @@ -29,7 +31,10 @@ "Receiving REJECT": "REJECT", "Receiving RESIGN": "RESIGN", "Receiving PORT_ABS": "PORT_ABS", - "Receiving UNIDENTIFIED": "UNIDENTIFIED" + "Receiving UNIDENTIFIED": "UNIDENTIFIED", + "Receiving CLOSE_REQ": "CLOSE_REQ", + "Receiving UNIDENTIFIED": "UNIDENTIFIED", + "Receiving TAGGED_MSG": "TAGGED_MSG" } prune_event_name.setdefault(" ", "UNIDENTIFIED") From 3f8692496243780f12d6d73dad78457e4e34d546 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Wed, 8 Mar 2023 16:40:27 -0800 Subject: [PATCH 38/61] Reflecting new tracepoints + Aligning reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/visualization/fedsd_helper.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index f45ab4284e..aa2286096a 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit f45ab4284e93db1a3a861504df998448df9f687b +Subproject commit aa2286096ac3fb40cfc2e55edaa3e8114320ee99 diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index c7232fe48c..513cdd2ff2 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -18,6 +18,8 @@ "Sending RESIGN": "RESIGN", "Sending PORT_ABS": "PORT_ABS", "Sending CLOSE_REQ": "CLOSE_REQ", + "Sending TAGGED_MSG": "TAGGED_MSG", + "Sending P2P_TAGGED_MSG": "P2P_TAGGED_MSG", "Receiving ACK": "ACK", "Receiving TIMESTAMP": "TIMESTAMP", "Receiving NET": "NET", @@ -31,10 +33,12 @@ "Receiving REJECT": "REJECT", "Receiving RESIGN": "RESIGN", "Receiving PORT_ABS": "PORT_ABS", - "Receiving UNIDENTIFIED": "UNIDENTIFIED", "Receiving CLOSE_REQ": "CLOSE_REQ", "Receiving UNIDENTIFIED": "UNIDENTIFIED", - "Receiving TAGGED_MSG": "TAGGED_MSG" + # "Receiving ADDRESS_QUERY": "ADDRESS_QUERY", + # "Receiving ADDRESS_ADVERTISEMENT": "ADDRESS_ADVERTISEMENT", + "Receiving TAGGED_MSG": "TAGGED_MSG", + "Receiving P2P_TAGGED_MSG": "P2P_TAGGED_MSG" } prune_event_name.setdefault(" ", "UNIDENTIFIED") From 990f3f376b5e61c09562ebc4cdcea49e7715a39d Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Wed, 8 Mar 2023 22:29:13 -0800 Subject: [PATCH 39/61] Simplify fedsd call by including the csv transformation in the script (> fedsd *.lft) --- lib/scripts/launch-fedsd.sh | 42 +++++++++++++++++++++++++++++++++++-- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/lib/scripts/launch-fedsd.sh b/lib/scripts/launch-fedsd.sh index 9d570608e4..9d44ab44c7 100755 --- a/lib/scripts/launch-fedsd.sh +++ 
b/lib/scripts/launch-fedsd.sh @@ -53,6 +53,44 @@ else fatal_error "Unable to determine absolute path to $0." fi -# FIXME: Check that python3 is in the path. +# Get the lft files +lft_files_list=$@ + +# Initialize variables +csv_files_list='' +extension='.csv' +rti_file='' + +# Iterate over the lft file list to: +# - First, transform into csv +# - Second, construct the csv file name +# - Then construct the csv file list +# The csv file list does not include the rti, which is put in a separate variable +for each_lft_file in $lft_files_list + do + # Transform to csv + trace_to_csv $each_lft_file + # Get the file name + csv=${each_lft_file%.*} + if [ $csv == 'rti' ] + then + # Set the rti csv file + rti_file='rti.csv' + else + # Construct the csv file name and add it to the list + csv_files_list="$csv$extension $csv_files_list" + fi + done -python3 "${base}/util/tracing/visualization/fedsd.py" "$@" +echo $lft_files_list +echo $rti_file +echo $csv_files_list + +# FIXME: Check that python3 is in the path. +if [ $rti_file == '' ] +then + # FIXME: Support the case where no rti file is given + python3 "${base}/util/tracing/visualization/fedsd.py" "-f" $csv_files_list +else + python3 "${base}/util/tracing/visualization/fedsd.py" "-r" "$rti_file" "-f" $csv_files_list +fi From 62661a3f5d51621ae8998865eaef3ca27893cc8e Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 9 Mar 2023 15:53:45 +0100 Subject: [PATCH 40/61] Added usage check --- lib/scripts/launch-fedsd.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/scripts/launch-fedsd.sh b/lib/scripts/launch-fedsd.sh index 9d44ab44c7..96ad9fe68e 100755 --- a/lib/scripts/launch-fedsd.sh +++ b/lib/scripts/launch-fedsd.sh @@ -56,6 +56,12 @@ fi # Get the lft files lft_files_list=$@ +if [ -z "$lft_files_list" ] +then + echo "Usage: fedsd [lft files]" + exit 1 +fi + # Initialize variables csv_files_list='' extension='.csv' From 6348d23c491798fc21ca54e9dec13975d4a0a7dc Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 9 Mar 2023 15:54:02 +0100 Subject: [PATCH 41/61] Aligned reactor C --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index aa2286096a..81a4fab5b0 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit aa2286096ac3fb40cfc2e55edaa3e8114320ee99 +Subproject commit 81a4fab5b0d139c39936e2f9cf9cdb1fc1e4a4fe From 17216fa4e8a47d49826ef897acd323c604c92299 Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Thu, 9 Mar 2023 15:54:17 +0100 Subject: [PATCH 42/61] Added CSS styling and arrowhead rotation --- util/tracing/visualization/fedsd.py | 44 +++++++++-- util/tracing/visualization/fedsd_helper.py | 87 +++++++++++++++------- 2 files changed, 99 insertions(+), 32 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index da6dd421f7..5e66dc0f9f 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -12,6 +12,26 @@ - 'pending': pending ''' +# Styles to determine appearance: +css_style = ' \ +' #!/usr/bin/env python3 import argparse # For arguments parsing @@ -94,6 +114,7 @@ def load_and_process_csv_file(csv_file) : ############################################################################ # Loop over the given list of federates trace files if (args.federates) : + print('***************'+str(args.federates[0])) for fed_trace in args.federates[0]: if (not exists(fed_trace)): print('Warning: Trace file ' + fed_trace + ' does not exist! Will resume though') @@ -104,6 +125,7 @@ def load_and_process_csv_file(csv_file) : fed_id = fed_df.iloc[-1]['self_id'] # Add to the list of sequence diagram actors actors.append(fed_id) + print('***************'+str(fed_id)+": "+str(actors)) # Derive the x coordinate of the actor x_coor[fed_id] = (padding * 2) + (spacing * (len(actors)-1)) fed_df['x1'] = x_coor[fed_id] @@ -129,7 +151,7 @@ def load_and_process_csv_file(csv_file) : ppt = 0 # Previous physical time cpt = 0 # Current physical time py = 0 # Previous y - min = 10 # Minimum spacing between events when time has not advanced. + min = 15 # Minimum spacing between events when time has not advanced. scale = 1 # Will probably be set manually first_pass = True for index, row in trace_df.iterrows(): @@ -234,6 +256,8 @@ def load_and_process_csv_file(csv_file) : f.write('\n\n') f.write('\n') + + f.write(css_style) # Print the circles and the names for key in x_coor: @@ -262,21 +286,29 @@ def load_and_process_csv_file(csv_file) : physical_time = f'{int(row["physical_time"]/1000):,}' if (row['logical_time'] == -1678240241788173894) : - label = row['event'] + ' @LT=+oo' + # FIXME: This isn't right. NEVER == -9223372036854775808. 
+ label = row['event'] + '(NEVER)' else: - label = row['event'] + ' @LT=' + str(row['logical_time']) + label = row['event'] + '(' + f'{int(row["logical_time"]):,}' + ', ' + str(row['microstep']) + ')' + if (row['arrow'] == 'arrow'): - f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False)) + f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False, row['event'])) f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) elif (row['arrow'] == 'dashedarrow'): - f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True)) + f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True, row['event'])) f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) elif (row['arrow'] == 'dot'): if (row['inout'] == 'in'): label = "(in) from " + str(row['partner_id']) + ' ' + label else : label = "(out) to " + str(row['partner_id']) + ' ' + label - f.write(fhlp.svg_string_draw_dot(row['x1'], row['y1'], physical_time + ': ' + label)) + + if (anchor == 'end'): + f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) + f.write(fhlp.svg_string_draw_dot(row['x1'], row['y1'], label)) + else: + f.write(fhlp.svg_string_draw_dot_with_time(row['x1'], row['y1'], physical_time, label)) + elif (row['arrow'] == 'marked'): f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index 513cdd2ff2..a2b22fe657 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -16,10 +16,10 @@ "Sending JOIN": "JOIN", "Sending REJECT": "REJECT", "Sending RESIGN": "RESIGN", - "Sending PORT_ABS": "PORT_ABS", + "Sending ABS": "ABS", "Sending CLOSE_REQ": "CLOSE_REQ", - "Sending TAGGED_MSG": "TAGGED_MSG", - "Sending P2P_TAGGED_MSG": "P2P_TAGGED_MSG", + "Sending MSG": "MSG", + "Sending P2P_MSG": "P2P_MSG", "Receiving ACK": "ACK", "Receiving TIMESTAMP": "TIMESTAMP", "Receiving NET": "NET", @@ -32,13 +32,13 @@ "Receiving STOP_GRN": "STOP_GRN", "Receiving REJECT": "REJECT", "Receiving RESIGN": "RESIGN", - "Receiving PORT_ABS": "PORT_ABS", + "Receiving ABS": "ABS", "Receiving CLOSE_REQ": "CLOSE_REQ", "Receiving UNIDENTIFIED": "UNIDENTIFIED", # "Receiving ADDRESS_QUERY": "ADDRESS_QUERY", # "Receiving ADDRESS_ADVERTISEMENT": "ADDRESS_ADVERTISEMENT", - "Receiving TAGGED_MSG": "TAGGED_MSG", - "Receiving P2P_TAGGED_MSG": "P2P_TAGGED_MSG" + "Receiving MSG": "MSG", + "Receiving P2P_MSG": "P2P_MSG" } prune_event_name.setdefault(" ", "UNIDENTIFIED") @@ -47,7 +47,7 @@ ### Routines to write to csv file ################################################################################ -def svg_string_draw_line(x1, y1, x2, y2, dashed): +def svg_string_draw_line(x1, y1, x2, y2, dashed, type=''): ''' Constructs the svg html string to draw a line from (x1, y1) to (x2, y2). The line can be continous or dashed. 
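# The arrowhead and label rotation in the following hunks is computed with
# math.atan() plus hand-tuned sign and quadrant corrections (and 3.14 standing
# in for pi), which is what the in-code FIXMEs flag as "not that accurate".
# For reference only, a quadrant-safe way to get the clockwise SVG angle of the
# segment is atan2; this is an illustrative sketch with a made-up function
# name, not code taken from the patches:
import math

def svg_segment_angle_deg(x1, y1, x2, y2):
    # SVG's y axis points downward and rotate() turns clockwise, so
    # atan2(dy, dx) already yields the angle that aligns the +x direction
    # with the segment from (x1, y1) to (x2, y2).
    return math.degrees(math.atan2(y2 - y1, x2 - x1))

# Keeping label text upright on right-to-left arrows would still require an
# extra 180-degree flip on top of this angle.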
@@ -59,31 +59,50 @@ def svg_string_draw_line(x1, y1, x2, y2, dashed): * y2: Int Y coordinate of the sink point * dashed: Bool True if the line is dashed, continous otherwise Returns: - * String: the svg string of the line + * String: the svg string of the lineĀ© ''' - str_line = '\t\n' return str_line -def svg_string_draw_arrow_head(x1, x2, y2) : +def svg_string_draw_arrow_head(x1, y1, x2, y2, type='') : ''' Constructs the svg html string to draw the arrow end Args: * x1: Int X coordinate of the source point + * y1: Int Y coordinate of the source point * x2: Int X coordinate of the sink point * y2: Int Y coordinate of the sink point + * type: The type (for styling) Returns: * String: the svg string of the triangle ''' + + rotation = - math.ceil(math.atan((x2-x1)/(y2-y1)) * 180 / 3.14) - 90 + style = '' + if (type): + style = ' class="'+type+'"' + str_line = '' if (x1 > x2) : - str_line = '\t\n' + str_line = '\t\n' else : - str_line = '\t\n' + str_line = '\t\n' + return str_line @@ -103,18 +122,18 @@ def svg_string_draw_label(x1, y1, x2, y2, label) : # FIXME: Need further improvement, based of the position of the arrows # FIXME: Rotation value is not that accurate. if (x2 < x1) : + # Left-going arrow. rotation = - math.ceil(math.atan((x2-x1)/(y2-y1)) * 180 / 3.14) - 90 - str_line = '\t'+label+'\n' + str_line = '\t'+label+'\n' else : + # Right-going arrow. rotation = - math.ceil(math.atan((x1-x2)/(y1-y2)) * 180 / 3.14) + 90 - x = math.ceil((x2 + x1) / 2) - y = math.ceil((y1 + y2) / 2) - 5 - str_line = '\t'+label+'\n' + str_line = '\t'+label+'\n' #print('rot = '+str(rotation)+' x1='+str(x1)+' y1='+str(y1)+' x2='+str(x2)+' y2='+str(y2)) return str_line -def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed): +def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed, type=''): ''' Constructs the svg html string to draw the arrow from (x1, y1) to (x2, y2). The arrow end is constructed, together with the label @@ -129,8 +148,8 @@ def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed): Returns: * String: the svg string of the arrow ''' - str_line1 = svg_string_draw_line(x1, y1, x2, y2, dashed) - str_line2 = svg_string_draw_arrow_head(x1, x2, y2) + str_line1 = svg_string_draw_line(x1, y1, x2, y2, dashed, type) + str_line2 = svg_string_draw_arrow_head(x1, y1, x2, y2, type) str_line3 = svg_string_draw_label(x1, y1, x2, y2, label) return str_line1 + str_line2 + str_line3 @@ -153,9 +172,8 @@ def svg_string_draw_side_label(x, y, label, anchor="start") : elif (anchor == 'middle'): offset = 0 str_line = '\t'+label+'\n' + +' class="time"' \ + +' transform="translate('+str(x+offset)+', '+str(y+5)+')">'+label+'\n' return str_line @@ -179,11 +197,28 @@ def svg_string_draw_dot(x, y, label) : Args: * x: Int X coordinate of the dot * y: Int Y coordinate of the dot - * label1: String to draw + * label: String to draw Returns: * String: the svg string of the triangle ''' str_line = '' str_line = '\t\n' - str_line = str_line + '\t'+label+'\n' - return str_line \ No newline at end of file + str_line = str_line + '\t'+label+'\n' + return str_line + +def svg_string_draw_dot_with_time(x, y, time, label) : + ''' + Constructs the svg html string to draw at a dot with a prefixed physical time. 
+ + Args: + * x: Int X coordinate of the dot + * y: Int Y coordinate of the dot + * time: The time + * label: String to draw + Returns: + * String: the svg string of the triangle + ''' + str_line = '' + str_line = '\t\n' + str_line = str_line + '\t '+time+': '+label+'\n' + return str_line From 2f41482246b287fb6561bc1e3c29eeffd549770a Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 9 Mar 2023 18:03:49 +0100 Subject: [PATCH 43/61] Removed dependance on physical time for matching --- util/tracing/visualization/fedsd.py | 25 ++-------------------- util/tracing/visualization/fedsd_helper.py | 18 ++++++---------- 2 files changed, 9 insertions(+), 34 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 5e66dc0f9f..c63b0042d3 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -114,7 +114,6 @@ def load_and_process_csv_file(csv_file) : ############################################################################ # Loop over the given list of federates trace files if (args.federates) : - print('***************'+str(args.federates[0])) for fed_trace in args.federates[0]: if (not exists(fed_trace)): print('Warning: Trace file ' + fed_trace + ' does not exist! Will resume though') @@ -125,7 +124,6 @@ def load_and_process_csv_file(csv_file) : fed_id = fed_df.iloc[-1]['self_id'] # Add to the list of sequence diagram actors actors.append(fed_id) - print('***************'+str(fed_id)+": "+str(actors)) # Derive the x coordinate of the actor x_coor[fed_id] = (padding * 2) + (spacing * (len(actors)-1)) fed_df['x1'] = x_coor[fed_id] @@ -189,21 +187,8 @@ def load_and_process_csv_file(csv_file) : microstep = trace_df.at[index, 'microstep'] inout = trace_df.at[index, 'inout'] - - # Depending on the direction, compute the possible time interval - # and choose the row - if (inout == 'out'): - # Compute the possible physical time interval at the receiver side - physical_time_start = physical_time - clock_sync_error - physical_time_end = physical_time + clock_sync_error + network_latency - else: - physical_time_start = physical_time - network_latency - clock_sync_error - physical_time_end = physical_time + clock_sync_error - # Match tracepoints matching_df = trace_df[\ - (trace_df['physical_time'] >= physical_time_start) & \ - (trace_df['physical_time'] <= physical_time_end) & \ (trace_df['inout'] != inout) & \ (trace_df['self_id'] == partner_id) & \ (trace_df['partner_id'] == self_id) & \ @@ -238,10 +223,7 @@ def load_and_process_csv_file(csv_file) : # Mark it, so not to consider it anymore trace_df.at[matching_index, 'arrow'] = 'marked' - if (len(matching_df.index) == 1) : - trace_df.at[index, 'arrow'] = 'arrow' - else : - trace_df.at[index, 'arrow'] = 'dashedarrow' + trace_df.at[index, 'arrow'] = 'arrow' ############################################################################ #### Write to svg file @@ -292,10 +274,7 @@ def load_and_process_csv_file(csv_file) : label = row['event'] + '(' + f'{int(row["logical_time"]):,}' + ', ' + str(row['microstep']) + ')' if (row['arrow'] == 'arrow'): - f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, False, row['event'])) - f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) - elif (row['arrow'] == 'dashedarrow'): - f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], label, True, row['event'])) + f.write(fhlp.svg_string_draw_arrow(row['x1'], row['y1'], row['x2'], row['y2'], 
label, row['event'])) f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) elif (row['arrow'] == 'dot'): if (row['inout'] == 'in'): diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index a2b22fe657..1229e3bcd8 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -47,26 +47,22 @@ ### Routines to write to csv file ################################################################################ -def svg_string_draw_line(x1, y1, x2, y2, dashed, type=''): +def svg_string_draw_line(x1, y1, x2, y2, type=''): ''' - Constructs the svg html string to draw a line from (x1, y1) to (x2, y2). The - line can be continous or dashed. + Constructs the svg html string to draw a line from (x1, y1) to (x2, y2). Args: * x1: Int X coordinate of the source point * y1: Int Y coordinate of the source point * x2: Int X coordinate of the sink point * y2: Int Y coordinate of the sink point - * dashed: Bool True if the line is dashed, continous otherwise + * type: The type of the message (for styling) Returns: * String: the svg string of the lineĀ© ''' str_line = '\t\n' return str_line @@ -115,7 +111,7 @@ def svg_string_draw_label(x1, y1, x2, y2, label) : * y1: Int Y coordinate of the source point * x2: Int X coordinate of the sink point * y2: Int Y coordinate of the sink point - * label: Bool True if the line is dashed, continous otherwise + * label: The label to draw Returns: * String: the svg string of the text ''' @@ -133,7 +129,7 @@ def svg_string_draw_label(x1, y1, x2, y2, label) : return str_line -def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed, type=''): +def svg_string_draw_arrow(x1, y1, x2, y2, label, type=''): ''' Constructs the svg html string to draw the arrow from (x1, y1) to (x2, y2). 
The arrow end is constructed, together with the label @@ -144,11 +140,11 @@ def svg_string_draw_arrow(x1, y1, x2, y2, label, dashed, type=''): * x2: Int X coordinate of the sink point * y2: Int Y coordinate of the sink point * label: String Label to draw on top of the arrow - * dashed: Bool True if the line is dashed, continous otherwise + * type: The type of the message Returns: * String: the svg string of the arrow ''' - str_line1 = svg_string_draw_line(x1, y1, x2, y2, dashed, type) + str_line1 = svg_string_draw_line(x1, y1, x2, y2, type) str_line2 = svg_string_draw_arrow_head(x1, y1, x2, y2, type) str_line3 = svg_string_draw_label(x1, y1, x2, y2, label) return str_line1 + str_line2 + str_line3 From d08dd7110d47f24b217b5483eee705aaca904549 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Thu, 9 Mar 2023 13:48:34 -0800 Subject: [PATCH 44/61] Use the federate name instead of the id in the diagram --- util/tracing/visualization/fedsd.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index c63b0042d3..543dea70a4 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -37,6 +37,7 @@ import argparse # For arguments parsing import pandas as pd # For csv manipulation from os.path import exists +from pathlib import Path import math import fedsd_helper as fhlp @@ -97,6 +98,7 @@ def load_and_process_csv_file(csv_file) : # saved in a dict x_coor = {} actors = [] + actors_names = {} padding = 50 spacing = 200 # Spacing between federates @@ -106,6 +108,7 @@ def load_and_process_csv_file(csv_file) : trace_df = load_and_process_csv_file(args.rti) x_coor[-1] = padding * 2 actors.append(-1) + actors_names[-1] = "RTI" # Temporary use trace_df['x1'] = x_coor[-1] @@ -122,8 +125,10 @@ def load_and_process_csv_file(csv_file) : if (not fed_df.empty): # Get the federate id number fed_id = fed_df.iloc[-1]['self_id'] - # Add to the list of sequence diagram actors + # Add to the list of sequence diagram actors and add the name actors.append(fed_id) + actors_names[fed_id] = Path(fed_trace).stem + print(actors_names) # Derive the x coordinate of the actor x_coor[fed_id] = (padding * 2) + (spacing * (len(actors)-1)) fed_df['x1'] = x_coor[fed_id] @@ -243,13 +248,12 @@ def load_and_process_csv_file(csv_file) : # Print the circles and the names for key in x_coor: + title = actors_names[key] if (key == -1): f.write(fhlp.svg_string_comment('RTI Actor and line')) - title = 'RTI' center = 15 else: - f.write(fhlp.svg_string_comment('Federate '+str(key)+' Actor and line')) - title = str(key) + f.write(fhlp.svg_string_comment('Federate '+str(key)+': ' + title + ' Actor and line')) center = 5 f.write(fhlp.svg_string_draw_line(x_coor[key], math.ceil(padding/2), x_coor[key], svg_height, False)) f.write('\t\n') From 6804dbed2b866f6027324237c97df68c597d2048 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Thu, 9 Mar 2023 14:38:05 -0800 Subject: [PATCH 45/61] Add logical time advance in the diagram --- util/tracing/visualization/fedsd.py | 17 ++++++++++++----- util/tracing/visualization/fedsd_helper.py | 18 +++++++++++++++++- 2 files changed, 29 insertions(+), 6 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 543dea70a4..22f5c293f7 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -4,12 +4,12 @@ (x1, y1) --> (x2, y2), when a possible result (could be not tilted)? 
If not arrow, then triangle with text -In the dataframe, each arrow will be marked as: +In the dataframe, each row will be marked with one of these values: - 'arrow': draw a non-dashed arrow - - 'dashedarrow': draw dashed arrow - - 'dot': draw the triangle only + - 'dot': draw a dot only - 'marked': marked, not to be drawn - 'pending': pending + - 'adv': for reporting logical time advancing, draw a simple dash ''' # Styles to determine appearance: @@ -25,6 +25,7 @@ .PTAG { stroke: #06d6a0; fill: #06d6a0} \ .TAG { stroke: #08a578; fill: #08a578} \ .TIMESTAMP { stroke: grey; fill: grey } \ + .ADV {stroke-linecap="round" ; stroke: "red" ; fill: "red"} \ \ text { \ font-size: smaller; \ @@ -75,7 +76,7 @@ def load_and_process_csv_file(csv_file) : # Remove all the lines that do not contain communication information # which boils up to having 'RTI' in the 'event' column - df = df[df['event'].str.contains('Sending|Receiving') == True] + df = df[df['event'].str.contains('Sending|Receiving|Scheduler advancing time ends') == True] df = df.astype({'self_id': 'int', 'partner_id': 'int'}) # Add an inout column to set the arrow direction @@ -184,7 +185,10 @@ def load_and_process_csv_file(csv_file) : for index in trace_df.index: # If the tracepoint is pending, proceed to look for a match if (trace_df.at[index,'arrow'] == 'pending') : - physical_time = trace_df.at[index,'physical_time'] + # Look for a match only if it is not about advancing time + if (trace_df.at[index,'event'] == 'AdvLT') : + trace_df.at[index,'arrow'] = 'adv' + continue self_id = trace_df.at[index,'self_id'] partner_id = trace_df.at[index,'partner_id'] event = trace_df.at[index,'event'] @@ -295,6 +299,9 @@ def load_and_process_csv_file(csv_file) : elif (row['arrow'] == 'marked'): f.write(fhlp.svg_string_draw_side_label(row['x1'], row['y1'], physical_time, anchor)) + elif (row['arrow'] == 'adv'): + f.write(fhlp.svg_string_draw_adv(row['x1'], row['y1'], label)) + f.write('\n\n\n') # Print footer diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index 1229e3bcd8..9c3c419578 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -38,7 +38,8 @@ # "Receiving ADDRESS_QUERY": "ADDRESS_QUERY", # "Receiving ADDRESS_ADVERTISEMENT": "ADDRESS_ADVERTISEMENT", "Receiving MSG": "MSG", - "Receiving P2P_MSG": "P2P_MSG" + "Receiving P2P_MSG": "P2P_MSG", + "Scheduler advancing time ends": "AdvLT" } prune_event_name.setdefault(" ", "UNIDENTIFIED") @@ -218,3 +219,18 @@ def svg_string_draw_dot_with_time(x, y, time, label) : str_line = '\t\n' str_line = str_line + '\t '+time+': '+label+'\n' return str_line + +def svg_string_draw_adv(x, y, label) : + ''' + Constructs the svg html string to draw at a dash, meaning that logical time is advancing there. + + Args: + * x: Int X coordinate of the dash + * y: Int Y coordinate of the dash + * label: String to draw + Returns: + * String: the svg string of the triangle + ''' + str_line1 = svg_string_draw_line(x-5, y, x+5, y, "ADV") + str_line2 = svg_string_draw_side_label(x, y, label) + return str_line1 + str_line2 \ No newline at end of file From 8f92d325da6b9e22fa74341499f23fe30da495d6 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Thu, 9 Mar 2023 15:56:57 -0800 Subject: [PATCH 46/61] Align reactor-c. 
This includes eliminating unnecessary TAG messages (PR #175) --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 81a4fab5b0..64a3ec5490 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 81a4fab5b0d139c39936e2f9cf9cdb1fc1e4a4fe +Subproject commit 64a3ec5490861e61e31ef030c242e62ae4a72694 From f4653adaca71f45d6734cfc34ee73d7cc20776e8 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Fri, 10 Mar 2023 13:46:06 +0100 Subject: [PATCH 47/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 64a3ec5490..d9c956beee 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 64a3ec5490861e61e31ef030c242e62ae4a72694 +Subproject commit d9c956beee332e7f81c7ab854ea0e1f8baba0abf From 873bdef7d0cdc23e887ff218b3bc1a895fb3cf81 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Fri, 10 Mar 2023 16:58:04 +0100 Subject: [PATCH 48/61] Attempt to get mysterious unreproducible compile error to go --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index d9c956beee..c2cff2a2ed 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit d9c956beee332e7f81c7ab854ea0e1f8baba0abf +Subproject commit c2cff2a2edc913a6ac8d644a5f3677457d3a9050 From 483abdd5da93a51c5ec3423e1dbd183c3544338b Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Fri, 10 Mar 2023 12:10:57 -0800 Subject: [PATCH 49/61] Cleanup logged messages + Update the font in trace visualizer + Align reactor-c --- lib/scripts/launch-fedsd.sh | 15 ++++++++------- org.lflang/src/lib/c/reactor-c | 2 +- util/tracing/visualization/fedsd.py | 2 +- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/lib/scripts/launch-fedsd.sh b/lib/scripts/launch-fedsd.sh index 96ad9fe68e..e09c60fc1e 100755 --- a/lib/scripts/launch-fedsd.sh +++ b/lib/scripts/launch-fedsd.sh @@ -65,7 +65,7 @@ fi # Initialize variables csv_files_list='' extension='.csv' -rti_file='' +rti_csv_file='' # Iterate over the lft file list to: # - First, transform into csv # - Second, construct the csv file name # - Then construct the csv file list # The csv file list does not include the rti, which is put in a separate variable for each_lft_file in $lft_files_list do # Transform to csv trace_to_csv $each_lft_file # Get the file name csv=${each_lft_file%.*} if [ $csv == 'rti' ] then # Set the rti csv file - rti_file='rti.csv' + rti_csv_file='rti.csv' else # Construct the csv file name and add it to the list csv_files_list="$csv$extension $csv_files_list" fi done -echo $lft_files_list -echo $rti_file -echo $csv_files_list +# echo $lft_files_list +# echo $rti_csv_file +# echo $csv_files_list # FIXME: Check that python3 is in the path. 
-if [ $rti_file == '' ] +if [ $rti_csv_file == '' ] then # FIXME: Support the case where no rti file is given python3 "${base}/util/tracing/visualization/fedsd.py" "-f" $csv_files_list else - python3 "${base}/util/tracing/visualization/fedsd.py" "-r" "$rti_file" "-f" $csv_files_list + echo Building the communication diagram for the following trace files: $lft_files_list in trace_svg.html + python3 "${base}/util/tracing/visualization/fedsd.py" "-r" "$rti_csv_file" "-f" $csv_files_list fi diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index c2cff2a2ed..25b1d89fe4 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit c2cff2a2edc913a6ac8d644a5f3677457d3a9050 +Subproject commit 25b1d89fe46d2d83090ba655837c93e033753ab4 diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 22f5c293f7..409803a017 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -29,6 +29,7 @@ \ text { \ font-size: smaller; \ + font-family: sans-serif; \ } \ text.time {fill: #074936; } \ \ @@ -129,7 +130,6 @@ def load_and_process_csv_file(csv_file) : # Add to the list of sequence diagram actors and add the name actors.append(fed_id) actors_names[fed_id] = Path(fed_trace).stem - print(actors_names) # Derive the x coordinate of the actor x_coor[fed_id] = (padding * 2) + (spacing * (len(actors)-1)) fed_df['x1'] = x_coor[fed_id] From 366c561e4b54bdde88ec0293e4562bbd2478a92f Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Sun, 12 Mar 2023 17:47:59 +0100 Subject: [PATCH 50/61] Aligned reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 25b1d89fe4..edee7f18d0 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 25b1d89fe46d2d83090ba655837c93e033753ab4 +Subproject commit edee7f18d026ee2f289516f48d666c79c6a2aefb From 2370a0691ab08cbe956a023a02d499f22d30b674 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Tue, 14 Mar 2023 22:43:15 -0700 Subject: [PATCH 51/61] Add tracepoints + Update message labels --- util/tracing/visualization/fedsd.py | 8 ++++--- util/tracing/visualization/fedsd_helper.py | 25 +++++++++++++--------- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/util/tracing/visualization/fedsd.py b/util/tracing/visualization/fedsd.py index 409803a017..3eb13d11bc 100644 --- a/util/tracing/visualization/fedsd.py +++ b/util/tracing/visualization/fedsd.py @@ -20,13 +20,13 @@ } \ .ABS {stroke: #d9dd1f; fill: #d9dd1f; } \ .LTC { stroke: #073b4c; fill: #073b4c;} \ - .MSG { stroke: #ef476f; fill: #ef476f} \ + .T_MSG { stroke: #ef476f; fill: #ef476f} \ .NET { stroke: #118ab2; fill: #118ab2} \ .PTAG { stroke: #06d6a0; fill: #06d6a0} \ .TAG { stroke: #08a578; fill: #08a578} \ .TIMESTAMP { stroke: grey; fill: grey } \ + .FED_ID {stroke: #80DD99; fill: #80DD99 } \ .ADV {stroke-linecap="round" ; stroke: "red" ; fill: "red"} \ - \ text { \ font-size: smaller; \ font-family: sans-serif; \ @@ -275,7 +275,9 @@ def load_and_process_csv_file(csv_file) : # FIXME: Using microseconds is hardwired here. 
physical_time = f'{int(row["physical_time"]/1000):,}' - if (row['logical_time'] == -1678240241788173894) : + if (row['event'] in {'FED_ID', 'ACK', 'REJECT', 'ADR_RQ', 'ADR_AD', 'MSG', 'P2P_MSG'}): + label = row['event'] + elif (row['logical_time'] == -1678240241788173894) : # FIXME: This isn't right. NEVER == -9223372036854775808. label = row['event'] + '(NEVER)' else: diff --git a/util/tracing/visualization/fedsd_helper.py b/util/tracing/visualization/fedsd_helper.py index 9c3c419578..804341f117 100644 --- a/util/tracing/visualization/fedsd_helper.py +++ b/util/tracing/visualization/fedsd_helper.py @@ -9,36 +9,41 @@ "Sending LTC": "LTC", "Sending STOP_REQ": "STOP_REQ", "Sending STOP_REQ_REP": "STOP_REQ_REP", + "Sending STOP_GRN": "STOP_GRN", "Sending FED_ID": "FED_ID", "Sending PTAG": "PTAG", "Sending TAG": "TAG", - "Sending STOP_GRN": "STOP_GRN", - "Sending JOIN": "JOIN", "Sending REJECT": "REJECT", "Sending RESIGN": "RESIGN", - "Sending ABS": "ABS", - "Sending CLOSE_REQ": "CLOSE_REQ", + "Sending PORT_ABS": "ABS", + "Sending CLOSE_RQ": "CLOSE_RQ", + "Sending TAGGED_MSG": "T_MSG", + "Sending P2P_TAGGED_MSG": "P2P_T_MSG", "Sending MSG": "MSG", "Sending P2P_MSG": "P2P_MSG", + "Sending ADR_AD": "ADR_AD", + "Sending ADR_QR": "ADR_QR", "Receiving ACK": "ACK", "Receiving TIMESTAMP": "TIMESTAMP", "Receiving NET": "NET", "Receiving LTC": "LTC", "Receiving STOP_REQ": "STOP_REQ", "Receiving STOP_REQ_REP": "STOP_REQ_REP", + "Receiving STOP_GRN": "STOP_GRN", "Receiving FED_ID": "FED_ID", "Receiving PTAG": "PTAG", "Receiving TAG": "TAG", - "Receiving STOP_GRN": "STOP_GRN", "Receiving REJECT": "REJECT", "Receiving RESIGN": "RESIGN", - "Receiving ABS": "ABS", - "Receiving CLOSE_REQ": "CLOSE_REQ", - "Receiving UNIDENTIFIED": "UNIDENTIFIED", - # "Receiving ADDRESS_QUERY": "ADDRESS_QUERY", - # "Receiving ADDRESS_ADVERTISEMENT": "ADDRESS_ADVERTISEMENT", + "Receiving PORT_ABS": "ABS", + "Receiving CLOSE_RQ": "CLOSE_RQ", + "Receiving TAGGED_MSG": "T_MSG", + "Receiving P2P_TAGGED_MSG": "P2P_T_MSG", "Receiving MSG": "MSG", "Receiving P2P_MSG": "P2P_MSG", + "Receiving ADR_AD": "ADR_AD", + "Receiving ADR_QR": "ADR_QR", + "Receiving UNIDENTIFIED": "UNIDENTIFIED", "Scheduler advancing time ends": "AdvLT" } From 572c8c46ee0b9f0ea26253df045cb84fe329e02c Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Wed, 15 Mar 2023 10:53:42 +0100 Subject: [PATCH 52/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index a3e849738c..2e19c9334c 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit a3e849738c7595265fc91d2f9a89a9133c7f265f +Subproject commit 2e19c9334c5fe473f30264b4ea3bb5f7d3b70b66 From 5461bd6ee466b96b936577d0b14c4dc363cd4eca Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Fri, 17 Mar 2023 17:40:12 -0700 Subject: [PATCH 53/61] Check CI with fix-unthreaded-tracing branch in reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 2e19c9334c..bedeeab7db 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 2e19c9334c5fe473f30264b4ea3bb5f7d3b70b66 +Subproject commit bedeeab7dbf5e49273a80595e7d9312c572e5f02 From 403e642ff04964ea4cfcf17b26479f821a87cc77 Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Wed, 22 Mar 2023 09:09:42 +0100 Subject: [PATCH 54/61] Aligned reactor-c with tracing-federates --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index bedeeab7db..c52dee90f3 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit bedeeab7dbf5e49273a80595e7d9312c572e5f02 +Subproject commit c52dee90f38b4dad129b402fae0819aebb8c99ca From 3d5407a478b88181517e59e04dd7cdb8bd567efd Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Wed, 22 Mar 2023 15:00:31 +0100 Subject: [PATCH 55/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index c52dee90f3..31404ffdab 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit c52dee90f38b4dad129b402fae0819aebb8c99ca +Subproject commit 31404ffdabff752509a1b1d47838d222028b748f From dc8c028f8616f5835d4f675974efd1529a8db002 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Wed, 22 Mar 2023 15:28:39 +0100 Subject: [PATCH 56/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 31404ffdab..9263c04058 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 31404ffdabff752509a1b1d47838d222028b748f +Subproject commit 9263c04058f6f18d38cd34569c35de9f8c75de55 From ad34136ed0a62c5c6dfc399f2ec84eb19904abcd Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Wed, 22 Mar 2023 15:55:12 +0100 Subject: [PATCH 57/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 9263c04058..b1171b73bb 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 9263c04058f6f18d38cd34569c35de9f8c75de55 +Subproject commit b1171b73bbe4096e22444e95da0c6592cc35eee8 From 3d9264d937208c110892925ef7a2d6696ff99cab Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Wed, 22 Mar 2023 16:00:41 +0100 Subject: [PATCH 58/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index b1171b73bb..a58831b7a1 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit b1171b73bbe4096e22444e95da0c6592cc35eee8 +Subproject commit a58831b7a1dd1f925e41d0ce8e8c1aa5189d4f8d From f243b7d4ac84235ca7921e8c777baba1de023634 Mon Sep 17 00:00:00 2001 From: ChadliaJerad Date: Wed, 22 Mar 2023 12:50:06 -0700 Subject: [PATCH 59/61] Align reactor-c. --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index a58831b7a1..ea429a8e33 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit a58831b7a1dd1f925e41d0ce8e8c1aa5189d4f8d +Subproject commit ea429a8e3349c6b0c5573e220b3984567cd1ac66 From cbbca7b39090515abccdfac4af4b66195668ab6a Mon Sep 17 00:00:00 2001 From: "Edward A. 
Lee" Date: Thu, 23 Mar 2023 09:36:56 +0100 Subject: [PATCH 60/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index ea429a8e33..66d7b9e6fa 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit ea429a8e3349c6b0c5573e220b3984567cd1ac66 +Subproject commit 66d7b9e6fa7f9e7ed854a9606376b1d89095e05b From 0d67047b480f8be71e9ccc02c8ed5876ca3a1247 Mon Sep 17 00:00:00 2001 From: "Edward A. Lee" Date: Thu, 23 Mar 2023 17:04:04 +0100 Subject: [PATCH 61/61] Align reactor-c --- org.lflang/src/lib/c/reactor-c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/org.lflang/src/lib/c/reactor-c b/org.lflang/src/lib/c/reactor-c index 66d7b9e6fa..4d6bb55496 160000 --- a/org.lflang/src/lib/c/reactor-c +++ b/org.lflang/src/lib/c/reactor-c @@ -1 +1 @@ -Subproject commit 66d7b9e6fa7f9e7ed854a9606376b1d89095e05b +Subproject commit 4d6bb5549640b57ac25b26b6ebb4ecfdfad256e6