behave_tests: log latency test (fixed threshold) 88/72788/2
author Gwenael Lambrouin <gwenael.lambrouin@orange.com>
Mon, 19 Jul 2021 09:42:39 +0000 (11:42 +0200)
committer Gwenael Lambrouin <gwenael.lambrouin@orange.com>
Thu, 22 Jul 2021 15:08:18 +0000 (17:08 +0200)
Change-Id: I8285829a854f146fb9736d44655a7e848923203e
Signed-off-by: Gwenael Lambrouin <gwenael.lambrouin@orange.com>
behave_tests/features/steps/steps.py

index 76ed12d..a1d29ce 100644
@@ -127,6 +127,7 @@ def add_percentage_rate(context, percentage_rate):
     context.percentage_rate = percentage_rate
     rate = percentage_previous_rate(context, percentage_rate)
     context.json['rate'] = rate
+    context.logger.info(f"add_percentage_rate: {percentage_rate} => rate={rate}")
 
 
 """When steps."""
@@ -266,12 +267,18 @@ def check_latency_result_against_fixed_threshold(context, max_avg_latency_usec:
     # Get the just measured average latency (a float):
     new_avg_latency_usec = context.synthesis['avg_delay_usec']
 
+    # Log what we test:
+    context.logger.info("check_latency_result_against_fixed_threshold(usec): "
+                        "{value}<={ref}?".format(
+                            value=round(new_avg_latency_usec),
+                            ref=round(max_avg_latency_usec)))
+
     # Compare measured value to reference:
     if new_avg_latency_usec > max_avg_latency_usec:
         raise AssertionError("Average latency higher than max threshold: "
-                             "{avg_latency} usec > {threshold} usec".format(
-                                 avg_latency=round(new_avg_latency_usec),
-                                 threshold=round(max_avg_latency_usec)))
+                             "{value} usec > {ref} usec".format(
+                                 value=round(new_avg_latency_usec),
+                                 ref=round(max_avg_latency_usec)))
 
 
 @then(
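
For reference, a self-contained sketch of the updated step body: first the new
log line, then the threshold check. The standalone check_latency() wrapper and
the sample values are illustrative only; the real step reads
new_avg_latency_usec from context.synthesis['avg_delay_usec'] and logs through
context.logger:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("behave_tests")

    def check_latency(new_avg_latency_usec: float, max_avg_latency_usec: int) -> None:
        # Same logic as the step above, minus the behave context plumbing.
        logger.info("check_latency_result_against_fixed_threshold(usec): "
                    "{value}<={ref}?".format(
                        value=round(new_avg_latency_usec),
                        ref=round(max_avg_latency_usec)))
        if new_avg_latency_usec > max_avg_latency_usec:
            raise AssertionError("Average latency higher than max threshold: "
                                 "{value} usec > {ref} usec".format(
                                     value=round(new_avg_latency_usec),
                                     ref=round(max_avg_latency_usec)))

    check_latency(120.4, 150)  # logs "120<=150?" and returns
    check_latency(180.7, 150)  # logs "181<=150?" then raises AssertionError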