diff --git a/Robot-Framework/lib/PerformanceDataProcessing.py b/Robot-Framework/lib/PerformanceDataProcessing.py
index dce5521f..91314039 100644
--- a/Robot-Framework/lib/PerformanceDataProcessing.py
+++ b/Robot-Framework/lib/PerformanceDataProcessing.py
@@ -47,6 +47,14 @@ def write_mem_to_csv(self, test_name, build_number, mem_data):
self.device]
self._write_to_csv(test_name, data)
+ @keyword
+ def write_speed_to_csv(self, test_name, build_number, speed_data):
+ data = [build_number,
+ speed_data['tx'],
+ speed_data['rx'],
+ self.device]
+ self._write_to_csv(test_name, data)
+
@keyword
def read_cpu_csv_and_plot(self, test_name):
build_numbers = []
@@ -77,19 +85,21 @@ def read_cpu_csv_and_plot(self, test_name):
plt.subplot(3, 1, 1)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, cpu_events_per_second, marker='o', linestyle='-', color='b')
- plt.title('CPU Events per Second', loc='right', fontweight="bold")
- plt.ylabel('CPU Events per Second')
+ plt.yticks(fontsize=14)
+ plt.title('CPU Events per Second', loc='right', fontweight="bold", fontsize=16)
+ plt.ylabel('CPU Events per Second', fontsize=16)
plt.grid(True)
- plt.xticks(build_numbers)
+ plt.xticks(build_numbers, fontsize=14)
# Plot 2: CPU Events per Thread
plt.subplot(3, 1, 2)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, cpu_events_per_thread, marker='o', linestyle='-', color='b')
- plt.title('CPU Events per Thread', loc='right', fontweight="bold")
- plt.ylabel('CPU Events per Thread')
+ plt.yticks(fontsize=14)
+ plt.title('CPU Events per Thread', loc='right', fontweight="bold", fontsize=16)
+ plt.ylabel('CPU Events per Thread', fontsize=16)
plt.grid(True)
- plt.xticks(build_numbers)
+ plt.xticks(build_numbers, fontsize=14)
# Create line chart with error bars on the same subplot
plt.errorbar(build_numbers, cpu_events_per_thread,
yerr=cpu_events_per_thread_stddev,
@@ -99,19 +109,20 @@ def read_cpu_csv_and_plot(self, test_name):
plt.subplot(3, 1, 3)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, avg_latency, marker='o', linestyle='-', color='b', label='Avg')
- plt.ylabel('Avg Latency (ms)')
+ plt.ylabel('Avg Latency (ms)', fontsize=16)
plt.legend(loc='upper left')
- plt.xlabel('Build Number')
+ plt.xlabel('Build Number', fontsize=16)
plt.twinx()
plt.plot(build_numbers, max_latency, marker='o', linestyle='-', color='r', label='Max')
plt.plot(build_numbers, min_latency, marker='o', linestyle='-', color='g', label='Min')
- plt.ylabel('Max/Min Latency (ms)')
+ plt.yticks(fontsize=14)
+ plt.ylabel('Max/Min Latency (ms)', fontsize=16)
plt.legend(loc='upper right')
- plt.title('Latency', loc='right', fontweight="bold")
+ plt.title('Latency', loc='right', fontweight="bold", fontsize=16)
plt.grid(True)
- plt.xticks(build_numbers)
+ plt.xticks(build_numbers, fontsize=14)
- plt.suptitle(f'{test_name} ({self.device})', fontsize=16, fontweight='bold')
+ plt.suptitle(f'{test_name} ({self.device})', fontsize=18, fontweight='bold')
plt.tight_layout()
plt.savefig(f'../test-suites/{self.device}_{test_name}.png') # Save the plot as an image file
@@ -148,37 +159,85 @@ def read_mem_csv_and_plot(self, test_name):
plt.subplot(3, 1, 1)
plt.ticklabel_format(axis='y', style='sci', useMathText=True)
plt.plot(build_numbers, operations_per_second, marker='o', linestyle='-', color='b')
- plt.title('Operations per Second', loc='right', fontweight="bold")
- plt.ylabel('Operations per Second')
+ plt.yticks(fontsize=14)
+ plt.title('Operations per Second', loc='right', fontweight="bold", fontsize=16)
+ plt.ylabel('Operations per Second', fontsize=16)
plt.grid(True)
- plt.xticks(build_numbers)
+ plt.xticks(build_numbers, fontsize=14)
# Plot 2: Data Transfer Speed
plt.subplot(3, 1, 2)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, data_transfer_speed, marker='o', linestyle='-', color='b')
- plt.title('Data Transfer Speed', loc='right', fontweight="bold")
- plt.ylabel('Data Transfer Speed (MiB/sec)')
+ plt.yticks(fontsize=14)
+ plt.title('Data Transfer Speed', loc='right', fontweight="bold", fontsize=16)
+ plt.ylabel('Data Transfer Speed (MiB/sec)', fontsize=16)
plt.grid(True)
- plt.xticks(build_numbers)
+ plt.xticks(build_numbers, fontsize=14)
# Plot 3: Latency
plt.subplot(3, 1, 3)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, avg_latency, marker='o', linestyle='-', color='b', label='Avg')
- plt.ylabel('Avg Latency (ms)')
+ plt.ylabel('Avg Latency (ms)', fontsize=16)
plt.legend(loc='upper left')
plt.grid(True)
- plt.xlabel('Build Number')
+ plt.xlabel('Build Number', fontsize=16)
plt.twinx()
plt.plot(build_numbers, max_latency, marker='o', linestyle='-', color='r', label='Max')
plt.plot(build_numbers, min_latency, marker='o', linestyle='-', color='g', label='Min')
- plt.ylabel('Max/Min Latency (ms)')
+ plt.yticks(fontsize=14)
+ plt.ylabel('Max/Min Latency (ms)', fontsize=16)
plt.legend(loc='upper right')
- plt.title('Latency', loc='right', fontweight="bold")
- plt.xticks(build_numbers)
+ plt.title('Latency', loc='right', fontweight="bold", fontsize=16)
+ plt.xticks(build_numbers, fontsize=14)
- plt.suptitle(f'{test_name} ({self.device})', fontsize=16, fontweight='bold')
+ plt.suptitle(f'{test_name} ({self.device})', fontsize=18, fontweight='bold')
+
+ plt.tight_layout()
+ plt.savefig(f'../test-suites/{self.device}_{test_name}.png') # Save the plot as an image file
+
+ @keyword
+ def read_speed_csv_and_plot(self, test_name):
+ build_numbers = []
+ tx = []
+ rx = []
+
+ with open(f"{self.data_dir}{self.device}_{test_name}.csv", 'r') as csvfile:
+ csvreader = csv.reader(csvfile)
+ logging.info("Reading data from csv file...")
+ for row in csvreader:
+ if row[3] == self.device:
+ build_numbers.append(str(row[0]))
+ tx.append(float(row[1]))
+ rx.append(float(row[2]))
+
+ plt.figure(figsize=(20, 10))
+ plt.set_loglevel('WARNING')
+
+ # Plot 1: TX
+ plt.subplot(2, 1, 1)
+ plt.ticklabel_format(axis='y', style='plain')
+ plt.plot(build_numbers, tx, marker='o', linestyle='-', color='b')
+ plt.yticks(fontsize=14)
+ plt.title('Transmitting Speed', loc='right', fontweight="bold", fontsize=16)
+ plt.ylabel('TX Speed (MBytes/sec)', fontsize=16)
+ plt.grid(True)
+ plt.xticks(build_numbers, fontsize=14)
+
+ # Plot 2: RX
+ plt.subplot(2, 1, 2)
+ plt.ticklabel_format(axis='y', style='plain')
+ plt.plot(build_numbers, rx, marker='o', linestyle='-', color='b')
+ plt.yticks(fontsize=14)
+ plt.title('Receiving Speed', loc='right', fontweight="bold", fontsize=16)
+ plt.ylabel('RX Speed (MBytes/sec)', fontsize=16)
+ plt.grid(True)
+ plt.xticks(build_numbers, fontsize=14)
+
+ plt.xlabel('Build Number', fontsize=16)
+
+ plt.suptitle(f'{test_name} ({self.device})', fontsize=18, fontweight='bold')
plt.tight_layout()
plt.savefig(f'../test-suites/{self.device}_{test_name}.png') # Save the plot as an image file
@@ -194,3 +253,9 @@ def save_memory_data(self, test_name, build_number, cpu_data):
self.write_mem_to_csv(test_name, build_number, cpu_data)
self.read_mem_csv_and_plot(test_name)
+
+ @keyword
+ def save_speed_data(self, test_name, build_number, speed_data):
+
+ self.write_speed_to_csv(test_name, build_number, speed_data)
+ self.read_speed_csv_and_plot(test_name)
diff --git a/Robot-Framework/lib/output_parser.py b/Robot-Framework/lib/output_parser.py
index bfb36c25..c19fbd53 100644
--- a/Robot-Framework/lib/output_parser.py
+++ b/Robot-Framework/lib/output_parser.py
@@ -14,6 +14,7 @@ def get_systemctl_status(output):
else:
raise Exception("Couldn't parse systemctl status")
+
def get_service_status(output):
output = re.sub(r'\033\[.*?m', '', output) # remove colors from serial console output
match = re.search(r'Active: (\w+) \((\w+)', output)
@@ -29,6 +30,7 @@ def find_pid(output, proc_name):
pids = [line.split()[1] for line in output if proc_name in line]
return pids
+
def verify_shutdown_status(output):
output = re.sub(r'\033\[.*?m', '', output) # remove colors from serial console output
match = re.search(r'ExecStop=.*\(code=(\w+), status=(.*)\)', output)
@@ -37,18 +39,21 @@ def verify_shutdown_status(output):
else:
raise Exception("Couldn't parse shutdown status")
+
def parse_version(output):
versions = output.split(' ')
name = versions[1][1:-1] if len(versions) > 1 else None
major, minor, date, commit = versions[0].split(".")
return major, minor, date, commit, name
+
def verify_date_format(date_string):
try:
datetime.strptime(date_string, '%Y%m%d')
except ValueError:
raise Exception("Wrong date format in version date field")
+
def parse_cpu_results(output):
def extract_value(pattern, output):
match = re.search(pattern, output)
@@ -77,6 +82,7 @@ def extract_value(pattern, output):
return cpu_data
+
def parse_memory_results(output):
def extract_value(pattern, output):
match = re.search(pattern, output)
@@ -107,3 +113,24 @@ def extract_value(pattern, output):
return mem_data
+
+def parse_iperf_output(output):
+ tx_pattern = r'\s(\d+(\.\d+)?) MBytes\/sec.*sender'
+ rx_pattern = r'\s(\d+(\.\d+)?) MBytes\/sec.*receiver'
+
+ match = re.search(tx_pattern, output)
+ if match:
+ tx = match.group(1)
+ else:
+ raise Exception(f"Couldn't parse TX, pattern: {tx_pattern}")
+
+ match = re.search(rx_pattern, output)
+ if match:
+ rx = match.group(1)
+ else:
+ raise Exception(f"Couldn't parse RX, pattern: {rx_pattern}")
+
+ return {
+ "tx": tx,
+ "rx": rx
+ }
diff --git a/Robot-Framework/resources/ssh_keywords.resource b/Robot-Framework/resources/ssh_keywords.resource
index fc4ffc7e..8292c228 100644
--- a/Robot-Framework/resources/ssh_keywords.resource
+++ b/Robot-Framework/resources/ssh_keywords.resource
@@ -10,6 +10,10 @@ Library Process
Library ../lib/output_parser.py
+*** Variables ***
+${netvm_ip} 192.168.101.1
+
+
*** Keywords ***
Ping Host
@@ -236,3 +240,16 @@ Install sysbench tool
ELSE
Log To Console ${\n}sysbench tool was already installed
END
+
+Install iperf tool
+ ${command_output}= Execute Command nix-env --query --installed
+ ${not_installed} = Run Keyword And Return Status Should Not Contain ${command_output} iperf
+ IF ${not_installed}
+ Execute Command nix-env -i iperf
+ ${command_output}= Execute Command nix-env --query --installed
+ Log To Console ${\n}Installed packages:${\n}${command_output}
+ Should Contain ${command_output} iperf iperf tool was not installed
+ Log To Console iperf tool was successfully installed
+ ELSE
+ Log To Console ${\n}iperf tool was already installed
+ END
diff --git a/Robot-Framework/test-suites/performance/network.robot b/Robot-Framework/test-suites/performance/network.robot
new file mode 100644
index 00000000..c1b07668
--- /dev/null
+++ b/Robot-Framework/test-suites/performance/network.robot
@@ -0,0 +1,61 @@
+# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)
+# SPDX-License-Identifier: Apache-2.0
+
+*** Settings ***
+Documentation Network performance tests
+... Requires iperf3 installed on test running PC (sudo apt install iperf3)
+Force Tags performance network
+Resource ../../resources/ssh_keywords.resource
+Resource ../../config/variables.robot
+Library ../../lib/output_parser.py
+Library Process
+Library ../../lib/PerformanceDataProcessing.py ${DEVICE}
+Suite Setup Common Setup
+Suite Teardown Close All Connections
+
+
+*** Test Cases ***
+
+TCP speed test
+ [Documentation] Measure RX and TX speed for TCP
+ [Tags] tcp SP-T91
+ Run iperf server on DUT
+ &{tcp_speed} Run TCP test
+ Save Speed Data ${TEST NAME} ${buildID} ${tcp_speed}
+ Log HTML
+
+UDP speed test
+ [Documentation] Measure RX and TX speed for UDP
+ [Tags] udp SP-T92
+ Run iperf server on DUT
+ &{udp_speed} Run UDP test
+ Save Speed Data ${TEST NAME} ${buildID} ${udp_speed}
+ Log HTML
+
+
+*** Keywords ***
+
+Common Setup
+ Set Variables ${DEVICE}
+ Connect
+ Install iperf tool
+
+Run iperf server on DUT
+ [Documentation] Run iperf on DUT in server mode
+ ${command} Set Variable iperf3 -s
+ Execute Command nohup ${command} > output.log 2>&1 &
+
+Run TCP test
+ [Documentation] Run network test on agent machine
+ ${output} Run Process iperf3 -c ${DEVICE_IP_ADDRESS} -f M -t 10 shell=True
+ Should Contain ${output.stdout} iperf Done.
+ Log ${output.stdout}
+ &{tcp_speed} Parse iperf output ${output.stdout}
+ [Return] &{tcp_speed}
+
+Run UDP test
+ ${output} Run Process iperf3 -c ${DEVICE_IP_ADDRESS} -u -b 100G -f M -t 10 shell=True
+ Should Contain ${output.stdout} iperf Done.
+ Log ${output.stdout}
+ &{udp_speed} Parse iperf output ${output.stdout}
+ [Return] &{udp_speed}
diff --git a/Robot-Framework/test-suites/performance/performance.robot b/Robot-Framework/test-suites/performance/performance.robot
index 6768739d..8c2804a5 100644
--- a/Robot-Framework/test-suites/performance/performance.robot
+++ b/Robot-Framework/test-suites/performance/performance.robot
@@ -17,7 +17,7 @@ CPU One thread test
[Documentation] Run a CPU benchmark using Sysbench with a duration of 10 seconds and a SINGLE thread.
... The benchmark records to csv CPU events per second, events per thread, and latency data.
... Create visual plots to represent these metrics comparing to previous tests.
- [Tags] performance cpu SP-T67-1
+ [Tags] cpu SP-T67-1
${output} Execute Command sysbench cpu --time=10 --threads=1 --cpu-max-prime=20000 run
Log ${output}
&{cpu_data} Parse Cpu Results ${output}
@@ -28,7 +28,7 @@ CPU multimple threads test
[Documentation] Run a CPU benchmark using Sysbench with a duration of 10 seconds and MULTIPLE threads.
... The benchmark records to csv CPU events per second, events per thread, and latency data.
... Create visual plots to represent these metrics comparing to previous tests.
- [Tags] performance cpu SP-T67-2
+ [Tags] cpu SP-T67-2
${output} Execute Command sysbench cpu --time=10 --threads=${threads_number} --cpu-max-prime=20000 run
Log ${output}
&{cpu_data} Parse Cpu Results ${output}
@@ -40,7 +40,7 @@ Memory Read One thread test
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for READ operations.
... Create visual plots to represent these metrics comparing to previous tests.
- [Tags] performance cpu SP-T67-3
+ [Tags] cpu SP-T67-3
${output} Execute Command sysbench memory --time=60 --memory-oper=read --threads=1 run
Log ${output}
&{cpu_data} Parse Memory Results ${output}
@@ -52,7 +52,7 @@ Memory Write One thread test
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for WRITE operations.
... Create visual plots to represent these metrics comparing to previous tests.
- [Tags] performance cpu SP-T67-4
+ [Tags] cpu SP-T67-4
${output} Execute Command sysbench memory --time=60 --memory-oper=write --threads=1 run
Log ${output}
&{cpu_data} Parse Memory Results ${output}
@@ -64,7 +64,7 @@ Memory Read multimple threads test
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for READ operations.
... Create visual plots to represent these metrics comparing to previous tests.
- [Tags] performance cpu SP-T67-5
+ [Tags] cpu SP-T67-5
${output} Execute Command sysbench memory --time=60 --memory-oper=read --threads=${threads_number} run
Log ${output}
&{cpu_data} Parse Memory Results ${output}
@@ -76,7 +76,7 @@ Memory Write multimple threads test
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for WRITE operations.
... Create visual plots to represent these metrics comparing to previous tests.
- [Tags] performance cpu SP-T67-6
+ [Tags] cpu SP-T67-6
${output} Execute Command sysbench memory --time=60 --memory-oper=write --threads=${threads_number} run
Log ${output}
&{cpu_data} Parse Memory Results ${output}
@@ -84,7 +84,6 @@ Memory Write multimple threads test
Log HTML
-
*** Keywords ***
Common Setup