Benchmark Kubernetes Networking Performance 98/72498/1
authorCédric Ollivier <cedric.ollivier@orange.com>
Sun, 9 May 2021 15:15:33 +0000 (17:15 +0200)
committerCédric Ollivier <cedric.ollivier@orange.com>
Mon, 10 May 2021 09:49:08 +0000 (11:49 +0200)
https://github.com/kubernetes/perf-tests/tree/master/network/benchmarks/netperf

Change-Id: I6facd567f1c52c5949b53484a1fb107dcf34d622
Signed-off-by: Cédric Ollivier <cedric.ollivier@orange.com>
(cherry picked from commit 3b5c1b115e234d636cb4f2a17d27ced872fee924)

ansible/site.gate.yml
ansible/site.yml
docker/benchmarking/Dockerfile
docker/benchmarking/plotperf.py.patch [new file with mode: 0644]
docker/benchmarking/testcases.yaml
functest_kubernetes/netperf/__init__.py [new file with mode: 0644]
functest_kubernetes/netperf/netperf.py [new file with mode: 0644]
setup.cfg

index 6c3e4d1..6ca2b4c 100644 (file)
@@ -59,6 +59,7 @@
         - container: functest-kubernetes-benchmarking
           tests:
             - xrally_kubernetes_full
+            - netperf
         - container: functest-kubernetes-cnf
           tests:
             - k8s_vims
index f2c2326..38b055d 100644 (file)
@@ -44,6 +44,7 @@
         - container: functest-kubernetes-benchmarking
           tests:
             - xrally_kubernetes_full
+            - netperf
         - container: functest-kubernetes-cnf
           tests:
             - k8s_vims
index 45e24b4..fdc0fac 100644 (file)
@@ -1,4 +1,20 @@
 FROM opnfv/functest-kubernetes-smoke:kali
 
+ARG NETPERF_TAG=8a5a7a23f2165b29e46b4d32aad7d5f85e4b9516
+ARG PLOTPERF_TAG=2455313f4b9581795a8f642243acaad472d91804
+
+COPY plotperf.py.patch /tmp/plotperf.py.patch
+RUN apk --no-cache add --update py3-matplotlib && \
+    apk --no-cache add --virtual .build-deps --update patch go && \
+    ln -s /usr/bin/python3 /usr/bin/python && \
+    git clone https://github.com/kubernetes/perf-tests && \
+    (cd perf-tests && git checkout $NETPERF_TAG) && \
+    (cd perf-tests/network/benchmarks/netperf && go build -o /usr/local/bin/launch launch.go) && \
+    curl https://raw.githubusercontent.com/girishkalele/pyplot-docker/$PLOTPERF_TAG/plotperf.py \
+        --output /usr/local/bin/plotperf.py && \
+    (cd /usr/local/bin && patch -p0 < /tmp/plotperf.py.patch && \
+        mv plotperf.py plotperf && chmod a+x plotperf) && \
+    rm -rf perf-tests /tmp/plotperf.py.patch && \
+    apk del .build-deps
 COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
 CMD ["run_tests", "-t", "all"]
diff --git a/docker/benchmarking/plotperf.py.patch b/docker/benchmarking/plotperf.py.patch
new file mode 100644 (file)
index 0000000..45a64e1
--- /dev/null
@@ -0,0 +1,42 @@
+--- plotperf.py.orig   2021-05-09 10:42:17.858983226 +0200
++++ plotperf.py        2021-05-09 10:43:09.410934186 +0200
+@@ -18,11 +18,13 @@
+ # Generates matplotlib line and bar charts from the netperf.csv raw data file
+ #
++from __future__ import print_function
++from builtins import range
+ try:
+   import matplotlib.pyplot as plt
+-except Exception, e:
++except Exception as e:
+   # Translate the traceback to a more friendly error message
+-  print "Exception (%s) while importing matplotlib - install with apt-get install python-matplotlib (or equivalent package manager)" % e
++  print("Exception (%s) while importing matplotlib - install with apt-get install python-matplotlib (or equivalent package manager)" % e)
+   raise
+ import numpy
+@@ -93,7 +95,7 @@
+   for ext in [ "png", "svg" ]:
+     fname = os.path.join(options.outputdir, "{0}.{1}".format(options.suffix, ext))
+     plt.savefig(fname, dpi=100)
+-    print "Saved {0}".format(fname)
++    print("Saved {0}".format(fname))
+   barlabels = []
+   barvalues = []
+@@ -105,7 +107,7 @@
+     barvalues.append(float(data[n][1]))
+   plt.clf()
+-  plt.barh(bottom=range(0, len(data)-1),
++  plt.barh(list(range(0, len(data)-1)),
+            height=0.5,
+            width=barvalues,
+            align='center')
+@@ -117,4 +119,4 @@
+   for ext in [ "png", "svg" ]:
+     fname = os.path.join(options.outputdir, "{0}.bar.{1}".format(options.suffix, ext))
+     plt.savefig(fname, dpi=100)
+-    print "Saved {0}".format(fname)
++    print("Saved {0}".format(fname))
index 1a850e2..bcf40d4 100644 (file)
@@ -20,3 +20,14 @@ tiers:
             times: 10
             concurrency: 4
             namespaces_count: 3
+      - case_name: netperf
+        project_name: functest
+        criteria: 100
+        blocking: false
+        description: >-
+          A standardized benchmark to measure Kubernetes networking performance
+          on multiple host platforms and network stacks.
+        dependencies:
+          - DEPLOY_SCENARIO: "k8-*"
+        run:
+          name: netperf
diff --git a/functest_kubernetes/netperf/__init__.py b/functest_kubernetes/netperf/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/functest_kubernetes/netperf/netperf.py b/functest_kubernetes/netperf/netperf.py
new file mode 100644 (file)
index 0000000..3135a6c
--- /dev/null
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2021 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""
+Benchmarking Kubernetes Networking Performance
+"""
+
+import glob
+import logging
+import os
+import shutil
+import subprocess
+import time
+
+from xtesting.core import testcase
+
+
+class Netperf(testcase.TestCase):
+    """Run Benchmarking Kubernetes Networking Performance.
+
+    Wraps the kubernetes/perf-tests netperf benchmark: the ``launch``
+    orchestrator runs the benchmark against the cluster and ``plotperf``
+    renders the resulting CSV into charts. Both binaries are installed by
+    the benchmarking Dockerfile and must be reachable via $PATH.
+    """
+
+    # Module-level logger shared by all instances (name-mangled to
+    # _Netperf__logger, so subclasses get their own).
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        super(Netperf, self).__init__(**kwargs)
+        # Log file names published alongside the results; picked up by the
+        # xtesting reporting machinery — TODO confirm against xtesting docs.
+        self.output_log_name = 'functest-kubernetes.log'
+        self.output_debug_log_name = 'functest-kubernetes.debug.log'
+
+    def check_requirements(self):
+        """Check if launch is in $PATH"""
+        # Skip the test case (rather than fail it) when either helper
+        # binary is not installed in this container image.
+        self.is_skipped = not (
+            shutil.which("launch") and shutil.which("plotperf"))
+        if self.is_skipped:
+            self.__logger.warning("launch or plotperf is missing")
+
+    def run(self, **kwargs):
+        """Execute the benchmark and post-process its CSV output.
+
+        Returns testcase.TestCase.EX_OK with result=100 on success,
+        EX_RUN_ERROR with result=0 on any failure. start_time/stop_time
+        are always recorded.
+        """
+        self.start_time = time.time()
+        try:
+            # self.res_dir is inherited from xtesting's TestCase —
+            # presumably the per-test artifact directory; verify upstream.
+            if not os.path.exists(self.res_dir):
+                os.makedirs(self.res_dir)
+            # One iteration of the full netperf benchmark suite against the
+            # cluster described by the kubeconfig.
+            cmd = ['launch', '-iterations', '1', '-kubeConfig',
+                   '/root/.kube/config']
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
+            # launch writes CSVs under results_netperf-latest/ in the
+            # current working directory; keep only the newest one.
+            lfiles = glob.glob(os.path.join(
+                'results_netperf-latest', 'netperf-latest*.csv'))
+            results = max(lfiles, key=os.path.getmtime)
+            shutil.move(results, os.path.join(self.res_dir, 'netperf.csv'))
+            # Render png/svg charts from the CSV into the result directory.
+            cmd = ['plotperf', '-c',
+                   os.path.join(self.res_dir, 'netperf.csv'),
+                   '-o', self.res_dir, '-s', 'netperf']
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
+            # Binary pass/fail: reaching this point means both tools ran
+            # cleanly, so report full marks.
+            self.result = 100
+            status = testcase.TestCase.EX_OK
+        except Exception:  # pylint: disable=broad-except
+            # Broad catch is deliberate: any failure (subprocess error,
+            # empty glob -> max() ValueError, filesystem error) maps to a
+            # single test-case failure with the traceback logged.
+            self.__logger.exception("Can not run Netperf")
+            self.result = 0
+            status = testcase.TestCase.EX_RUN_ERROR
+        self.stop_time = time.time()
+        return status
index 52e1975..9721cef 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -17,3 +17,4 @@ xtesting.testcase =
     kube_hunter = functest_kubernetes.security.security:KubeHunter
     kube_bench = functest_kubernetes.security.security:KubeBench
     cnf_testsuite = functest_kubernetes.cnf_conformance.conformance:CNFConformance
+    netperf = functest_kubernetes.netperf.netperf:Netperf