throughput between nodes (in progress) [Gerrit change: refs/changes/45/16745/3]
author    rexlee8776 <limingjiang@huawei.com>
          Tue, 12 Jul 2016 09:46:12 +0000 (09:46 +0000)
committer rexlee8776 <limingjiang@huawei.com>
          Thu, 14 Jul 2016 06:49:23 +0000 (06:49 +0000)
JIRA: YARDSTICK-256

Change-Id: I48d66081dcfabf4462ef1ff15c9a3ad28132aaf9
Signed-off-by: rexlee8776 <limingjiang@huawei.com>
samples/netperf_node.yaml [new file with mode: 0755]
tests/unit/benchmark/scenarios/networking/test_netperf_node.py [new file with mode: 0755]
yardstick/benchmark/scenarios/networking/netperf_install.bash [new file with mode: 0755]
yardstick/benchmark/scenarios/networking/netperf_node.py [new file with mode: 0755]
yardstick/benchmark/scenarios/networking/netperf_remove.bash [new file with mode: 0755]

diff --git a/samples/netperf_node.yaml b/samples/netperf_node.yaml
new file mode 100755 (executable)
index 0000000..d59bbf2
--- /dev/null
@@ -0,0 +1,51 @@
+---
+# Sample benchmark task config file
+# measure network latency and throughput using netperf
+# There are two sample scenarios: a bulk test and a request/response test
+# In the bulk test, UDP_STREAM and TCP_STREAM can be used
+# send_msg_size and recv_msg_size are options of the bulk test
+# In the req/rsp test, TCP_RR, TCP_CRR and UDP_RR can be used
+# req_rsp_size is an option of the req/rsp test
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+  type: NetperfNode
+  options:
+    testname: 'UDP_STREAM'
+    send_msg_size: 1024
+    duration: 20
+
+  host: node1.LF
+  target: node2.LF
+
+  runner:
+    type: Iteration
+    iterations: 1
+    interval: 1
+  sla:
+    mean_latency: 100
+    action: monitor
+#-
+#  type: NetperfNode
+#  options:
+#    testname: 'TCP_RR'
+#    req_rsp_size: '32,1024'
+#
+#  host: node1.LF
+#  target: node2.LF
+#
+#  runner:
+#    type: Duration
+#    duration: 20
+#    interval: 1
+#  sla:
+#    mean_latency: 300
+#    action: monitor
+
+context:
+  type: Node
+  name: LF
+  file: etc/yardstick/nodes/compass_sclab_physical/pod.yaml
+
diff --git a/tests/unit/benchmark/scenarios/networking/test_netperf_node.py b/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
new file mode 100755 (executable)
index 0000000..1c39b29
--- /dev/null
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
+
+import mock
+import unittest
+import os
+import json
+
+from yardstick.benchmark.scenarios.networking import netperf_node
+
+
# The ssh module used by the scenario is patched for every test method in
# this class, so no real SSH connection is ever opened; each test method
# receives the patched module as the ``mock_ssh`` argument.
@mock.patch('yardstick.benchmark.scenarios.networking.netperf_node.ssh')
class NetperfNodeTestCase(unittest.TestCase):
    """Unit tests for the NetperfNode scenario (netperf between two nodes)."""

    def setUp(self):
        # Minimal two-node context (host = netperf client, target = netserver
        # side), shaped like the Node context data the scenario reads.
        self.ctx = {
            'host': {
                'ip': '192.168.10.10',
                'user': 'root',
                'password': 'root'
            },
            'target': {
                'ip': '192.168.10.11',
                'user': 'root',
                'password': 'root'
            }
        }

    def test_netperf_node_successful_setup(self, mock_ssh):
        """setup() creates both SSH handles and flags setup as done."""
        p = netperf_node.NetperfNode({}, self.ctx)
        # Every remote command issued during setup() reports success.
        mock_ssh.SSH().execute.return_value = (0, '', '')

        p.setup()
        self.assertIsNotNone(p.server)
        self.assertIsNotNone(p.client)
        self.assertEqual(p.setup_done, True)

    def test_netperf_node_successful_no_sla(self, mock_ssh):
        """run() without an SLA copies the parsed JSON output into result."""
        options = {}
        args = {'options': options}
        result = {}

        p = netperf_node.NetperfNode(args, self.ctx)
        mock_ssh.SSH().execute.return_value = (0, '', '')
        p.host = mock_ssh.SSH()

        sample_output = self._read_sample_output()
        # mock_ssh.SSH() returns the same mock instance on every call, so this
        # re-targets the handle the scenario uses to the benchmark output.
        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
        expected_result = json.loads(sample_output)
        p.run(result)
        self.assertEqual(result, expected_result)

    def test_netperf_node_successful_sla(self, mock_ssh):
        """run() succeeds when measured mean latency meets the SLA bound."""
        options = {}
        args = {
            'options': options,
            'sla': {'mean_latency': 100}
        }
        result = {}

        p = netperf_node.NetperfNode(args, self.ctx)
        mock_ssh.SSH().execute.return_value = (0, '', '')
        p.host = mock_ssh.SSH()

        sample_output = self._read_sample_output()
        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
        expected_result = json.loads(sample_output)
        p.run(result)
        self.assertEqual(result, expected_result)

    def test_netperf_node_unsuccessful_sla(self, mock_ssh):
        """run() raises AssertionError when latency exceeds a tight SLA."""
        options = {}
        args = {
            'options': options,
            'sla': {'mean_latency': 5}
        }
        result = {}

        p = netperf_node.NetperfNode(args, self.ctx)
        mock_ssh.SSH().execute.return_value = (0, '', '')
        p.host = mock_ssh.SSH()

        sample_output = self._read_sample_output()
        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
        self.assertRaises(AssertionError, p.run, result)

    def test_netperf_node_unsuccessful_script_error(self, mock_ssh):
        """run() raises RuntimeError when the remote script exits non-zero."""
        options = {}
        args = {'options': options}
        result = {}

        p = netperf_node.NetperfNode(args, self.ctx)
        mock_ssh.SSH().execute.return_value = (0, '', '')
        p.host = mock_ssh.SSH()

        # Non-zero status plus stderr simulates a failed benchmark script.
        mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
        self.assertRaises(RuntimeError, p.run, result)

    def _read_sample_output(self):
        """Return canned netperf JSON output stored next to this test file."""
        curr_path = os.path.dirname(os.path.abspath(__file__))
        output = os.path.join(curr_path, 'netperf_sample_output.json')
        with open(output) as f:
            sample_output = f.read()
        return sample_output
+
+
def main():
    """Entry point: hand control to the standard unittest runner."""
    unittest.main()


if __name__ == '__main__':    # pragma: no cover
    main()
diff --git a/yardstick/benchmark/scenarios/networking/netperf_install.bash b/yardstick/benchmark/scenarios/networking/netperf_install.bash
new file mode 100755 (executable)
index 0000000..eaa9f53
--- /dev/null
@@ -0,0 +1,32 @@
#!/bin/bash

##############################################################################
# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################

# Install netperf on a node and start its service.
# The scenario runs this script via "sudo bash", so it executes as root;
# the inner "sudo" prefixes are kept for safety when run directly.

set -e

echo "===Install netperf before test begin!!!==="

# Preserve the original configuration exactly once: a blind "cp" on a
# second run would overwrite the backup with the already-modified file
# and lose the genuine original (netperf_remove.bash restores from these).
if [ ! -f /etc/apt/sources.list_bkp ]; then
    cp /etc/apt/sources.list /etc/apt/sources.list_bkp
fi
if [ ! -f /etc/resolv.conf_bkp ]; then
    cp /etc/resolv.conf /etc/resolv.conf_bkp
fi

# Ensure name resolution works for apt; append the public DNS server only
# once so repeated runs do not accumulate duplicate entries.
grep -q "nameserver 8.8.4.4" /etc/resolv.conf || \
    echo "nameserver 8.8.4.4" >> /etc/resolv.conf

cat <<EOF >/etc/apt/sources.list
deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe multiverse
deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse
deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe multiverse
deb http://archive.ubuntu.com/ubuntu/ trusty-proposed main restricted universe multiverse
deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted universe multiverse
EOF

sudo apt-get update
sudo apt-get install -y netperf

service netperf start

echo "===Install netperf before test end!!!==="
diff --git a/yardstick/benchmark/scenarios/networking/netperf_node.py b/yardstick/benchmark/scenarios/networking/netperf_node.py
new file mode 100755 (executable)
index 0000000..87aa8d7
--- /dev/null
@@ -0,0 +1,203 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# bulk data test and req/rsp test are supported
+import pkg_resources
+import logging
+import json
+
+import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios import base
+
+LOG = logging.getLogger(__name__)
+
+
class NetperfNode(base.Scenario):
    """Execute netperf between two physical nodes.

    Parameters
    testname - to specify the test you wish to perform.
    the valid testnames are TCP_STREAM, TCP_RR, UDP_STREAM, UDP_RR
        type:    string
        unit:    na
        default: TCP_STREAM
    send_msg_size - value set the local send size to value bytes.
        type:    int
        unit:    bytes
        default: na
    recv_msg_size - setting the receive size for the remote system.
        type:    int
        unit:    bytes
        default: na
    req_rsp_size - set the request and/or response sizes based on sizespec.
        type:    string
        unit:    na
        default: na
    duration - duration of the test
        type:    int
        unit:    seconds
        default: 20

    read link below for more netperf args description:
    http://www.netperf.org/netperf/training/Netperf.html
    """
    __scenario_type__ = "NetperfNode"
    TARGET_SCRIPT = 'netperf_benchmark.bash'
    INSTALL_SCRIPT = 'netperf_install.bash'
    REMOVE_SCRIPT = 'netperf_remove.bash'

    def __init__(self, scenario_cfg, context_cfg):
        self.scenario_cfg = scenario_cfg
        self.context_cfg = context_cfg
        # SSH handles and the target address are populated by setup();
        # initialize them so premature access fails predictably.
        self.server = None
        self.client = None
        self.target_ip = None
        self.setup_done = False

    def setup(self):
        """Scenario setup: open SSH connections to both nodes, push the
        benchmark/install/remove scripts and install netperf on each node."""
        self.target_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.networking',
            NetperfNode.TARGET_SCRIPT)
        host = self.context_cfg['host']
        host_user = host.get('user', 'ubuntu')
        host_ip = host.get('ip', None)
        host_password = host.get('password', None)
        target = self.context_cfg['target']
        target_user = target.get('user', 'ubuntu')
        target_ip = target.get('ip', None)
        target_password = target.get('password', None)
        # run() points the netperf client at this address.
        self.target_ip = target_ip

        # NOTE: credentials are deliberately never logged.
        LOG.info("user:%s, target:%s", target_user, target_ip)
        self.server = ssh.SSH(target_user, target_ip,
                              password=target_password)
        self.server.wait(timeout=600)

        LOG.info("user:%s, host:%s", host_user, host_ip)
        self.client = ssh.SSH(host_user, host_ip,
                              password=host_password)
        self.client.wait(timeout=600)

        # copy the benchmark script to the host (netperf client side)
        self.client.run("cat > ~/netperf.sh",
                        stdin=open(self.target_script, "rb"))

        # copy the install/remove helper scripts to both nodes
        self.install_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.networking',
            NetperfNode.INSTALL_SCRIPT)
        self.remove_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.networking',
            NetperfNode.REMOVE_SCRIPT)

        self.server.run("cat > ~/netperf_install.sh",
                        stdin=open(self.install_script, "rb"))
        self.client.run("cat > ~/netperf_install.sh",
                        stdin=open(self.install_script, "rb"))
        self.server.run("cat > ~/netperf_remove.sh",
                        stdin=open(self.remove_script, "rb"))
        self.client.run("cat > ~/netperf_remove.sh",
                        stdin=open(self.remove_script, "rb"))
        self.server.execute("sudo bash netperf_install.sh")
        self.client.execute("sudo bash netperf_install.sh")

        self.setup_done = True

    def run(self, result):
        """Execute the benchmark on the host node and update *result* with
        the JSON metrics produced by the remote script.

        Raises:
            RuntimeError: the remote script failed or reported no latency.
            AssertionError: measured mean latency violates the SLA bound.
        """
        if not self.setup_done:
            self.setup()

        # The netperf client always talks to the target node's netserver.
        ipaddr = self.target_ip
        options = self.scenario_cfg['options']
        testname = options.get("testname", 'TCP_STREAM')
        # A Duration runner's duration takes precedence over the scenario's
        # own "duration" option; fall back to 20 seconds otherwise.
        duration_time = self.scenario_cfg["runner"].get("duration", None) \
            if "runner" in self.scenario_cfg else None
        arithmetic_time = options.get("duration", None)
        if duration_time:
            testlen = duration_time
        elif arithmetic_time:
            testlen = arithmetic_time
        else:
            testlen = 20

        cmd_args = "-H %s -l %s -t %s" % (ipaddr, testlen, testname)

        # get test specific options; -O selects the output columns the
        # benchmark script parses into JSON
        default_args = "-O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY'"
        cmd_args += " -- %s" % default_args
        option_pair_list = [("send_msg_size", "-m"),
                            ("recv_msg_size", "-M"),
                            ("req_rsp_size", "-r")]
        for option_pair in option_pair_list:
            if option_pair[0] in options:
                cmd_args += " %s %s" % (option_pair[1],
                                        options[option_pair[0]])

        cmd = "sudo bash netperf.sh %s" % (cmd_args)
        LOG.debug("Executing command: %s", cmd)
        status, stdout, stderr = self.client.execute(cmd)

        if status:
            raise RuntimeError(stderr)

        result.update(json.loads(stdout))

        if result['mean_latency'] == '':
            raise RuntimeError(stdout)

        # sla check
        mean_latency = float(result['mean_latency'])
        if "sla" in self.scenario_cfg:
            sla_max_mean_latency = int(
                self.scenario_cfg["sla"]["mean_latency"])

            assert mean_latency <= sla_max_mean_latency, \
                "mean_latency %f > sla_max_mean_latency(%f); " % \
                (mean_latency, sla_max_mean_latency)

    def teardown(self):
        """Remove netperf from both nodes after the test."""
        self.server.execute("sudo bash netperf_remove.sh")
        self.client.execute("sudo bash netperf_remove.sh")
+
+
def _test():    # pragma: no cover
    """Internal test function: run one TCP_STREAM scenario between two
    hard-coded nodes. Meant only for manual ad-hoc runs of this module,
    never for CI (it opens real SSH connections).
    """
    ctx = {
        "host": {
            "ip": "192.168.10.10",
            "user": "root",
            "password": "root"
        },
        "target": {
            "ip": "192.168.10.11",
            "user": "root",
            "password": "root"
        }
    }

    logger = logging.getLogger("yardstick")
    logger.setLevel(logging.DEBUG)

    options = {
        "testname": 'TCP_STREAM'
    }

    args = {"options": options}
    result = {}

    netperf = NetperfNode(args, ctx)
    netperf.run(result)
    # print() is valid on both Python 2 and 3; the original bare
    # "print result" statement was Python-2-only syntax.
    print(result)


if __name__ == '__main__':
    _test()
diff --git a/yardstick/benchmark/scenarios/networking/netperf_remove.bash b/yardstick/benchmark/scenarios/networking/netperf_remove.bash
new file mode 100755 (executable)
index 0000000..bb2299a
--- /dev/null
@@ -0,0 +1,21 @@
#!/bin/bash

##############################################################################
# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################

# Teardown counterpart of netperf_install.bash: restore the configuration
# files it modified and uninstall netperf.

set -e
echo "===Remove netperf after install begin!==="

# Restore only when a backup exists: with "set -e" an unconditional "cp"
# of a missing backup would abort the entire teardown.
if [ -f /etc/apt/sources.list_bkp ]; then
    cp /etc/apt/sources.list_bkp /etc/apt/sources.list
fi
if [ -f /etc/resolv.conf_bkp ]; then
    cp /etc/resolv.conf_bkp /etc/resolv.conf
fi

service netperf stop

sudo apt-get purge -y netperf

echo "===Remove netperf after install end!!!==="