5 # see license for license details
11 from vstf.controller.unittest import model
12 from vstf.controller.settings.flows_settings import FlowsSettings
13 from vstf.controller.settings.tool_settings import ToolSettings
14 from vstf.controller.settings.perf_settings import PerfSettings
15 from vstf.controller.sw_perf.perf_provider import PerfProvider
16 from vstf.controller.sw_perf.flow_producer import FlowsProducer
17 from vstf.controller.settings.tester_settings import TesterSettings
18 from vstf.controller.env_build.env_build import EnvBuildApi as Builder
19 from vstf.common.log import setup_logging
20 import vstf.controller.sw_perf.performance as pf
# Module-level logger, standard per-module logging pattern.
LOG = logging.getLogger(__name__)
class TestPerf(model.Test):
    """Unit tests for the VSTF software-performance (sw_perf) workflow.

    NOTE(review): several lines of this class (including the setUp/tearDown
    ``def`` headers) are not visible in this chunk; the comments below
    annotate only the statements that are visible.
    """
        # --- setUp body (method header not visible here) ---
        # Announce the test, then let the base class initialise shared state
        # (self.conn is used by the tests below — presumably created by
        # model.Test.setUp; confirm against model.py).
        LOG.info("start performance unit test.")
        super(TestPerf, self).setUp()
        # Config directories are resolved relative to this source file:
        # etc/vstf/perf holds perf/flow settings, etc/vstf/env holds
        # tool/tester/environment settings.
        self.dir = os.path.dirname(__file__)
        self.perf_path = os.path.join(self.dir, '../../../etc/vstf/perf')
        self.base_path = os.path.join(self.dir, '../../../etc/vstf/env')
        # --- tearDown body (method header not visible here) ---
        LOG.info("stop performance unit test.")
    @unittest.skip('for now')
    def test_batch_perf(self):
        """Run the full perf batch: every scenario and case in the settings.

        NOTE(review): several loop-body lines (the inner ``for case in
        cases:`` header and the assignments of ``tool``, ``ttype`` and
        ``sizes``) are not visible in this chunk; indentation below is
        reconstructed and should be confirmed against the full file.
        """
        LOG.info(self.perf_path)
        LOG.info(self.base_path)
        # Load the test plan, flows, tool and tester configuration from disk.
        perf_settings = PerfSettings(path=self.perf_path)
        flows_settings = FlowsSettings(path=self.perf_path)
        tool_settings = ToolSettings(path=self.base_path)
        tester_settings = TesterSettings(path=self.base_path)
        flow_producer = FlowsProducer(self.conn, flows_settings)
        provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
        perf = pf.Performance(self.conn, provider)
        tests = perf_settings.settings
        for scenario, cases in tests.items():
            # One environment description per scenario, e.g. <base_path>/Tn.json.
            config_file = os.path.join(self.base_path, scenario + '.json')
            env = Builder(self.conn, config_file)
            # Per-case fields pulled from the case dict (inner loop header
            # not visible here).
            casetag = case['case']
            protocol = case['protocol']
            profile = case['profile']
            # Register the flows for this scenario/case, then run the tool.
            flow_producer.create(scenario, casetag)
            result = perf.run(tool, protocol, ttype, sizes)
            # perf.run is expected to return a result dict.
            self.assertEqual(True, isinstance(result, dict))
74 @unittest.skip('for now')
75 def test_perf_settings(self):
76 perf_settings = PerfSettings()
77 self.assertEqual(True, perf_settings.input())
    def test_tool_settings(self):
        """Exercise every ToolSettings setter with a common ``value`` payload.

        NOTE(review): the construction of ``value`` is not visible in this
        chunk — confirm its shape against the full file.
        """
        tool_settings = ToolSettings()
        # Apply the same configuration to each supported traffic tool.
        tool_settings.set_pktgen(value)
        tool_settings.set_netperf(value)
        tool_settings.set_iperf(value)
        tool_settings.set_qperf(value)
        LOG.info(tool_settings.settings)
    def test_flow_settings(self):
        """Build each scenario's env and create flows for every case tag.

        NOTE(review): the ``tests = {`` opener/closer and parts of the loop
        bodies (including the inner ``for case in cases:`` header) are not
        visible in this chunk; indentation below is reconstructed.
        """
            # Scenario name -> list of case tags driving flow creation below.
            "Tn": ["Tn-1", "Tn-2", "Tn-3", "Tn-4"],
            "Tnv": ["Tnv-1", "Tnv-2", "Tnv-3", "Tnv-4"],
            "Ti": ["Ti-1", "Ti-2", "Ti-3", "Ti-4", "Ti-5", "Ti-6"],
            "Tu": ["Tu-1", "Tu-2", "Tu-3", "Tu-4", "Tu-5", "Tu-6"]
        flows_settings = FlowsSettings(path=self.perf_path)
        flow_producer = FlowsProducer(self.conn, flows_settings)
        for scenario, cases in tests.items():
            # One environment description per scenario, e.g. <base_path>/Tn.json.
            config_file = os.path.join(self.base_path, scenario + '.json')
            LOG.info(config_file)
            env = Builder(self.conn, config_file)
                flow_producer.create(scenario, case)
        LOG.info(flows_settings.settings)
if __name__ == "__main__":
    # Configure console + file logging before the suite runs (any
    # test-runner invocation would follow outside this visible chunk).
    setup_logging(level=logging.INFO, log_file="/var/log/vstf/vstf-unit-test.log", clevel=logging.INFO)