1 # Copyright (c) 2016-2017 Intel Corporation
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """ Add generic L3 forwarder implementation based on sample_vnf.py"""
16 from __future__ import absolute_import
22 from netaddr import IPRange
24 from six.moves import zip
26 from yardstick.benchmark.contexts.base import Context
27 from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, \
30 LOG = logging.getLogger(__name__)
33 class RouterVNF(SampleVNF):
37 def __init__(self, name, vnfd, task_id, setup_env_helper_type=None,
38 resource_helper_type=None):
39 if setup_env_helper_type is None:
40 setup_env_helper_type = DpdkVnfSetupEnvHelper
43 vnfd['mgmt-interface'].pop("pkey", "")
44 vnfd['mgmt-interface']['password'] = 'password'
46 super(RouterVNF, self).__init__(
47 name, vnfd, task_id, setup_env_helper_type, resource_helper_type)
49 def instantiate(self, scenario_cfg, context_cfg):
50 self.scenario_helper.scenario_cfg = scenario_cfg
51 self.context_cfg = context_cfg
52 self.configure_routes(self.name, scenario_cfg, context_cfg)
54 def wait_for_instantiate(self):
55 time.sleep(self.WAIT_TIME)
58 # we can't share ssh paramiko objects to force new connection
59 self.ssh_helper.drop_connection()
63 self.resource_helper.stop_collect()
65 def scale(self, flavor=""):
69 def row_with_header(header, data):
70 """Returns dictionary per row of values for 'ip show stats'.
73 header(str): output header
74 data(str): output data
77 dict: dictionary per row of values for 'ip show stats'
80 prefix, columns = header.strip().split(':')
81 column_names = ["{0}:{1}".format(prefix, h) for h in columns.split()]
82 return dict(list(zip(column_names, data.strip().split())))
84 RX_TX_RE = re.compile(r"\s+[RT]X[^:]*:")
87 def get_stats(cls, stdout):
88 """Returns list of IP statistics.
91 stdout(str): command output
94 dict: list of IP statistics
97 input_lines = stdout.splitlines()
99 for n, row in enumerate(input_lines):
100 if cls.RX_TX_RE.match(row):
101 # use pairs of rows, header and data
102 table.update(cls.row_with_header(*input_lines[n:n + 2]))
105 def collect_kpi(self):
106 # Implement stats collection
107 ip_link_stats = '/sbin/ip -s link'
108 stdout = self.ssh_helper.execute(ip_link_stats)[1]
109 link_stats = self.get_stats(stdout)
110 # get RX/TX from link_stats and assign to results
111 physical_node = Context.get_physical_node_from_server(
112 self.scenario_helper.nodes[self.name])
115 "physical_node": physical_node,
117 "packets_dropped": 0,
119 "link_stats": link_stats
122 LOG.debug("%s collect KPIs %s", "RouterVNF", result)
127 def configure_routes(self, node_name, scenario_cfg, context_cfg):
128 # Configure IP of dataplane ports and add static ARP entries
130 # This function should be modified to configure a 3rd party/commercial VNF.
131 # The current implementation works on a Linux based VNF with "ip" command.
134 # {'src_ip': ['152.16.100.26-152.16.100.27'],
135 # 'dst_ip': ['152.16.40.26-152.16.40.27'], 'count': 2}
140 ip_cmd_replace = '/sbin/ip addr replace %s/24 dev %s'
141 ip_cmd_up = '/sbin/ip link set %s up'
142 ip_cmd_flush = '/sbin/ip address flush dev %s'
144 # Get VNF IPs from test case file
145 for value in context_cfg['nodes'][node_name]['interfaces'].values():
146 dst_macs.append(value['dst_mac'])
148 # Get the network interface name using local_mac
149 iname = self.ssh_helper.execute("/sbin/ip a |grep -B 1 %s | head -n 1"
150 % (value['local_mac']))
151 iname = iname[1].split(":")[1].strip()
154 self.ssh_helper.execute(ip_cmd_flush % iname)
156 # Get the local_ip from context_cfg and assign to the data ports
157 self.ssh_helper.execute(ip_cmd_replace % (str(value['local_ip']),
160 self.ssh_helper.execute(ip_cmd_up % iname)
161 time.sleep(self.INTERFACE_WAIT)
163 # Configure static ARP entries for each IP
164 # using SSH or REST API calls
166 src_ips = scenario_cfg['options']['flow']['src_ip']
167 dst_ips = scenario_cfg['options']['flow']['dst_ip']
169 raise KeyError("Missing flow definition in scenario section" +
170 " of the task definition file")
174 for src, dst in zip(src_ips, dst_ips):
175 range1 = itertools.cycle(iter(src.split('-')))
176 range2 = itertools.cycle(iter(dst.split('-')))
178 range1 = IPRange(next(range1), next(range1))
179 range2 = IPRange(next(range2), next(range2))
180 ip_ranges.append(range1)
181 ip_ranges.append(range2)
183 ip_cmd = '/sbin/ip neigh add %s lladdr %s dev %s nud perm'
184 for idx, iface in enumerate(ifaces):
185 for addr in ip_ranges[idx]:
186 self.ssh_helper.execute(ip_cmd % (addr, dst_macs[idx], iface))
188 arp_status = self.ssh_helper.execute("arp -a -n")
189 LOG.debug('arp %s', arp_status)