Merge "Add test result dispatcher"
author Hou Jingwen <houjingwen@huawei.com>
Tue, 15 Sep 2015 06:39:37 +0000 (06:39 +0000)
committer Gerrit Code Review <gerrit@172.30.200.206>
Tue, 15 Sep 2015 06:39:37 +0000 (06:39 +0000)
33 files changed:
.coveragerc [new file with mode: 0644]
.testr.conf [new file with mode: 0644]
ci/run_tasks.sh [new file with mode: 0644]
docs/Yardstick_task_templates.rst [new file with mode: 0755]
samples/fio-template.yaml [new file with mode: 0644]
samples/fio.yaml
samples/iperf3.yaml
samples/lmbench.yaml
samples/perf.yaml
samples/ping-ext-ip.yaml
samples/ping-ext-stimuli.yaml
samples/ping-iteration.yaml
samples/ping-multiple-context.yaml
samples/ping-option-list.yaml
samples/ping-parallel.yaml
samples/ping-serial.yaml
samples/ping-template.yaml [new file with mode: 0644]
samples/ping.yaml
samples/pktgen.yaml
setup.py [changed mode: 0644->0755]
tests/__init__.py [new file with mode: 0644]
tests/unit/benchmark/context/__init__.py [new file with mode: 0644]
tests/unit/benchmark/context/test_model.py [new file with mode: 0644]
tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json [new file with mode: 0644]
tests/unit/benchmark/scenarios/networking/test_iperf3.py [new file with mode: 0644]
tests/unit/benchmark/scenarios/storage/__init__.py [new file with mode: 0644]
tests/unit/benchmark/scenarios/storage/test_fio.py [new file with mode: 0644]
tests/unit/common/test_utils.py [new file with mode: 0644]
tools/ubuntu-server-cloudimg-modify.sh
tools/yardstick-img-modify
yardstick/benchmark/scenarios/storage/fio.py
yardstick/cmd/commands/task.py [changed mode: 0644->0755]
yardstick/common/task_template.py [new file with mode: 0755]

diff --git a/.coveragerc b/.coveragerc
new file mode 100644 (file)
index 0000000..07ca209
--- /dev/null
@@ -0,0 +1,7 @@
+[run]
+branch = True
+source = yardstick
+
+[report]
+ignore_errors = True
+precision = 3
diff --git a/.testr.conf b/.testr.conf
new file mode 100644 (file)
index 0000000..1262335
--- /dev/null
@@ -0,0 +1,4 @@
+[DEFAULT]
+test_command=OS_STDOUT_CAPTURE=1 OS_STDERR_CAPTURE=1 ${PYTHON:-python} -m subunit.run discover -t ./ ./tests/unit $LISTOPT $IDOPTION
+test_id_option=--load-list $IDFILE
+test_list_option=--list
diff --git a/ci/run_tasks.sh b/ci/run_tasks.sh
new file mode 100644 (file)
index 0000000..27ccb3a
--- /dev/null
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Run yardstick tasks back-to-back
+# This script is called from yardstick-{pod} job and decides which tasks
+# are executed as part of that job.
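+#
+# Typical invocation (assumes the yardstick virtualenv is active):
+#
+#   ./ci/run_tasks.sh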
+
+
+# verify that virtual environment is activated
+# assumes the virtual environment has been created as described in README.rst
+if [[ ! $(which python | grep venv) ]]; then
+    echo "Unable to activate venv...Exiting"
+    exit 1
+fi
+
+EXIT_CODE=0
+
+# Define tasks to be run
+TASK_FILE_NAMES[0]='samples/ping.yaml'
+TASK_FILE_NAMES[1]='samples/iperf3.yaml'
+TASK_FILE_NAMES[2]='samples/pktgen.yaml'
+TASK_FILE_NAMES[3]='samples/fio.yaml'
+
+# Execute tasks
+for TASK_FILE in "${TASK_FILE_NAMES[@]}"
+do
+    echo "Executing task from file: $TASK_FILE"
+    yardstick -d task start $TASK_FILE
+
+    if [ $? -ne 0 ]; then
+        EXIT_CODE=1
+    fi
+done
+
+exit $EXIT_CODE
\ No newline at end of file
diff --git a/docs/Yardstick_task_templates.rst b/docs/Yardstick_task_templates.rst
new file mode 100755 (executable)
index 0000000..538937f
--- /dev/null
@@ -0,0 +1,155 @@
+Task Template Syntax
+====================
+
+Basic template syntax
+---------------------
+A nice feature of the Yardstick input task format is that it supports template syntax based on Jinja2.
+This turns out to be extremely useful when the structure of your task is fixed
+but you want to parameterize it in some way.
+For example, imagine your input task file (task.yaml) runs a set of Ping scenarios:
+
+::
+
+  # Sample benchmark task config file
+  # measure network latency using ping
+  schema: "yardstick:task:0.1"
+
+  scenarios:
+  -
+    type: Ping
+    options:
+      packetsize: 200
+    host: athena.demo
+    target: ares.demo
+
+    runner:
+      type: Duration
+      duration: 60
+      interval: 1
+
+    sla:
+      max_rtt: 10
+      action: monitor
+
+  context:
+      ...
+
+Let's say you want to run the same set of scenarios with the same runner/context/sla,
+but with another packetsize to compare the performance.
+The most elegant solution is to turn packetsize into a template variable:
+
+::
+
+  # Sample benchmark task config file
+  # measure network latency using ping
+
+  schema: "yardstick:task:0.1"
+  scenarios:
+  -
+    type: Ping
+    options:
+      packetsize: {{packetsize}}
+    host: athena.demo
+    target: ares.demo
+
+    runner:
+      type: Duration
+      duration: 60
+      interval: 1
+
+    sla:
+      max_rtt: 10
+      action: monitor
+
+  context:
+      ...
+
+and then pass the argument value for {{packetsize}} when starting a task with this configuration file.
+Yardstick provides you with different ways to do that:
+
+1. Pass the argument values directly in the command-line interface (as either a JSON or YAML dictionary):
+
+::
+
+ yardstick task start samples/ping-template.yaml --task-args '{"packetsize": "200"}'
+
+2. Refer to a file that specifies the argument values (JSON or YAML):
+
+::
+
+ yardstick task start samples/ping-template.yaml --task-args-file args.yaml
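+
+For example, a minimal args.yaml for the template above might contain
+just one argument (the value shown is illustrative):
+
+::
+
+  packetsize: "200"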
+
+Using the default values
+------------------------
+Note that the Jinja2 template syntax allows you to set the default values for your parameters.
+With default values set, your task file will work even if you don't parameterize it explicitly while starting a task.
+The default values should be set using the {% set ... %} clause (task.yaml). For example:
+
+::
+
+  # Sample benchmark task config file
+  # measure network latency using ping
+  schema: "yardstick:task:0.1"
+  {% set packetsize = packetsize or "100" %}
+  scenarios:
+  -
+    type: Ping
+    options:
+      packetsize: {{packetsize}}
+    host: athena.demo
+    target: ares.demo
+
+    runner:
+      type: Duration
+      duration: 60
+      interval: 1
+    ...
+
+If you don't pass the value for {{packetsize}} while starting a task, the default one will be used.
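+
+In that case the task can be started without passing any template
+arguments at all:
+
+::
+
+ yardstick task start samples/ping-template.yaml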
+
+Advanced templates
+------------------
+Yardstick makes it possible to use all the power of the Jinja2 template syntax, including control structures such as the for-endfor construct.
+As an example, let us make up a task file that runs a block storage performance test.
+The input task file (fio-template.yaml) below uses a nested Jinja2 for-endfor construct to accomplish that:
+
+::
+
+  # Test block sizes of 4KB, 8KB, 64KB, 1MB
+  # Test 5 workloads: read, write, randwrite, randread, rw
+  schema: "yardstick:task:0.1"
+
+  scenarios:
+  {% for bs in ['4k', '8k', '64k', '1024k' ] %}
+    {% for rw in ['read', 'write', 'randwrite', 'randread', 'rw' ] %}
+  -
+    type: Fio
+    options:
+      filename: /home/ec2-user/data.raw
+      bs: {{bs}}
+      rw: {{rw}}
+      ramp_time: 10
+    host: fio.demo
+    runner:
+      type: Duration
+      duration: 60
+      interval: 60
+
+    {% endfor %}
+  {% endfor %}
+  context:
+      ...
diff --git a/samples/fio-template.yaml b/samples/fio-template.yaml
new file mode 100644 (file)
index 0000000..395e4c4
--- /dev/null
@@ -0,0 +1,39 @@
+# Sample benchmark task config file
+# measure storage performance using fio
+# Jinja2 Syntax is supported
+# using Jinja2 control structures (for-endfor construct) to compose complex tasks
+# Test block sizes of 4KB, 8KB, 64KB, 1MB
+# Test 5 workloads: 4 corners and 1 mixed: read, write, randwrite, randread, rw
+schema: "yardstick:task:0.1"
+
+scenarios:
+{% for rw in ['read', 'write', 'randwrite', 'randread', 'rw'] %}
+  {% for bs in ['4k', '8k', '64k', '1024k'] %}
+-
+  type: Fio
+  options:
+    filename: /home/ec2-user/data.raw
+    bs: {{bs}}
+    rw: {{rw}}
+    ramp_time: 10
+    duration: 20
+  host: fio.demo
+  runner:
+    type: Iteration
+    iterations: 2
+    interval: 1
+  {% endfor %}
+{% endfor %}
+
+context:
+  name: demo
+  image: yardstick-trusty-server
+  flavor: yardstick-flavor
+  user: ec2-user
+  servers:
+    fio:
+      floating_ip: true
+  networks:
+    test:
+      cidr: "10.0.1.0/24"
+
index f70912a..44444c7 100644 (file)
@@ -5,9 +5,10 @@
 # For this sample just like running the command below on the test vm and 
 # getting benchmark info back to the yardstick.
 #
-# sudo fio -filename=/home/ec2-user/data.raw -bs=4k -rw=write -ramp_time=10 \
-#          -runtime=60 -name=yardstick-fio -ioengine=libaio -direct=1 \
-#          -group_reporting -numjobs=1 -time_based --output=yardstick-fio.log
+# sudo fio -filename=/home/ec2-user/data.raw -bs=4k -iodepth=1 -rw=write \
+#          -ramp_time=10 -runtime=60 -name=yardstick-fio -ioengine=libaio \
+#          -direct=1 -group_reporting -numjobs=1 -time_based \
+#          --output=yardstick-fio.log
 #
 # When the above fio command done, the yardstick-fio.log file will contain 
 # information like below and the benchmark script will take iops, throughput 
@@ -52,13 +53,14 @@ scenarios:
   options:
     filename: /home/ec2-user/data.raw
     bs: 4k
+    iodepth: 1
     rw: write
     ramp_time: 10
   host: fio.demo
   runner:
     type: Duration
     duration: 60
-    interval: 60
+    interval: 1
 
 context:
   name: demo
@@ -71,4 +73,3 @@ context:
   networks:
     test:
       cidr: "10.0.1.0/24"
-      external_network: "net04_ext"
index b3a7a9f..8de4467 100644 (file)
@@ -40,5 +40,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index c7526c0..256d8c6 100644 (file)
@@ -41,6 +41,5 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
 
index e7ba2d0..b6ce2a2 100644 (file)
@@ -40,4 +40,3 @@ context:
   networks:
     test:
       cidr: "10.0.1.0/24"
-      external_network: "net04_ext"
index f2923f6..d36c295 100644 (file)
@@ -32,5 +32,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index cfe7915..451f010 100644 (file)
@@ -45,5 +45,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index 810530c..a5e9094 100755 (executable)
@@ -41,5 +41,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index c529fcc..1c27e1b 100644 (file)
@@ -36,7 +36,6 @@ contexts:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 -
   name: demo2
   image: cirros-0.3.3
@@ -52,5 +51,4 @@ contexts:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index 1fa95e6..30d133e 100644 (file)
@@ -39,6 +39,5 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
 
index f3f6989..00d2613 100644 (file)
@@ -53,5 +53,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index ff281ee..37ea715 100644 (file)
@@ -45,5 +45,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
diff --git a/samples/ping-template.yaml b/samples/ping-template.yaml
new file mode 100644 (file)
index 0000000..cde6ddd
--- /dev/null
@@ -0,0 +1,48 @@
+# Sample benchmark task config file
+# measure network latency using ping
+# Jinja2 Syntax is supported
+# parameterize this task: {{packetsize}} is passed to the scenario as an argument
+# If you don't pass the value for {{packetsize}} while starting a task,
+# the default one will be used.
+
+
+schema: "yardstick:task:0.1"
+{% set packetsize = packetsize or "100" %}
+scenarios:
+-
+  type: Ping
+  options:
+    packetsize: {{packetsize}}
+  host: athena.demo
+  target: ares.demo
+
+  runner:
+    type: Duration
+    duration: 60
+    interval: 1
+
+  sla:
+    max_rtt: 10
+    action: monitor
+
+context:
+  name: demo
+  image: cirros-0.3.3
+  flavor: m1.tiny
+  user: cirros
+
+  placement_groups:
+    pgrp1:
+      policy: "availability"
+
+  servers:
+    athena:
+      floating_ip: true
+      placement: "pgrp1"
+    ares:
+      placement: "pgrp1"
+
+  networks:
+    test:
+      cidr: '10.0.1.0/24'
+
index f8b6a31..845d10d 100644 (file)
@@ -41,5 +41,4 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
index 6097308..11d6279 100644 (file)
@@ -47,6 +47,5 @@ context:
   networks:
     test:
       cidr: '10.0.1.0/24'
-      external_network: "net04_ext"
 
 
old mode 100644 (file)
new mode 100755 (executable)
index f73094a..f171aaf
--- a/setup.py
+++ b/setup.py
@@ -19,6 +19,7 @@ setup(
     url="https://www.opnfv.org",
     install_requires=["backport_ipaddress",  # remove with python3
                       "flake8",
+                      "Jinja2>=2.6",
                       "PyYAML>=3.10",
                       "pbr<2.0,>=1.3",
                       "python-glanceclient>=0.12.0",
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/unit/benchmark/context/__init__.py b/tests/unit/benchmark/context/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/unit/benchmark/context/test_model.py b/tests/unit/benchmark/context/test_model.py
new file mode 100644 (file)
index 0000000..cf0a605
--- /dev/null
@@ -0,0 +1,345 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.context.model
+
+import mock
+import unittest
+
+from yardstick.benchmark.context import model
+
+
+class ObjectTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.mock_context = mock.Mock()
+
+    def test_construct(self):
+
+        test_object = model.Object('foo', self.mock_context)
+
+        self.assertEqual(test_object.name, 'foo')
+        self.assertEqual(test_object._context, self.mock_context)
+        self.assertIsNone(test_object.stack_name)
+        self.assertIsNone(test_object.stack_id)
+
+    def test_dn(self):
+
+        self.mock_context.name = 'bar'
+        test_object = model.Object('foo', self.mock_context)
+
+        self.assertEqual('foo.bar', test_object.dn)
+
+
+class PlacementGroupTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.mock_context = mock.Mock()
+        self.mock_context.name = 'bar'
+
+    def tearDown(self):
+        model.PlacementGroup.map = {}
+
+    def test_successful_construct(self):
+
+        test_pg = model.PlacementGroup('foo', self.mock_context, 'affinity')
+
+        self.assertEqual(test_pg.name, 'foo')
+        self.assertEqual(test_pg.members, set())
+        self.assertEqual(test_pg.stack_name, 'bar-foo')
+        self.assertEqual(test_pg.policy, 'affinity')
+
+        test_map = {'foo': test_pg}
+        self.assertEqual(model.PlacementGroup.map, test_map)
+
+    def test_wrong_policy_in_construct(self):
+
+        self.assertRaises(ValueError, model.PlacementGroup, 'foo',
+                          self.mock_context, 'baz')
+
+    def test_add_member(self):
+
+        test_pg = model.PlacementGroup('foo', self.mock_context, 'affinity')
+        test_pg.add_member('foo')
+
+        self.assertEqual(test_pg.members, set(['foo']))
+
+    def test_get_name_successful(self):
+
+        model.PlacementGroup.map = {'foo': True}
+        self.assertTrue(model.PlacementGroup.get('foo'))
+
+    def test_get_name_unsuccessful(self):
+
+        self.assertIsNone(model.PlacementGroup.get('foo'))
+
+
+class RouterTestCase(unittest.TestCase):
+
+    def test_construct(self):
+
+        mock_context = mock.Mock()
+        mock_context.name = 'baz'
+        test_router = model.Router('foo', 'bar', mock_context, 'qux')
+
+        self.assertEqual(test_router.stack_name, 'baz-bar-foo')
+        self.assertEqual(test_router.stack_if_name, 'baz-bar-foo-if0')
+        self.assertEqual(test_router.external_gateway_info, 'qux')
+
+
+class NetworkTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.mock_context = mock.Mock()
+        self.mock_context.name = 'bar'
+
+    def tearDown(self):
+        model.Network.list = []
+
+    def test_construct_no_external_network(self):
+
+        attrs = {'cidr': '10.0.0.0/24'}
+        test_network = model.Network('foo', self.mock_context, attrs)
+
+        self.assertEqual(test_network.stack_name, 'bar-foo')
+        self.assertEqual(test_network.subnet_stack_name, 'bar-foo-subnet')
+        self.assertEqual(test_network.subnet_cidr, attrs['cidr'])
+        self.assertIsNone(test_network.router)
+        self.assertIn(test_network, model.Network.list)
+
+    def test_construct_has_external_network(self):
+
+        attrs = {'external_network': 'ext_net'}
+        test_network = model.Network('foo', self.mock_context, attrs)
+        exp_router = model.Router('router', 'foo', self.mock_context, 'ext_net')
+
+        self.assertEqual(test_network.router.stack_name, exp_router.stack_name)
+        self.assertEqual(test_network.router.stack_if_name,
+                         exp_router.stack_if_name)
+        self.assertEqual(test_network.router.external_gateway_info,
+                         exp_router.external_gateway_info)
+
+    def test_has_route_to(self):
+
+        attrs = {'external_network': 'ext_net'}
+        test_network = model.Network('foo', self.mock_context, attrs)
+
+        self.assertTrue(test_network.has_route_to('ext_net'))
+
+    def test_has_no_route_to(self):
+
+        attrs = {}
+        test_network = model.Network('foo', self.mock_context, attrs)
+
+        self.assertFalse(test_network.has_route_to('ext_net'))
+
+    @mock.patch('yardstick.benchmark.context.model.Network.has_route_to')
+    def test_find_by_route_to(self, mock_has_route_to):
+
+        mock_network = mock.Mock()
+        model.Network.list = [mock_network]
+        mock_has_route_to.return_value = True
+
+        self.assertIs(mock_network, model.Network.find_by_route_to('foo'))
+
+    def test_find_external_network(self):
+
+        mock_network = mock.Mock()
+        mock_network.router = mock.Mock()
+        mock_network.router.external_gateway_info = 'ext_net'
+        model.Network.list = [mock_network]
+
+        self.assertEqual(model.Network.find_external_network(), 'ext_net')
+
+
+class ServerTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.mock_context = mock.Mock()
+        self.mock_context.name = 'bar'
+        self.mock_context.keypair_name = 'some-keys'
+        self.mock_context.secgroup_name = 'some-secgroup'
+
+    def test_construct_defaults(self):
+
+        attrs = None
+        test_server = model.Server('foo', self.mock_context, attrs)
+
+        self.assertEqual(test_server.stack_name, 'foo.bar')
+        self.assertEqual(test_server.keypair_name, 'some-keys')
+        self.assertEqual(test_server.secgroup_name, 'some-secgroup')
+        self.assertEqual(test_server.placement_groups, [])
+        self.assertEqual(test_server.instances, 1)
+        self.assertIsNone(test_server.floating_ip)
+        self.assertIsNone(test_server._image)
+        self.assertIsNone(test_server._flavor)
+        self.assertIn(test_server, model.Server.list)
+
+    @mock.patch('yardstick.benchmark.context.model.PlacementGroup')
+    def test_construct_get_wrong_placement_group(self, mock_pg):
+
+        attrs = {'placement': 'baz'}
+        mock_pg.get.return_value = None
+
+        self.assertRaises(ValueError, model.Server, 'foo',
+                          self.mock_context, attrs)
+
+    @mock.patch('yardstick.benchmark.context.model.HeatTemplate')
+    def test__add_instance(self, mock_template):
+
+        attrs = {'image': 'some-image', 'flavor': 'some-flavor'}
+        test_server = model.Server('foo', self.mock_context, attrs)
+
+        mock_network = mock.Mock()
+        mock_network.name = 'some-network'
+        mock_network.stack_name = 'some-network-stack'
+        mock_network.subnet_stack_name = 'some-network-stack-subnet'
+
+        test_server._add_instance(mock_template, 'some-server',
+                                  [mock_network], 'hints')
+
+        mock_template.add_port.assert_called_with(
+            'some-server-some-network-port',
+            mock_network.stack_name,
+            mock_network.subnet_stack_name,
+            sec_group_id=self.mock_context.secgroup_name)
+
+        mock_template.add_server.assert_called_with(
+            'some-server', 'some-image', 'some-flavor',
+            ports=['some-server-some-network-port'],
+            key_name=self.mock_context.keypair_name,
+            scheduler_hints='hints')
+
+
+class ContextTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.test_context = model.Context()
+        self.mock_context = mock.Mock()
+
+    def tearDown(self):
+        model.Context.list = []
+
+    def test_construct(self):
+
+        self.assertIsNone(self.test_context.name)
+        self.assertIsNone(self.test_context.stack)
+        self.assertEqual(self.test_context.networks, [])
+        self.assertEqual(self.test_context.servers, [])
+        self.assertEqual(self.test_context.placement_groups, [])
+        self.assertIsNone(self.test_context.keypair_name)
+        self.assertIsNone(self.test_context.secgroup_name)
+        self.assertEqual(self.test_context._server_map, {})
+        self.assertIsNone(self.test_context._image)
+        self.assertIsNone(self.test_context._flavor)
+        self.assertIsNone(self.test_context._user)
+        self.assertIsNone(self.test_context.template_file)
+        self.assertIsNone(self.test_context.heat_parameters)
+        self.assertIn(self.test_context, model.Context.list)
+
+    @mock.patch('yardstick.benchmark.context.model.PlacementGroup')
+    @mock.patch('yardstick.benchmark.context.model.Network')
+    @mock.patch('yardstick.benchmark.context.model.Server')
+    def test_init(self, mock_server, mock_network, mock_pg):
+
+        pgs = {'pgrp1': {'policy': 'availability'}}
+        networks = {'bar': {'cidr': '10.0.1.0/24'}}
+        servers = {'baz': {'floating_ip': True, 'placement': 'pgrp1'}}
+        attrs = {'name': 'foo',
+                 'placement_groups': pgs,
+                 'networks': networks,
+                 'servers': servers}
+
+        self.test_context.init(attrs)
+
+        self.assertEqual(self.test_context.keypair_name, "foo-key")
+        self.assertEqual(self.test_context.secgroup_name, "foo-secgroup")
+
+        mock_pg.assert_called_with('pgrp1', self.test_context,
+                                   pgs['pgrp1']['policy'])
+        self.assertTrue(len(self.test_context.placement_groups) == 1)
+
+        mock_network.assert_called_with(
+            'bar', self.test_context, networks['bar'])
+        self.assertTrue(len(self.test_context.networks) == 1)
+
+        mock_server.assert_called_with('baz', self.test_context, servers['baz'])
+        self.assertTrue(len(self.test_context.servers) == 1)
+
+    @mock.patch('yardstick.benchmark.context.model.HeatTemplate')
+    def test__add_resources_to_template_no_servers(self, mock_template):
+
+        self.test_context.keypair_name = "foo-key"
+        self.test_context.secgroup_name = "foo-secgroup"
+
+        self.test_context._add_resources_to_template(mock_template)
+        mock_template.add_keypair.assert_called_with("foo-key")
+        mock_template.add_security_group.assert_called_with("foo-secgroup")
+
+    @mock.patch('yardstick.benchmark.context.model.HeatTemplate')
+    def test_deploy(self, mock_template):
+
+        self.test_context.name = 'foo'
+        self.test_context.template_file = '/bar/baz/some-heat-file'
+        self.test_context.heat_parameters = {'image': 'cirros'}
+        self.test_context.deploy()
+
+        mock_template.assert_called_with(self.test_context.name,
+                                         self.test_context.template_file,
+                                         self.test_context.heat_parameters)
+        self.assertIsNotNone(self.test_context.stack)
+
+    @mock.patch('yardstick.benchmark.context.model.HeatTemplate')
+    def test_undeploy(self, mock_template):
+
+        self.test_context.stack = mock_template
+        self.test_context.undeploy()
+
+        self.assertTrue(mock_template.delete.called)
+
+    def test_get_server_by_name(self):
+
+        self.mock_context._server_map = {'foo.bar': True}
+        model.Context.list = [self.mock_context]
+
+        self.assertTrue(model.Context.get_server_by_name('foo.bar'))
+
+    def test_get_server_by_wrong_name(self):
+
+        self.assertRaises(ValueError, model.Context.get_server_by_name, 'foo')
+
+    def test_get_context_by_name(self):
+
+        self.mock_context.name = 'foo'
+        model.Context.list = [self.mock_context]
+
+        self.assertIs(model.Context.get_context_by_name('foo'),
+                      self.mock_context)
+
+    def test_get_unknown_context_by_name(self):
+
+        model.Context.list = []
+        self.assertIsNone(model.Context.get_context_by_name('foo'))
+
+    @mock.patch('yardstick.benchmark.context.model.Server')
+    def test_get_server(self, mock_server):
+
+        self.mock_context.name = 'bar'
+        self.mock_context.stack.outputs = {'public_ip': '127.0.0.1',
+                                           'private_ip': '10.0.0.1'}
+        model.Context.list = [self.mock_context]
+        attr_name = {'name': 'foo.bar',
+                     'public_ip_attr': 'public_ip',
+                     'private_ip_attr': 'private_ip'}
+        result = model.Context.get_server(attr_name)
+
+        self.assertEqual(result.public_ip, '127.0.0.1')
+        self.assertEqual(result.private_ip, '10.0.0.1')
diff --git a/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json b/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json
new file mode 100644 (file)
index 0000000..b56009b
--- /dev/null
@@ -0,0 +1 @@
+{"start": {"connecting_to": {"host": "172.16.0.252", "port": 5201}, "timestamp": {"timesecs": 1436254758, "time": "Tue, 07 Jul 2015 07:39:18 GMT"}, "test_start": {"protocol": "TCP", "num_streams": 1, "omit": 0, "bytes": 0, "blksize": 131072, "duration": 10, "blocks": 0, "reverse": 0}, "system_info": "Linux client 3.13.0-55-generic #94-Ubuntu SMP Thu Jun 18 00:27:10 UTC 2015 x86_64 x86_64 x86_64 GNU/Linux\n", "version": "iperf 3.0.7", "connected": [{"local_host": "10.0.1.2", "local_port": 37633, "remote_host": "172.16.0.252", "socket": 4, "remote_port": 5201}], "cookie": "client.1436254758.606879.1fb328dc230", "tcp_mss_default": 1448}, "intervals": [{"sum": {"end": 1.00068, "seconds": 1.00068, "bytes": 16996624, "bits_per_second": 135881000.0, "start": 0, "retransmits": 0, "omitted": false}, "streams": [{"end": 1.00068, "socket": 4, "seconds": 1.00068, "bytes": 16996624, "bits_per_second": 135881000.0, "start": 0, "retransmits": 0, "omitted": false, "snd_cwnd": 451776}]}, {"sum": {"end": 2.00048, "seconds": 0.999804, "bytes": 20010192, "bits_per_second": 160113000.0, "start": 1.00068, "retransmits": 0, "omitted": false}, "streams": [{"end": 2.00048, "socket": 4, "seconds": 0.999804, "bytes": 20010192, "bits_per_second": 160113000.0, "start": 1.00068, "retransmits": 0, "omitted": false, "snd_cwnd": 713864}]}, {"sum": {"end": 3.00083, "seconds": 1.00035, "bytes": 18330464, "bits_per_second": 146592000.0, "start": 2.00048, "retransmits": 0, "omitted": false}, "streams": [{"end": 3.00083, "socket": 4, "seconds": 1.00035, "bytes": 18330464, "bits_per_second": 146592000.0, "start": 2.00048, "retransmits": 0, "omitted": false, "snd_cwnd": 768888}]}, {"sum": {"end": 4.00707, "seconds": 1.00624, "bytes": 19658376, "bits_per_second": 156292000.0, "start": 3.00083, "retransmits": 0, "omitted": false}, "streams": [{"end": 4.00707, "socket": 4, "seconds": 1.00624, "bytes": 19658376, "bits_per_second": 156292000.0, "start": 3.00083, "retransmits": 0, "omitted": false, "snd_cwnd": 812328}]}, {"sum": {"end": 5.00104, "seconds": 0.993972, "bytes": 15709072, "bits_per_second": 126435000.0, "start": 4.00707, "retransmits": 0, "omitted": false}, "streams": [{"end": 5.00104, "socket": 4, "seconds": 0.993972, "bytes": 15709072, "bits_per_second": 126435000.0, "start": 4.00707, "retransmits": 0, "omitted": false, "snd_cwnd": 849976}]}, {"sum": {"end": 6.00049, "seconds": 0.999443, "bytes": 19616288, "bits_per_second": 157018000.0, "start": 5.00104, "retransmits": 53, "omitted": false}, "streams": [{"end": 6.00049, "socket": 4, "seconds": 0.999443, "bytes": 19616288, "bits_per_second": 157018000.0, "start": 5.00104, "retransmits": 53, "omitted": false, "snd_cwnd": 641464}]}, {"sum": {"end": 7.00085, "seconds": 1.00036, "bytes": 22250480, "bits_per_second": 177939000.0, "start": 6.00049, "retransmits": 0, "omitted": false}, "streams": [{"end": 7.00085, "socket": 4, "seconds": 1.00036, "bytes": 22250480, "bits_per_second": 177939000.0, "start": 6.00049, "retransmits": 0, "omitted": false, "snd_cwnd": 706624}]}, {"sum": {"end": 8.00476, "seconds": 1.00391, "bytes": 22282240, "bits_per_second": 177564000.0, "start": 7.00085, "retransmits": 0, "omitted": false}, "streams": [{"end": 8.00476, "socket": 4, "seconds": 1.00391, "bytes": 22282240, "bits_per_second": 177564000.0, "start": 7.00085, "retransmits": 0, "omitted": false, "snd_cwnd": 761648}]}, {"sum": {"end": 9.0016, "seconds": 0.996847, "bytes": 19657680, "bits_per_second": 157759000.0, "start": 8.00476, "retransmits": 28, "omitted": false}, "streams": [{"end": 
9.0016, "socket": 4, "seconds": 0.996847, "bytes": 19657680, "bits_per_second": 157759000.0, "start": 8.00476, "retransmits": 28, "omitted": false, "snd_cwnd": 570512}]}, {"sum": {"end": 10.0112, "seconds": 1.00955, "bytes": 20932520, "bits_per_second": 165876000.0, "start": 9.0016, "retransmits": 0, "omitted": false}, "streams": [{"end": 10.0112, "socket": 4, "seconds": 1.00955, "bytes": 20932520, "bits_per_second": 165876000.0, "start": 9.0016, "retransmits": 0, "omitted": false, "snd_cwnd": 615400}]}], "end": {"sum_received": {"seconds": 10.0112, "start": 0, "end": 10.0112, "bytes": 193366712, "bits_per_second": 154521000.0}, "streams": [{"sender": {"end": 10.0112, "socket": 4, "seconds": 10.0112, "bytes": 195443936, "bits_per_second": 156181000.0, "start": 0, "retransmits": 81}, "receiver": {"end": 10.0112, "socket": 4, "seconds": 10.0112, "bytes": 193366712, "bits_per_second": 154521000.0, "start": 0}}], "sum_sent": {"end": 10.0112, "seconds": 10.0112, "bytes": 195443936, "bits_per_second": 156181000.0, "start": 0, "retransmits": 81}, "cpu_utilization_percent": {"remote_user": 1.10295, "remote_system": 40.0403, "host_user": 2.41785, "remote_total": 41.1438, "host_system": 5.09548, "host_total": 7.51411}}}
\ No newline at end of file
diff --git a/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
new file mode 100644 (file)
index 0000000..239e46a
--- /dev/null
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
+
+import mock
+import unittest
+import os
+import json
+
+from yardstick.benchmark.scenarios.networking import iperf3
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.iperf3.ssh')
+class IperfTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.ctx = {
+            'host': '172.16.0.137',
+            'target': '172.16.0.138',
+            'user': 'cirros',
+            'key_filename': "mykey.key"
+        }
+
+    def test_iperf_successful_setup(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+
+        p.setup()
+        self.assertIsNotNone(p.target)
+        self.assertIsNotNone(p.host)
+        mock_ssh.SSH().execute.assert_called_with("iperf3 -s -D")
+
+    def test_iperf_unsuccessful_setup(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+        self.assertRaises(RuntimeError, p.setup)
+
+    def test_iperf_successful_teardown(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+        p.target = mock_ssh.SSH()
+
+        p.teardown()
+        self.assertTrue(mock_ssh.SSH().close.called)
+        mock_ssh.SSH().execute.assert_called_with("pkill iperf3")
+
+    def test_iperf_successful_no_sla(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {'options': options}
+
+        sample_output = self._read_sample_output()
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+        expected_result = json.loads(sample_output)
+        result = p.run(args)
+        self.assertEqual(result, expected_result)
+
+    def test_iperf_successful_sla(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {
+            'options': options,
+            'sla': {'bytes_per_second': 15000000}
+        }
+
+        sample_output = self._read_sample_output()
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+        expected_result = json.loads(sample_output)
+        result = p.run(args)
+        self.assertEqual(result, expected_result)
+
+    def test_iperf_unsuccessful_sla(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {
+            'options': options,
+            'sla': {'bytes_per_second': 25000000}
+        }
+
+        sample_output = self._read_sample_output()
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+        self.assertRaises(AssertionError, p.run, args)
+
+    def test_iperf_unsuccessful_script_error(self, mock_ssh):
+
+        p = iperf3.Iperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {'options': options}
+
+        mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+        self.assertRaises(RuntimeError, p.run, args)
+
+    def _read_sample_output(self):
+        curr_path = os.path.dirname(os.path.abspath(__file__))
+        output = os.path.join(curr_path, 'iperf3_sample_output.json')
+        with open(output) as f:
+            sample_output = f.read()
+        return sample_output
+
+
+def main():
+    unittest.main()
+
+if __name__ == '__main__':
+    main()
diff --git a/tests/unit/benchmark/scenarios/storage/__init__.py b/tests/unit/benchmark/scenarios/storage/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/unit/benchmark/scenarios/storage/test_fio.py b/tests/unit/benchmark/scenarios/storage/test_fio.py
new file mode 100644 (file)
index 0000000..54f493e
--- /dev/null
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.storage.fio.Fio
+
+import mock
+import unittest
+import json
+
+from yardstick.benchmark.scenarios.storage import fio
+
+
+@mock.patch('yardstick.benchmark.scenarios.storage.fio.ssh')
+class FioTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.ctx = {
+            'host': '172.16.0.137',
+            'user': 'cirros',
+            'key_filename': "mykey.key"
+        }
+
+    def test_fio_successful_setup(self, mock_ssh):
+
+        p = fio.Fio(self.ctx)
+        options = {
+            'filename': "/home/ec2-user/data.raw",
+            'bs': "4k",
+            'rw': "write",
+            'ramp_time': 10
+        }
+        args = {'options': options}
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.setup()
+
+        self.assertIsNotNone(p.client)
+        self.assertEqual(p.setup_done, True)
+
+    def test_fio_successful_no_sla(self, mock_ssh):
+
+        p = fio.Fio(self.ctx)
+        options = {
+            'filename': "/home/ec2-user/data.raw",
+            'bs': "4k",
+            'rw': "write",
+            'ramp_time': 10
+        }
+        args = {'options': options}
+        p.client = mock_ssh.SSH()
+
+        sample_output = '{"read_bw": "N/A", "write_lat": "407.08usec", \
+            "read_iops": "N/A", "write_bw": "9507KB/s", \
+            "write_iops": "2376", "read_lat": "N/A"}'
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+        result = p.run(args)
+        expected_result = json.loads(sample_output)
+        self.assertEqual(result, expected_result)
+
+    def test_fio_unsuccessful_script_error(self, mock_ssh):
+
+        p = fio.Fio(self.ctx)
+        options = {
+            'filename': "/home/ec2-user/data.raw",
+            'bs': "4k",
+            'rw': "write",
+            'ramp_time': 10
+        }
+        args = {'options': options}
+        p.client = mock_ssh.SSH()
+
+        mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+        self.assertRaises(RuntimeError, p.run, args)
+
+
+def main():
+    unittest.main()
+
+if __name__ == '__main__':
+    main()
diff --git a/tests/unit/common/test_utils.py b/tests/unit/common/test_utils.py
new file mode 100644 (file)
index 0000000..002d049
--- /dev/null
@@ -0,0 +1,90 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.common.utils
+
+import os
+import mock
+import unittest
+
+from yardstick.common import utils
+
+
+class IterSubclassesTestCase(unittest.TestCase):
+    # Disclaimer: this class is a modified copy from
+    # rally/tests/unit/common/plugin/test_discover.py
+    # Copyright 2015: Mirantis Inc.
+    def test_itersubclasses(self):
+        class A(object):
+            pass
+
+        class B(A):
+            pass
+
+        class C(A):
+            pass
+
+        class D(C):
+            pass
+
+        self.assertEqual([B, C, D], list(utils.itersubclasses(A)))
+
+
+class TryAppendModuleTestCase(unittest.TestCase):
+
+    @mock.patch('yardstick.common.utils.importutils')
+    def test_try_append_module_not_in_modules(self, mock_importutils):
+
+        modules = {}
+        name = 'foo'
+        utils.try_append_module(name, modules)
+        mock_importutils.import_module.assert_called_with(name)
+
+    @mock.patch('yardstick.common.utils.importutils')
+    def test_try_append_module_already_in_modules(self, mock_importutils):
+
+        modules = {'foo'}
+        name = 'foo'
+        utils.try_append_module(name, modules)
+        self.assertFalse(mock_importutils.import_module.called)
+
+
+class ImportModulesFromPackageTestCase(unittest.TestCase):
+
+    @mock.patch('yardstick.common.utils.os.walk')
+    @mock.patch('yardstick.common.utils.try_append_module')
+    def test_import_modules_from_package_no_mod(self, mock_append, mock_walk):
+
+        sep = os.sep
+        mock_walk.return_value = ([
+            ('..' + sep + 'foo', ['bar'], ['__init__.py']),
+            ('..' + sep + 'foo' + sep + 'bar', [], ['baz.txt', 'qux.rst'])
+        ])
+
+        utils.import_modules_from_package('foo.bar')
+        self.assertFalse(mock_append.called)
+
+    @mock.patch('yardstick.common.utils.os.walk')
+    @mock.patch('yardstick.common.utils.importutils')
+    def test_import_modules_from_package(self, mock_importutils, mock_walk):
+
+        sep = os.sep
+        mock_walk.return_value = ([
+            ('foo' + sep + '..' + sep + 'bar', [], ['baz.py'])
+        ])
+
+        utils.import_modules_from_package('foo.bar')
+        mock_importutils.import_module.assert_called_with('bar.baz')
+
+
+def main():
+    unittest.main()
+
+if __name__ == '__main__':
+    main()
index 41d654a..ac43d28 100755 (executable)
@@ -25,6 +25,13 @@ fi
 # iperf3 only available for trusty in backports
 grep trusty /etc/apt/sources.list && \
     echo "deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted universe multiverse" >> /etc/apt/sources.list
+
+# Workaround for building on CentOS (apt-get is not working with http sources)
+sed -i 's/http/ftp/' /etc/apt/sources.list
+
+# Force apt to use ipv4 due to build problems on LF POD.
+echo 'Acquire::ForceIPv4 "true";' > /etc/apt/apt.conf.d/99force-ipv4
+
 apt-get update
 apt-get install -y \
     fio \
index eba8547..c900272 100755 (executable)
@@ -66,15 +66,22 @@ download() {
     cd -
 }
 
-# mount image using qemu-nbd
+# mount image
 setup() {
-    modprobe nbd max_part=16
-    qemu-nbd -c /dev/nbd0 $imgfile
-    partprobe /dev/nbd0
-
     mkdir -p $mountdir
-    mount /dev/nbd0p1 $mountdir
-
+    if [ -f /etc/centos-release ]; then
+        # CentOS, mount image using guestmount.
+        # (needs libguestfs-tools installed)
+        export LIBGUESTFS_BACKEND=direct
+        guestmount -a $imgfile -i --rw $mountdir
+    else
+        # mount image using qemu-nbd
+        modprobe nbd max_part=16
+        qemu-nbd -c /dev/nbd0 $imgfile
+        partprobe /dev/nbd0
+
+        mount /dev/nbd0p1 $mountdir
+    fi
     cp $cmd $mountdir/$(basename $cmd)
 }
 
index a39af06..4eaf879 100644 (file)
@@ -29,6 +29,10 @@ class Fio(base.Scenario):
         type:    int
         unit:    bytes
         default: 4k
+    iodepth - number of I/O buffers to keep in flight
+        type:    int
+        unit:    na
+        default: 1
     rw - type of io pattern [read, write, randwrite, randread, rw, randrw]
         type:    string
         unit:    na
@@ -79,6 +83,7 @@ class Fio(base.Scenario):
         options = args["options"]
         filename = options.get("filename", "/home/ec2-user/data.raw")
         bs = options.get("bs", "4k")
+        iodepth = options.get("iodepth", "1")
         rw = options.get("rw", "write")
         ramp_time = options.get("ramp_time", 20)
         name = "yardstick-fio"
@@ -93,10 +98,11 @@ class Fio(base.Scenario):
         else:
             runtime = 30
 
-        args = "-filename=%s -bs=%s -rw=%s -ramp_time=%s -runtime=%s -name=%s" \
-            % (filename, bs, rw, ramp_time, runtime, name)
+        args = "-filename=%s -bs=%s -iodepth=%s -rw=%s -ramp_time=%s " \
+               "-runtime=%s -name=%s" \
+               % (filename, bs, iodepth, rw, ramp_time, runtime, name)
         cmd = "sudo bash fio.sh %s %s %s" \
-            % (filename, args, default_args)
+              % (filename, args, default_args)
         LOG.debug("Executing command: %s", cmd)
         status, stdout, stderr = self.client.execute(cmd)
         if status:
@@ -127,6 +133,7 @@ def _test():
     options = {
         "filename": "/home/ec2-user/data.raw",
         "bs": "4k",
+        "iodepth": "1",
         "rw": "write",
         "ramp_time": 10,
     }
old mode 100644 (file)
new mode 100755 (executable)
index 8b9f269..6e117ed
@@ -18,7 +18,7 @@ import ipaddress
 
 from yardstick.benchmark.context.model import Context
 from yardstick.benchmark.runners import base as base_runner
-
+from yardstick.common.task_template import TaskTemplate
 from yardstick.common.utils import cliargs
 
 output_file_default = "/tmp/yardstick.out"
@@ -31,6 +31,13 @@ class TaskCommands(object):
     '''
 
     @cliargs("taskfile", type=str, help="path to taskfile", nargs=1)
+    @cliargs("--task-args", dest="task_args",
+             help="Input task args (dict in json). These args are used"
+             "to render input task that is jinja2 template.")
+    @cliargs("--task-args-file", dest="task_args_file",
+             help="Path to the file with input task args (dict in "
+             "json/yaml). These args are used to render input"
+             "task that is jinja2 template.")
     @cliargs("--keep-deploy", help="keep context deployed in cloud",
              action="store_true")
     @cliargs("--parse-only", help="parse the benchmark config file and exit",
@@ -43,7 +50,8 @@ class TaskCommands(object):
         atexit.register(atexit_handler)
 
         parser = TaskParser(args.taskfile[0])
-        scenarios, run_in_parallel = parser.parse()
+        scenarios, run_in_parallel = parser.parse(args.task_args,
+                                                  args.task_args_file)
 
         if args.parse_only:
             sys.exit(0)
@@ -80,20 +88,39 @@ class TaskCommands(object):
 
         print "Done, exiting"
 
-
 # TODO: Move stuff below into TaskCommands class !?
 
+
 class TaskParser(object):
     '''Parser for task config files in yaml format'''
     def __init__(self, path):
         self.path = path
 
-    def parse(self):
+    def parse(self, task_args=None, task_args_file=None):
         '''parses the task file and return an context and scenario instances'''
         print "Parsing task config:", self.path
+
+        try:
+            kw = {}
+            if task_args_file:
+                with open(task_args_file) as f:
+                    kw.update(parse_task_args("task_args_file", f.read()))
+            kw.update(parse_task_args("task_args", task_args))
+        except TypeError:
+            raise
+
         try:
-            with open(self.path) as stream:
-                cfg = yaml.load(stream)
+            with open(self.path) as f:
+                try:
+                    input_task = f.read()
+                    rendered_task = TaskTemplate.render(input_task, **kw)
+                except Exception as e:
+                    print(("Failed to render template:\n%(task)s\n%(err)s\n")
+                          % {"task": input_task, "err": e})
+                    raise e
+                print(("Input task is:\n%s\n") % rendered_task)
+
+                cfg = yaml.load(rendered_task)
         except IOError as ioerror:
             sys.exit(ioerror)
 
@@ -108,6 +135,10 @@ class TaskParser(object):
             context_cfgs = cfg["contexts"]
 
         for cfg_attrs in context_cfgs:
+            # config external_network based on env var
+            for _, attrs in cfg_attrs["networks"].items():
+                attrs["external_network"] = os.environ.get('EXTERNAL_NETWORK',
+                                                           'net04_ext')
             context = Context()
             context.init(cfg_attrs)
 
@@ -181,3 +212,26 @@ def runner_join(runner):
     base_runner.Runner.release(runner)
     if status != 0:
         sys.exit("Runner failed")
+
+
+def print_invalid_header(source_name, args):
+    print(("Invalid %(source)s passed:\n\n %(args)s\n")
+          % {"source": source_name, "args": args})
+
+
+def parse_task_args(src_name, args):
+    try:
+        kw = args and yaml.safe_load(args)
+        kw = {} if kw is None else kw
+    except yaml.parser.ParserError as e:
+        print_invalid_header(src_name, args)
+        print(("%(source)s has to be YAML. Details:\n\n%(err)s\n")
+              % {"source": src_name, "err": e})
+        raise TypeError()
+
+    if not isinstance(kw, dict):
+        print_invalid_header(src_name, args)
+        print(("%(src)s had to be dict, actually %(src_type)s\n")
+              % {"src": src_name, "src_type": type(kw)})
+        raise TypeError()
+    return kw
diff --git a/yardstick/common/task_template.py b/yardstick/common/task_template.py
new file mode 100755 (executable)
index 0000000..2739323
--- /dev/null
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+# yardstick: this file is copied from rally and slightly modified
+##############################################################################
+import re
+import jinja2
+import jinja2.meta
+
+
+class TaskTemplate(object):
+    @classmethod
+    def render(cls, task_template, **kwargs):
+        """Render jinja2 task template to Yardstick input task.
+
+        :param task_template: string that contains template
+        :param kwargs: Dict with template arguments
+        :returns: rendered template str
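+
+        Example (illustrative):
+
+            TaskTemplate.render("packetsize: {{packetsize}}",
+                                packetsize=200)
+            # -> "packetsize: 200"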
+        """
+
+        from six.moves import builtins
+
+        ast = jinja2.Environment().parse(task_template)
+        required_kwargs = jinja2.meta.find_undeclared_variables(ast)
+
+        missing = set(required_kwargs) - set(kwargs) - set(dir(builtins))
+        real_missing = [mis for mis in missing
+                        if is_really_missing(mis, task_template)]
+
+        if real_missing:
+            multi_msg = ("Please specify next template task arguments:%s")
+            single_msg = ("Please specify template task argument:%s")
+            raise TypeError((len(real_missing) > 1 and multi_msg or single_msg)
+                            % ", ".join(real_missing))
+        return jinja2.Template(task_template).render(**kwargs)
+
+
+def is_really_missing(mis, task_template):
+    # Remove variables that have default values from the
+    # missing set. A construction that won't be properly
+    # checked is {% set x = x or 1 %}
+    if re.search(mis.join(["{%\s*set\s+", "\s*=\s*", "[^\w]+"]),
+                 task_template):
+        return False
+    # Also check for a default filter which can show up as
+    # a missing variable
+    if re.search(mis + "\s*\|\s*default\(", task_template):
+        return False
+    return True