Allow User-Specified Sample Size 07/35907/1
author mbeierl <mark.beierl@dell.com>
Thu, 8 Jun 2017 01:11:55 +0000 (21:11 -0400)
committer Mark Beierl <mark.beierl@dell.com>
Thu, 8 Jun 2017 02:30:58 +0000 (02:30 +0000)
Adds the ability to accept steady_state_sample_size as a parameter.
Removes deprecated nowarm and nossd.
Adds OS_PROJECT_DOMAIN_ID.

Change-Id: I00c01b05cd2cbf001bc5446faa30f36137350ccf
JIRA: STORPERF-139
Signed-off-by: mbeierl <mark.beierl@dell.com>
(cherry picked from commit c7806a0f08f6114d8b1f037a77af041a2b0364d5)

ci/daily.sh
rest_server.py
storperf/storperf_master.py
storperf/test_executor.py
storperf/utilities/data_handler.py
tests/utilities_tests/data_handler_test.py

index 10a06c5..21fbc5c 100755 (executable)
@@ -14,20 +14,6 @@ then
     WORKSPACE=`pwd`
 fi
 
-export AGENT_COUNT=${AGENT_COUNT:-$CINDER_NODES}
-export VOLUME_SIZE=${VOLUME_SIZE:-2}
-export WORKLOADS=${WORKLOADS:-ws,wr,rs,rr,rw}
-export BLOCK_SIZES=${BLOCK_SIZES:-1024,16384}
-export QUEUE_DEPTHS=${QUEUE_DEPTHS:-1,4}
-export STEADY_STATE_SAMPLES=${STEADY_STATE_SAMPLES:-10}
-export DEADLINE=${DEADLINE:-`expr $STEADY_STATE_SAMPLES \* 3`}
-export TEST_CASE=${TEST_CASE:-snia_steady_state}
-export SCENARIO_NAME=${DEPLOY_SCENARIO:-none}
-export DISK_TYPE=${DISK_TYPE:-unspecified}
-
-# This is set by Jenkins, but if we are running manually, just use the
-# current hostname.
-export POD_NAME=${NODE_NAME:-`hostname`}
 
 git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/ci/job/releng
 
@@ -54,7 +40,18 @@ do
     export "$env"
 done < $WORKSPACE/ci/job/admin.rc
 
+export AGENT_COUNT=${AGENT_COUNT:-$CINDER_NODES}
+export BLOCK_SIZES=${BLOCK_SIZES:-1024,16384}
+export STEADY_STATE_SAMPLES=${STEADY_STATE_SAMPLES:-10}
+export DEADLINE=${DEADLINE:-`expr $STEADY_STATE_SAMPLES \* 3`}
+export DISK_TYPE=${DISK_TYPE:-unspecified}
+export QUEUE_DEPTHS=${QUEUE_DEPTHS:-1,4}
+export POD_NAME=${NODE_NAME:-`hostname`}
+export SCENARIO_NAME=${DEPLOY_SCENARIO:-none}
+export TEST_CASE=${TEST_CASE:-snia_steady_state}
 export VERSION=`echo ${BUILD_TAG#*daily-} | cut -d- -f1`
+export VOLUME_SIZE=${VOLUME_SIZE:-2}
+export WORKLOADS=${WORKLOADS:-ws,wr,rs,rr,rw}
 
 echo ==========================================================================
 echo Environment
index 574fc32..e1dd778 100644 (file)
 import json
 import logging.config
 import os
+from storperf.db.job_db import JobDB
+from storperf.plot.barchart import Barchart
+from storperf.storperf_master import StorPerfMaster
 import sys
 
 from flask import abort, Flask, request, jsonify, send_from_directory
 from flask_restful import Resource, Api, fields
 from flask_restful_swagger import swagger
 
-from storperf.db.job_db import JobDB
-from storperf.plot.barchart import Barchart
-from storperf.storperf_master import StorPerfMaster
-
 
 app = Flask(__name__, static_url_path="")
 api = swagger.docs(Api(app), apiVersion='1.0')
@@ -215,9 +214,8 @@ class Configure(Resource):
 class WorkloadModel:
     resource_fields = {
         'target': fields.String,
-        'nossd': fields.String,
-        'nowarm': fields.String,
         'deadline': fields.Integer,
+        'steady_state_samples': fields.Integer,
         'workload': fields.String,
         'queue_depths': fields.String,
         'block_sizes': fields.String
@@ -300,12 +298,6 @@ following parameters:
 "deadline": if specified, the maximum duration in minutes
 for any single test iteration.
 
-"nossd": Do not fill the target with random
-data prior to running the test,
-
-"nowarm": Do not refill the target with data
-prior to running any further tests,
-
 "workload":if specified, the workload to run. Defaults to all.
                 """,
                 "required": True,
@@ -336,6 +328,9 @@ prior to running any further tests,
                 storperf.filename = request.json['target']
             if ('deadline' in request.json):
                 storperf.deadline = request.json['deadline']
+            if ('steady_state_samples' in request.json):
+                storperf.steady_state_samples = request.json[
+                    'steady_state_samples']
             if ('queue_depths' in request.json):
                 storperf.queue_depths = request.json['queue_depths']
             if ('block_sizes' in request.json):
index 440f5b3..5432ece 100644 (file)
@@ -173,6 +173,14 @@ class StorPerfMaster(object):
     def deadline(self, value):
         self._test_executor.deadline = value
 
+    @property
+    def steady_state_samples(self):
+        return self._test_executor.steady_state_samples
+
+    @steady_state_samples.setter
+    def steady_state_samples(self, value):
+        self._test_executor.steady_state_samples = value
+
     @property
     def queue_depths(self):
         return self._test_executor.queue_depths
@@ -386,6 +394,8 @@ class StorPerfMaster(object):
                 "username": os.environ.get('OS_USERNAME'),
                 "password": os.environ.get('OS_PASSWORD'),
                 "auth_url": os.environ.get('OS_AUTH_URL'),
+                "project_domain_id":
+                    os.environ.get('OS_PROJECT_DOMAIN_ID'),
                 "project_domain_name":
                     os.environ.get('OS_PROJECT_DOMAIN_NAME'),
                 "project_id": os.environ.get('OS_PROJECT_ID'),
index 2fadc81..b2d5914 100644 (file)
@@ -15,15 +15,14 @@ from os import listdir
 import os
 from os.path import isfile, join
 import sched
-from threading import Thread
-import time
-
 from storperf.carbon.converter import Converter
 from storperf.carbon.emitter import CarbonMetricTransmitter
 from storperf.db.job_db import JobDB
 from storperf.fio.fio_invoker import FIOInvoker
 from storperf.utilities.data_handler import DataHandler
 from storperf.utilities.thread_gate import ThreadGate
+from threading import Thread
+import time
 
 
 class UnknownWorkload(Exception):
@@ -37,6 +36,7 @@ class TestExecutor(object):
         self.workload_modules = []
         self.filename = None
         self.deadline = None
+        self.steady_state_samples = 10
         self.metadata = {}
         self.start_time = None
         self.end_time = None
index d95d6fa..1da869c 100644 (file)
@@ -9,9 +9,6 @@
 
 import logging
 import os
-from time import sleep
-import time
-
 from storperf.db import test_results_db
 from storperf.db.graphite_db import GraphiteDB
 from storperf.db.job_db import JobDB
@@ -19,13 +16,14 @@ from storperf.utilities import data_treatment as DataTreatment
 from storperf.utilities import dictionary
 from storperf.utilities import math as math
 from storperf.utilities import steady_state as SteadyState
+from time import sleep
+import time
 
 
 class DataHandler(object):
 
     def __init__(self):
         self.logger = logging.getLogger(__name__)
-        self.samples = 10
         self.job_db = JobDB()
 
     """
@@ -51,7 +49,8 @@ class DataHandler(object):
                     series = self._lookup_prior_data(executor, metric, io_type)
                     series = self._convert_timestamps_to_samples(
                         executor, series)
-                    steady = self._evaluate_prior_data(series)
+                    steady = self._evaluate_prior_data(
+                        series, executor.steady_state_samples)
 
                     self.logger.debug("Steady state for %s %s: %s"
                                       % (io_type, metric, steady))
@@ -94,7 +93,7 @@ class DataHandler(object):
         # A bit of a hack here as Carbon might not be finished storing the
         # data we just sent to it
         now = int(time.time())
-        backtime = 60 * (self.samples + 2)
+        backtime = 60 * (executor.steady_state_samples + 2)
         data_series = graphite_db.fetch_series(workload,
                                                metric,
                                                io_type,
@@ -135,13 +134,13 @@ class DataHandler(object):
 
         return normalized_series
 
-    def _evaluate_prior_data(self, data_series):
+    def _evaluate_prior_data(self, data_series, samples):
         self.logger.debug("Data series: %s" % data_series)
         number_of_samples = len(data_series)
 
         if number_of_samples == 0:
             return False
-        if (number_of_samples < self.samples):
+        if (number_of_samples < samples):
             self.logger.debug("Only %s samples, ignoring" % number_of_samples)
             return False
 
index 4630d54..6d57b0d 100644 (file)
@@ -8,12 +8,11 @@
 ##############################################################################
 
 import os
+from storperf.utilities.data_handler import DataHandler
 import unittest
 
 import mock
 
-from storperf.utilities.data_handler import DataHandler
-
 
 class MockGraphiteDB(object):
 
@@ -32,6 +31,7 @@ class DataHandlerTest(unittest.TestCase):
         self._terminated = False
         self.args = None
         self.start_time = 0
+        self.steady_state_samples = 10
         self.end_time = 1
         self.metadata = {}
         self.block_sizes = "1"
@@ -96,7 +96,8 @@ class DataHandlerTest(unittest.TestCase):
                   [1480456040, 219.28],
                   [1480456050, 217.75]]
 
-        actual = self.data_handler._evaluate_prior_data(series)
+        actual = self.data_handler._evaluate_prior_data(
+            series, self.steady_state_samples)
         self.assertEqual(False, actual)
 
     def test_long_not_steady_sample(self):
@@ -106,7 +107,8 @@ class DataHandlerTest(unittest.TestCase):
                   [4804560300, 21937],
                   [4804560400, 21928],
                   [4804560500, 21775]]
-        actual = self.data_handler._evaluate_prior_data(series)
+        actual = self.data_handler._evaluate_prior_data(
+            series, self.steady_state_samples)
         self.assertEqual(False, actual)
 
     def test_long_steady_sample(self):
@@ -120,7 +122,8 @@ class DataHandlerTest(unittest.TestCase):
                   [4804560300, 219.37],
                   [4804560400, 219.28],
                   [4804560500, 217.75]]
-        actual = self.data_handler._evaluate_prior_data(series)
+        actual = self.data_handler._evaluate_prior_data(
+            series, self.steady_state_samples)
         self.assertEqual(True, actual)
 
     @mock.patch.dict(os.environ, {'TEST_DB_URL': 'mock'})