Adds the ability to accept steady_state_samples as a parameter.
Removes deprecated nowarm and nossd.
Adds OS_PROJECT_DOMAIN_ID.
Change-Id: I00c01b05cd2cbf001bc5446faa30f36137350ccf
JIRA: STORPERF-139
Signed-off-by: mbeierl <mark.beierl@dell.com>
WORKSPACE=`pwd`
fi
-export AGENT_COUNT=${AGENT_COUNT:-$CINDER_NODES}
-export VOLUME_SIZE=${VOLUME_SIZE:-2}
-export WORKLOADS=${WORKLOADS:-ws,wr,rs,rr,rw}
-export BLOCK_SIZES=${BLOCK_SIZES:-1024,16384}
-export QUEUE_DEPTHS=${QUEUE_DEPTHS:-1,4}
-export STEADY_STATE_SAMPLES=${STEADY_STATE_SAMPLES:-10}
-export DEADLINE=${DEADLINE:-`expr $STEADY_STATE_SAMPLES \* 3`}
-export TEST_CASE=${TEST_CASE:-snia_steady_state}
-export SCENARIO_NAME=${DEPLOY_SCENARIO:-none}
-export DISK_TYPE=${DISK_TYPE:-unspecified}
-
-# This is set by Jenkins, but if we are running manually, just use the
-# current hostname.
-export POD_NAME=${NODE_NAME:-`hostname`}
git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/ci/job/releng
export "$env"
done < $WORKSPACE/ci/job/admin.rc
+export AGENT_COUNT=${AGENT_COUNT:-$CINDER_NODES}
+export BLOCK_SIZES=${BLOCK_SIZES:-1024,16384}
+export DEADLINE=${DEADLINE:-`expr $STEADY_STATE_SAMPLES \* 3`}
+export DISK_TYPE=${DISK_TYPE:-unspecified}
+export QUEUE_DEPTHS=${QUEUE_DEPTHS:-1,4}
+export POD_NAME=${NODE_NAME:-`hostname`}
+export SCENARIO_NAME=${DEPLOY_SCENARIO:-none}
+export STEADY_STATE_SAMPLES=${STEADY_STATE_SAMPLES:-10}
+export TEST_CASE=${TEST_CASE:-snia_steady_state}
export VERSION=`echo ${BUILD_TAG#*daily-} | cut -d- -f1`
+export VOLUME_SIZE=${VOLUME_SIZE:-2}
+export WORKLOADS=${WORKLOADS:-ws,wr,rs,rr,rw}
echo ==========================================================================
echo Environment
import json
import logging.config
import os
+from storperf.db.job_db import JobDB
+from storperf.plot.barchart import Barchart
+from storperf.storperf_master import StorPerfMaster
import sys
from flask import abort, Flask, request, jsonify, send_from_directory
from flask_restful import Resource, Api, fields
from flask_restful_swagger import swagger
-from storperf.db.job_db import JobDB
-from storperf.plot.barchart import Barchart
-from storperf.storperf_master import StorPerfMaster
-
app = Flask(__name__, static_url_path="")
CORS(app)
class WorkloadModel:
resource_fields = {
'target': fields.String,
- 'nossd': fields.String,
- 'nowarm': fields.String,
'deadline': fields.Integer,
+ "steady_state_samples": fields.Integer,
'workload': fields.String,
'queue_depths': fields.String,
'block_sizes': fields.String
"deadline": if specified, the maximum duration in minutes
for any single test iteration.
-"nossd": Do not fill the target with random
-data prior to running the test,
-
-"nowarm": Do not refill the target with data
-prior to running any further tests,
-
"workload":if specified, the workload to run. Defaults to all.
""",
"required": True,
storperf.filename = request.json['target']
if ('deadline' in request.json):
storperf.deadline = request.json['deadline']
+ if ('steady_state_samples' in request.json):
+ storperf.steady_state_samples = request.json[
+ 'steady_state_samples']
if ('queue_depths' in request.json):
storperf.queue_depths = request.json['queue_depths']
if ('block_sizes' in request.json):
def deadline(self, value):
self._test_executor.deadline = value
+ @property
+ def steady_state_samples(self):
+ return self._test_executor.steady_state_samples
+
+ @steady_state_samples.setter
+ def steady_state_samples(self, value):
+ self._test_executor.steady_state_samples = value
+
@property
def queue_depths(self):
return self._test_executor.queue_depths
"username": os.environ.get('OS_USERNAME'),
"password": os.environ.get('OS_PASSWORD'),
"auth_url": os.environ.get('OS_AUTH_URL'),
+ "project_domain_id":
+ os.environ.get('OS_PROJECT_DOMAIN_ID'),
"project_domain_name":
os.environ.get('OS_PROJECT_DOMAIN_NAME'),
"project_id": os.environ.get('OS_PROJECT_ID'),
import os
from os.path import isfile, join
import sched
-from threading import Thread
-import time
-
from storperf.carbon.converter import Converter
from storperf.carbon.emitter import CarbonMetricTransmitter
from storperf.db.job_db import JobDB
from storperf.fio.fio_invoker import FIOInvoker
from storperf.utilities.data_handler import DataHandler
from storperf.utilities.thread_gate import ThreadGate
+from threading import Thread
+import time
class UnknownWorkload(Exception):
self.workload_modules = []
self.filename = None
self.deadline = None
+ self.steady_state_samples = 10
self.metadata = {}
self.start_time = None
self.end_time = None
import logging
import os
-from time import sleep
-import time
-
from storperf.db import test_results_db
from storperf.db.graphite_db import GraphiteDB
from storperf.db.job_db import JobDB
from storperf.utilities import dictionary
from storperf.utilities import math as math
from storperf.utilities import steady_state as SteadyState
+from time import sleep
+import time
class DataHandler(object):
def __init__(self):
self.logger = logging.getLogger(__name__)
- self.samples = 10
self.job_db = JobDB()
"""
series = self._lookup_prior_data(executor, metric, io_type)
series = self._convert_timestamps_to_samples(
executor, series)
- steady = self._evaluate_prior_data(series)
+ steady = self._evaluate_prior_data(
+ series, executor.steady_state_samples)
self.logger.debug("Steady state for %s %s: %s"
% (io_type, metric, steady))
# A bit of a hack here as Carbon might not be finished storing the
# data we just sent to it
now = int(time.time())
- backtime = 60 * (self.samples + 2)
+ backtime = 60 * (executor.steady_state_samples + 2)
data_series = graphite_db.fetch_series(workload,
metric,
io_type,
return normalized_series
- def _evaluate_prior_data(self, data_series):
+ def _evaluate_prior_data(self, data_series, samples):
self.logger.debug("Data series: %s" % data_series)
number_of_samples = len(data_series)
if number_of_samples == 0:
return False
- if (number_of_samples < self.samples):
+ if (number_of_samples < samples):
self.logger.debug("Only %s samples, ignoring" % number_of_samples)
return False
##############################################################################
import os
+from storperf.utilities.data_handler import DataHandler
import unittest
import mock
-from storperf.utilities.data_handler import DataHandler
-
class MockGraphiteDB(object):
self._terminated = False
self.args = None
self.start_time = 0
+ self.steady_state_samples = 10
self.end_time = 1
self.metadata = {}
self.block_sizes = "1"
[1480456040, 219.28],
[1480456050, 217.75]]
- actual = self.data_handler._evaluate_prior_data(series)
+ actual = self.data_handler._evaluate_prior_data(
+ series, self.steady_state_samples)
self.assertEqual(False, actual)
def test_long_not_steady_sample(self):
[4804560300, 21937],
[4804560400, 21928],
[4804560500, 21775]]
- actual = self.data_handler._evaluate_prior_data(series)
+ actual = self.data_handler._evaluate_prior_data(
+ series, self.steady_state_samples)
self.assertEqual(False, actual)
def test_long_steady_sample(self):
[4804560300, 219.37],
[4804560400, 219.28],
[4804560500, 217.75]]
- actual = self.data_handler._evaluate_prior_data(series)
+ actual = self.data_handler._evaluate_prior_data(
+ series, self.steady_state_samples)
self.assertEqual(True, actual)
@mock.patch.dict(os.environ, {'TEST_DB_URL': 'mock'})