rm -rf "${WORKSPACE}/ci/job/joid"
git clone --depth 1 https://gerrit.opnfv.org/gerrit/joid "${WORKSPACE}/ci/job/joid"
-virtualenv "${WORKSPACE}/ci/job/storperf_daily_venv"
+python3 -m venv "${WORKSPACE}/ci/job/storperf_daily_venv"
# shellcheck source=/dev/null
source "${WORKSPACE}/ci/job/storperf_daily_venv/bin/activate"
-pip install --upgrade setuptools==40.5.0
-pip install python-openstackclient==3.16.1
-pip install python-heatclient==1.16.1
+python3 -m pip install --upgrade setuptools==40.5.0
+python3 -m pip install python-openstackclient==3.16.1
+python3 -m pip install python-heatclient==1.16.1
"${WORKSPACE}/ci/generate-admin-rc.sh"
echo Using $ARCH architecture
+export CURRENT_UID="$(id -u):$(id -g)"
+
docker-compose -f local-docker-compose.yaml down
docker-compose -f local-docker-compose.yaml build
docker-compose -f local-docker-compose.yaml up -d
WORKSPACE="$HOME"
fi
-virtualenv $WORKSPACE/storperf_venv
+python3 -m venv $WORKSPACE/storperf_venv
source $WORKSPACE/storperf_venv/bin/activate
-pip install --upgrade setuptools
-pip install autoflake==1.2
-pip install autopep8==1.3.5
-pip install coverage==4.5.1
-pip install flake8==3.5.0
-pip install mock==2.0.0
-pip install nose==1.3.7
-pip install -r docker/storperf-master/requirements.pip
+python3 -m pip install --upgrade setuptools
+python3 -m pip install autoflake==1.2
+python3 -m pip install autopep8==1.3.5
+python3 -m pip install coverage==4.5.1
+python3 -m pip install flake8==3.5.0
+python3 -m pip install mock==2.0.0
+python3 -m pip install nose==1.3.7
+python3 -m pip install -r docker/storperf-master/requirements.pip
final_rc=0
+++ /dev/null
-##############################################################################
-# Copyright (c) 2015 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-"""
-"""
-
-from storperf.storperf_master import StorPerfMaster
-from threading import Thread
-import cPickle
-import getopt
-import json
-import logging
-import logging.config
-import logging.handlers
-import requests
-import socket
-import struct
-import sys
-
-
-class Usage(Exception):
- pass
-
-
-def event(event_string):
- logging.getLogger(__name__).info(event_string)
-
-
-class LogRecordStreamHandler(object):
-
- def __init__(self):
- self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- self.socket.bind((
- 'localhost', logging.handlers.DEFAULT_UDP_LOGGING_PORT))
- self.level = logging.INFO
-
- def read_logs(self):
- try:
- while True:
- datagram = self.socket.recv(8192)
- chunk = datagram[0:4]
- struct.unpack(">L", chunk)[0]
- chunk = datagram[4:]
- obj = cPickle.loads(chunk)
- record = logging.makeLogRecord(obj)
- if (record.levelno >= self.level):
- logger = logging.getLogger(record.name)
- logger.handle(record)
-
- except Exception as e:
- print "ERROR: " + str(e)
- finally:
- self.socket.close()
-
-
-def main(argv=None):
- verbose = False
- debug = False
- report = None
- erase = False
- terminate = False
- options = {}
-
- storperf = StorPerfMaster()
-
- if argv is None:
- argv = sys.argv
- try:
- try:
- opts, args = getopt.getopt(argv[1:], "t:w:r:f:escvdTh",
- ["target=",
- "workload=",
- "report=",
- "configure=",
- "erase",
- "nossd",
- "nowarm",
- "verbose",
- "debug",
- "terminate",
- "help",
- ])
- except getopt.error, msg:
- raise Usage(msg)
-
- configuration = None
- options['workload'] = None
-
- for o, a in opts:
- if o in ("-h", "--help"):
- print __doc__
- return 0
- elif o in ("-t", "--target"):
- options['filename'] = a
- elif o in ("-v", "--verbose"):
- verbose = True
- elif o in ("-d", "--debug"):
- debug = True
- elif o in ("-s", "--nossd"):
- options['nossd'] = a
- elif o in ("-c", "--nowarm"):
- options['nowarm'] = False
- elif o in ("-w", "--workload"):
- options['workload'] = a
- elif o in ("-r", "--report"):
- report = a
- elif o in ("-e", "--erase"):
- erase = True
- elif o in ("-T", "--terminate"):
- terminate = True
- elif o in ("-f", "--configure"):
- configuration = dict(x.split('=') for x in a.split(','))
-
- if (debug) or (verbose):
- udpserver = LogRecordStreamHandler()
-
- if (debug):
- udpserver.level = logging.DEBUG
-
- logging.basicConfig(format="%(asctime)s - %(name)s - " +
- "%(levelname)s - %(message)s")
-
- t = Thread(target=udpserver.read_logs, args=())
- t.setDaemon(True)
- t.start()
-
- if (erase):
- response = requests.delete(
- 'http://127.0.0.1:5000/api/v1.0/configurations')
- if (response.status_code == 400):
- content = json.loads(response.content)
- raise Usage(content['message'])
- return 0
-
- if (terminate):
- response = requests.delete(
- 'http://127.0.0.1:5000/api/v1.0/jobs')
- if (response.status_code == 400):
- content = json.loads(response.content)
- raise Usage(content['message'])
- return 0
-
- if (configuration is not None):
- response = requests.post(
- 'http://127.0.0.1:5000/api/v1.0/configurations', json=configuration)
- if (response.status_code == 400):
- content = json.loads(response.content)
- raise Usage(content['message'])
-
- if (report is not None):
- print "Fetching report for %s..." % (report,)
- response = requests.get(
- 'http://127.0.0.1:5000/api/v1.0/jobs?id=%s' % (report,))
- if (response.status_code == 400):
- content = json.loads(response.content)
- raise Usage(content['message'])
- content = json.loads(response.content)
- print content
- else:
- print "Calling start..."
- response = requests.post(
- 'http://127.0.0.1:5000/api/v1.0/jobs', json=options)
- if (response.status_code == 400):
- content = json.loads(response.content)
- raise Usage(content['message'])
-
- content = json.loads(response.content)
- print "Started job id: " + content['job_id']
-
- except Usage as e:
- print >> sys.stderr, str(e)
- print >> sys.stderr, "For help use --help"
- return 2
-
- except Exception as e:
- print >> sys.stderr, str(e)
- return 2
-
-
-if __name__ == "__main__":
- sys.exit(main())
args:
ARCH: ${ARCH}
env_file: ${ENV_FILE}
+ user: ${CURRENT_UID}
volumes:
- ./storperf-master/:/storperf
+ - ./certs:/etc/ssl/certs/
links:
- storperf-graphite
context: storperf-reporting
args:
ARCH: ${ARCH}
+ user: ${CURRENT_UID}
volumes:
- ./storperf-reporting/:/home/opnfv/storperf-reporting
##
ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
FROM nginx:alpine
EXPOSE 80 443
#
ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
FROM multiarch/alpine:$ARCH-$ALPINE_VERSION as storperf-builder
RUN ulimit -n 1024
-LABEL version="7.0" description="OPNFV Storperf Docker container"
+LABEL version="8.0" description="OPNFV Storperf Docker container"
ARG BRANCH=master
RUN apk --no-cache add --update \
libffi-dev \
libressl-dev \
- python \
- py-pip \
- python-dev \
+ python3=3.7.5-r1 \
+ python3-dev=3.7.5-r1 \
alpine-sdk \
- linux-headers \
- bash
+ linux-headers
# Install StorPerf
COPY requirements.pip /storperf/
-RUN pip install --upgrade setuptools==33.1.1
-RUN pip install -r /storperf/requirements.pip
+RUN python3 -m pip install --upgrade setuptools==33.1.1
+RUN python3 -m pip install -r /storperf/requirements.pip
# Build stripped down StorPerf image
FROM multiarch/alpine:$ARCH-$ALPINE_VERSION as storperf-master
RUN apk --no-cache add --update \
- python \
+ libressl-dev \
+ python3=3.7.5-r1 \
bash
-COPY --from=storperf-builder /usr/lib/python2.7/site-packages /usr/lib/python2.7/site-packages
+COPY --from=storperf-builder /usr/lib/python3.7/site-packages /usr/lib/python3.7/site-packages
COPY --from=storperf-builder /usr/local/bin/fio /usr/local/bin/fio
COPY . /storperf
EXPOSE 5000
# Entry point
-CMD [ "python", "./rest_server.py" ]
+CMD [ "python3", "./rest_server.py" ]
def resurse_to_flat_dictionary(self, json, prefix=None):
if type(json) == dict:
- for k, v in json.items():
+ for k, v in list(json.items()):
if prefix is None:
- key = k.decode("utf-8").replace(" ", "_")
+ key = k.replace(" ", "_")
else:
- key = prefix + "." + k.decode("utf-8").replace(" ", "_")
- if hasattr(v, '__iter__'):
+ key = prefix + "." + k.replace(" ", "_")
+ if type(v) is list or type(v) is dict:
self.resurse_to_flat_dictionary(v, key)
else:
self.flat_dictionary[key] = str(v).replace(" ", "_")
index = 0
for v in json:
index += 1
- if hasattr(v, '__iter__'):
+ if type(v) is list or type(v) is dict:
self.resurse_to_flat_dictionary(
v, prefix + "." + str(index))
else:
message = "%s %s %s\n" \
% (key, value, timestamp)
self.logger.debug("Metric: " + message.strip())
- carbon_socket.send(message)
+ carbon_socket.send(message.encode('utf-8'))
except ValueError:
self.logger.debug("Ignoring non numeric metric %s %s"
% (key, value))
message = "%s.commit-marker %s %s\n" \
% (commit_marker, timestamp, timestamp)
- carbon_socket.send(message)
+ carbon_socket.send(message.encode('utf-8'))
self.logger.debug("Marker %s" % message.strip())
self.logger.info("Sent metrics to %s:%s with timestamp %s"
% (self.host, self.port, timestamp))
- except Exception, e:
+ except Exception as e:
self.logger.error("While notifying carbon %s:%s %s"
% (self.host, self.port, e))
db = sqlite3.connect(JobDB.db_name)
cursor = db.cursor()
- for param, value in params.iteritems():
+ for param, value in params.items():
cursor.execute(
"""insert into job_params
(job_id,
self.json_body = ""
try:
for line in iter(stdout.readline, b''):
+ if type(line) == bytes:
+            line = line.decode('utf-8')
if line.startswith("fio"):
line = ""
continue
def stderr_handler(self, stderr):
self.logger.debug("Started")
for line in iter(stderr.readline, b''):
- self.logger.error("FIO Error: %s", line.rstrip())
+ if len(line) > 0:
+ self.logger.error("FIO Error: %s", line.rstrip())
self.stderr.append(line.rstrip())
# Sometime, FIO gets stuck and will give us this message:
ssh = self._ssh_client()
- command = "sudo killall fio"
-
- self.logger.debug("Executing on %s: %s" % (self.remote_host, command))
- (_, stdout, stderr) = ssh.exec_command(command)
+ kill_commands = ['sudo killall fio',
+ 'sudo pkill fio']
+ for command in kill_commands:
+ self.logger.debug("Executing on %s: %s" %
+ (self.remote_host, command))
+ (_, stdout, stderr) = ssh.exec_command(command)
for line in stdout.readlines():
self.logger.debug(line.strip())
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import StringIO
+
+from io import StringIO
from datetime import datetime
-import json
-import logging
from multiprocessing.pool import ThreadPool
-import os
-import socket
-from time import sleep
-import uuid
-
-import paramiko
from scp import SCPClient
from snaps.config.stack import StackConfig
from snaps.openstack.create_stack import OpenStackHeatStack
from snaps.openstack.os_credentials import OSCreds
from snaps.openstack.utils import heat_utils, cinder_utils, glance_utils
from snaps.thread_utils import worker_pool
-
from storperf.db.job_db import JobDB
from storperf.test_executor import TestExecutor
+from time import sleep
+import json
+import logging
+import os
+import paramiko
+import socket
+import uuid
class ParameterError(Exception):
workload = current_workload['workload']
self._thread_gate = ThreadGate(
len(self.slaves) * min(1, self.volume_count),
- workload.options['status-interval'])
+ float(workload.options['status-interval']))
self.current_workload = current_workload['name']
workloads = []
if self._custom_workloads:
- for workload_name in self._custom_workloads.iterkeys():
+ for workload_name in self._custom_workloads.keys():
real_name = workload_name
if real_name.startswith('_'):
real_name = real_name.replace('_', '')
workload.id = self.job_db.job_id
workload_params = self._custom_workloads[workload_name]
- for param, value in workload_params.iteritems():
+ for param, value in workload_params.items():
if param == "readwrite":
param = "rw"
if param in workload.fixed_options:
self.setup()
- for key, value in self.options.iteritems():
+ for key, value in self.options.items():
if value is not None:
args.append('--' + key + "=" + str(value))
else:
from time import strptime
import unittest
-import mock
+from unittest import mock
from storperf.carbon import converter
from storperf.carbon.emitter import CarbonMetricTransmitter
emitter.carbon_port = self.listen_port
emitter.transmit_metrics(result, None)
+ element = ""
+ for element in data:
+ element = element.decode('utf-8')
+ if element.startswith("host.run-name"):
+ break
+
self.assertEqual("host.run-name.key 123.0 975542400\n",
- data[1],
- data[1])
+ element,
+ data)
@mock.patch("socket.socket")
@mock.patch("time.gmtime")
emitter.carbon_port = self.listen_port
emitter.transmit_metrics(result, None)
+ element = ""
+ for element in data:
+ element = element.decode('utf-8')
+ if element.startswith("None.commit-marker"):
+ break
self.assertEqual("None.commit-marker 975542400 975542400\n",
- data[1],
- data[1])
+ element,
+ data)
@mock.patch("socket.socket")
def test_connect_fails(self, mock_socket):
import unittest
-import mock
-
+from unittest import mock
from storperf.db.graphite_db import GraphiteDB
import sqlite3
import unittest
-import mock
-
+from unittest import mock
from storperf.db.job_db import JobDB
from storperf.workloads.rr import rr
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-from StringIO import StringIO
import json
import unittest
from storperf.fio.fio_invoker import FIOInvoker
+from io import BytesIO
class Test(unittest.TestCase):
self.fio_invoker.register(self.event)
string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
- output = StringIO(string + "\n")
+ output = BytesIO((string + "\n").encode('utf-8'))
self.fio_invoker.stdout_handler(output)
self.assertEqual(self.simple_dictionary, self.metric)
self.fio_invoker.register(self.event)
string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
terminating = "fio: terminating on signal 2\n"
- output = StringIO(terminating + string + "\n")
+ output = BytesIO((terminating + string + "\n").encode('utf-8'))
self.fio_invoker.stdout_handler(output)
self.assertEqual(self.simple_dictionary, self.metric)
self.fio_invoker.register(self.event)
string = "{'key': 'value'}"
- output = StringIO(string + "\n")
+ output = BytesIO((string + "\n").encode('utf-8'))
self.fio_invoker.stdout_handler(output)
self.assertEqual(None, self.metric)
self.fio_invoker.register(self.event)
string = "{'key':\n}"
- output = StringIO(string + "\n")
+ output = BytesIO((string + "\n").encode('utf-8'))
self.fio_invoker.stdout_handler(output)
self.assertEqual(None, self.metric)
string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
self.fio_invoker.terminated = True
- output = StringIO(string + "\n")
+ output = BytesIO((string + "\n").encode('utf-8'))
self.fio_invoker.stdout_handler(output)
self.assertEqual(None, self.metric)
self.fio_invoker.register(self.event)
string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
- output = StringIO(string + "\n")
+ output = BytesIO((string + "\n").encode('utf-8'))
self.fio_invoker.stdout_handler(output)
self.assertEqual(self.simple_dictionary, self.metric)
import unittest
-import mock
+from unittest.mock import patch
from storperf.storperf_master import StorPerfMaster
class StorPerfMasterTest(unittest.TestCase):
def setUp(self):
- with mock.patch("storperf.storperf_master.OSCreds"), \
- mock.patch(
+ with patch("storperf.storperf_master.OSCreds"), \
+ patch(
"storperf.storperf_master.OpenStackHeatStack") as oshs:
oshs.return_value.get_stack.return_value = None
import os
import unittest
-import mock
+from unittest import mock
from storperf.utilities.data_handler import DataHandler
def test_pass_criteria(self):
metadata = {
"details": {
- "steady_state": {
- "_warm_up.queue-depth.8.block-size.16384": False,
- "rw.queue-depth.4.block-size.16384": True
- }
+ "steady_state": {
+ "_warm_up.queue-depth.8.block-size.16384": False,
+ "rw.queue-depth.4.block-size.16384": True
+ }
},
}
criteria = self.data_handler._determine_criteria(metadata)
def test_fail_criteria(self):
metadata = {
"details": {
- "steady_state": {
- "_warm_up.queue-depth.8.block-size.16384": False,
- "rw.queue-depth.4.block-size.16384": True,
- "rw.queue-depth.8.block-size.16384": False
- }
+ "steady_state": {
+ "_warm_up.queue-depth.8.block-size.16384": False,
+ "rw.queue-depth.4.block-size.16384": True,
+ "rw.queue-depth.8.block-size.16384": False
+ }
},
}
criteria = self.data_handler._determine_criteria(metadata)
ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
FROM multiarch/alpine:$ARCH-$ALPINE_VERSION
MAINTAINER Mark Beierl <mark.beierl@dell.com>
-LABEL version="0.1" description="OPNFV Storperf Reporting Container"
+LABEL version="8.0" description="OPNFV Storperf Reporting Container"
ARG BRANCH=master
RUN ulimit -n 1024
-RUN apk add --update python py-pip
+RUN apk add --update python3=3.7.5-r1
COPY . /home/opnfv/storperf-reporting
WORKDIR /home/opnfv/storperf-reporting/src
-RUN pip install -r /home/opnfv/storperf-reporting/requirements.txt
+RUN python3 -m pip install -r /home/opnfv/storperf-reporting/requirements.txt
-CMD ["python", "app.py"]
+CMD ["python3", "app.py"]
EXPOSE 5000
##
ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
FROM node:10-alpine
RUN ulimit -n 1024