Update to Python3
authorMark Beierl <mbeierl@vmware.com>
Mon, 16 Dec 2019 21:37:36 +0000 (16:37 -0500)
committerMark Beierl <mbeierl@vmware.com>
Tue, 17 Dec 2019 16:35:36 +0000 (11:35 -0500)
Updates to make StorPerf Python3 compatible, including pulling
in SNAPS-OO as python3.

Change-Id: Ie4e7af684bd5a1b8db086893848b1889da6308ba
JIRA: STORPERF-268
Signed-off-by: Mark Beierl <mbeierl@vmware.com>
22 files changed:
ci/daily.sh
ci/verify-build.sh
ci/verify.sh
cli.py [deleted file]
docker/local-docker-compose.yaml
docker/storperf-httpfrontend/Dockerfile
docker/storperf-master/Dockerfile
docker/storperf-master/storperf/carbon/converter.py
docker/storperf-master/storperf/carbon/emitter.py
docker/storperf-master/storperf/db/job_db.py
docker/storperf-master/storperf/fio/fio_invoker.py
docker/storperf-master/storperf/storperf_master.py
docker/storperf-master/storperf/test_executor.py
docker/storperf-master/storperf/workloads/_base_workload.py
docker/storperf-master/tests/carbon_tests/emitter_test.py
docker/storperf-master/tests/db_tests/graphite_db_test.py
docker/storperf-master/tests/db_tests/job_db_test.py
docker/storperf-master/tests/fio_tests/fio_invoker_test.py
docker/storperf-master/tests/storperf_master_test.py
docker/storperf-master/tests/utilities_tests/data_handler_test.py
docker/storperf-reporting/Dockerfile
docker/storperf-swaggerui/Dockerfile

index 17026f3..1e99709 100755 (executable)
@@ -26,13 +26,13 @@ git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng "${WORKSPACE}/ci/job/
 rm -rf "${WORKSPACE}/ci/job/joid"
 git clone --depth 1 https://gerrit.opnfv.org/gerrit/joid "${WORKSPACE}/ci/job/joid"
 
-virtualenv "${WORKSPACE}/ci/job/storperf_daily_venv"
+python3 -m venv "${WORKSPACE}/ci/job/storperf_daily_venv"
 # shellcheck source=/dev/null
 source "${WORKSPACE}/ci/job/storperf_daily_venv/bin/activate"
 
-pip install --upgrade setuptools==40.5.0
-pip install python-openstackclient==3.16.1
-pip install python-heatclient==1.16.1
+python3 -m pip install --upgrade setuptools==40.5.0
+python3 -m pip install python-openstackclient==3.16.1
+python3 -m pip install python-heatclient==1.16.1
 
 "${WORKSPACE}/ci/generate-admin-rc.sh"
 
index c98fea4..5230cee 100755 (executable)
@@ -35,6 +35,8 @@ export ARCH=${ARCH}
 
 echo Using $ARCH architecture
 
+export CURRENT_UID=$(id -u):$(id -g)
+
 docker-compose -f local-docker-compose.yaml down
 docker-compose -f local-docker-compose.yaml build
 docker-compose -f local-docker-compose.yaml up -d
index 40e94b6..deaafb5 100755 (executable)
@@ -15,17 +15,17 @@ then
     WORKSPACE="$HOME"
 fi
 
-virtualenv $WORKSPACE/storperf_venv
+python3 -m venv $WORKSPACE/storperf_venv
 source $WORKSPACE/storperf_venv/bin/activate
 
-pip install --upgrade setuptools
-pip install autoflake==1.2
-pip install autopep8==1.3.5
-pip install coverage==4.5.1
-pip install flake8==3.5.0
-pip install mock==2.0.0
-pip install nose==1.3.7
-pip install -r docker/storperf-master/requirements.pip
+python3 -m pip install --upgrade setuptools
+python3 -m pip install autoflake==1.2
+python3 -m pip install autopep8==1.3.5
+python3 -m pip install coverage==4.5.1
+python3 -m pip install flake8==3.5.0
+python3 -m pip install mock==2.0.0
+python3 -m pip install nose==1.3.7
+python3 -m pip install -r docker/storperf-master/requirements.pip
 
 final_rc=0
 
diff --git a/cli.py b/cli.py
deleted file mode 100644 (file)
index fda05c2..0000000
--- a/cli.py
+++ /dev/null
@@ -1,186 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-"""
-"""
-
-from storperf.storperf_master import StorPerfMaster
-from threading import Thread
-import cPickle
-import getopt
-import json
-import logging
-import logging.config
-import logging.handlers
-import requests
-import socket
-import struct
-import sys
-
-
-class Usage(Exception):
-    pass
-
-
-def event(event_string):
-    logging.getLogger(__name__).info(event_string)
-
-
-class LogRecordStreamHandler(object):
-
-    def __init__(self):
-        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        self.socket.bind((
-            'localhost', logging.handlers.DEFAULT_UDP_LOGGING_PORT))
-        self.level = logging.INFO
-
-    def read_logs(self):
-        try:
-            while True:
-                datagram = self.socket.recv(8192)
-                chunk = datagram[0:4]
-                struct.unpack(">L", chunk)[0]
-                chunk = datagram[4:]
-                obj = cPickle.loads(chunk)
-                record = logging.makeLogRecord(obj)
-                if (record.levelno >= self.level):
-                    logger = logging.getLogger(record.name)
-                    logger.handle(record)
-
-        except Exception as e:
-            print "ERROR: " + str(e)
-        finally:
-            self.socket.close()
-
-
-def main(argv=None):
-    verbose = False
-    debug = False
-    report = None
-    erase = False
-    terminate = False
-    options = {}
-
-    storperf = StorPerfMaster()
-
-    if argv is None:
-        argv = sys.argv
-    try:
-        try:
-            opts, args = getopt.getopt(argv[1:], "t:w:r:f:escvdTh",
-                                       ["target=",
-                                        "workload=",
-                                        "report=",
-                                        "configure=",
-                                        "erase",
-                                        "nossd",
-                                        "nowarm",
-                                        "verbose",
-                                        "debug",
-                                        "terminate",
-                                        "help",
-                                        ])
-        except getopt.error, msg:
-            raise Usage(msg)
-
-        configuration = None
-        options['workload'] = None
-
-        for o, a in opts:
-            if o in ("-h", "--help"):
-                print __doc__
-                return 0
-            elif o in ("-t", "--target"):
-                options['filename'] = a
-            elif o in ("-v", "--verbose"):
-                verbose = True
-            elif o in ("-d", "--debug"):
-                debug = True
-            elif o in ("-s", "--nossd"):
-                options['nossd'] = a
-            elif o in ("-c", "--nowarm"):
-                options['nowarm'] = False
-            elif o in ("-w", "--workload"):
-                options['workload'] = a
-            elif o in ("-r", "--report"):
-                report = a
-            elif o in ("-e", "--erase"):
-                erase = True
-            elif o in ("-T", "--terminate"):
-                terminate = True
-            elif o in ("-f", "--configure"):
-                configuration = dict(x.split('=') for x in a.split(','))
-
-        if (debug) or (verbose):
-            udpserver = LogRecordStreamHandler()
-
-            if (debug):
-                udpserver.level = logging.DEBUG
-
-            logging.basicConfig(format="%(asctime)s - %(name)s - " +
-                                "%(levelname)s - %(message)s")
-
-            t = Thread(target=udpserver.read_logs, args=())
-            t.setDaemon(True)
-            t.start()
-
-        if (erase):
-            response = requests.delete(
-                'http://127.0.0.1:5000/api/v1.0/configurations')
-            if (response.status_code == 400):
-                content = json.loads(response.content)
-                raise Usage(content['message'])
-            return 0
-
-        if (terminate):
-            response = requests.delete(
-                'http://127.0.0.1:5000/api/v1.0/jobs')
-            if (response.status_code == 400):
-                content = json.loads(response.content)
-                raise Usage(content['message'])
-            return 0
-
-        if (configuration is not None):
-            response = requests.post(
-                'http://127.0.0.1:5000/api/v1.0/configurations', json=configuration)
-            if (response.status_code == 400):
-                content = json.loads(response.content)
-                raise Usage(content['message'])
-
-        if (report is not None):
-            print "Fetching report for %s..." % (report,)
-            response = requests.get(
-                'http://127.0.0.1:5000/api/v1.0/jobs?id=%s' % (report,))
-            if (response.status_code == 400):
-                content = json.loads(response.content)
-                raise Usage(content['message'])
-            content = json.loads(response.content)
-            print content
-        else:
-            print "Calling start..."
-            response = requests.post(
-                'http://127.0.0.1:5000/api/v1.0/jobs', json=options)
-            if (response.status_code == 400):
-                content = json.loads(response.content)
-                raise Usage(content['message'])
-
-            content = json.loads(response.content)
-            print "Started job id: " + content['job_id']
-
-    except Usage as e:
-        print >> sys.stderr, str(e)
-        print >> sys.stderr, "For help use --help"
-        return 2
-
-    except Exception as e:
-        print >> sys.stderr, str(e)
-        return 2
-
-
-if __name__ == "__main__":
-    sys.exit(main())
index 6daa6e2..a4b69b4 100644 (file)
@@ -17,8 +17,10 @@ services:
             args:
                 ARCH: ${ARCH}
         env_file: ${ENV_FILE}
+        user: ${CURRENT_UID}
         volumes:
             - ./storperf-master/:/storperf
+            - ./certs:/etc/ssl/certs/
         links:
             - storperf-graphite
 
@@ -28,6 +30,7 @@ services:
             context: storperf-reporting
             args:
                 ARCH: ${ARCH}
+        user: ${CURRENT_UID}
         volumes:
             - ./storperf-reporting/:/home/opnfv/storperf-reporting
 
index 95188b5..6f072b0 100644 (file)
@@ -13,7 +13,7 @@
 ##
 
 ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
 FROM nginx:alpine
 
 EXPOSE 80 443
index 9764a8d..a2e1a1d 100644 (file)
 #
 
 ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
 FROM multiarch/alpine:$ARCH-$ALPINE_VERSION as storperf-builder
 
 RUN ulimit -n 1024
 
-LABEL version="7.0" description="OPNFV Storperf Docker container"
+LABEL version="8.0" description="OPNFV Storperf Docker container"
 
 ARG BRANCH=master
 
@@ -47,28 +47,27 @@ RUN cd ${repos_dir}/fio && EXTFLAGS="-static" make -j $(grep -c ^processor /proc
 RUN apk --no-cache add --update \
     libffi-dev \
     libressl-dev \
-    python \
-    py-pip \
-    python-dev \
+    python3=3.7.5-r1 \
+    python3-dev=3.7.5-r1 \
     alpine-sdk \
-    linux-headers \
-    bash
+    linux-headers
 
 # Install StorPerf
 
 COPY requirements.pip /storperf/
-RUN pip install --upgrade setuptools==33.1.1
-RUN pip install -r /storperf/requirements.pip
+RUN python3 -m pip install --upgrade setuptools==33.1.1
+RUN python3 -m pip install -r /storperf/requirements.pip
 
 # Build stripped down StorPerf image
 
 FROM multiarch/alpine:$ARCH-$ALPINE_VERSION as storperf-master
 
 RUN apk --no-cache add --update \
-    python \
+    libressl-dev \
+    python3=3.7.5-r1 \
     bash
 
-COPY --from=storperf-builder /usr/lib/python2.7/site-packages /usr/lib/python2.7/site-packages
+COPY --from=storperf-builder /usr/lib/python3.7/site-packages /usr/lib/python3.7/site-packages
 COPY --from=storperf-builder /usr/local/bin/fio /usr/local/bin/fio
 COPY . /storperf
 
@@ -80,4 +79,4 @@ RUN chmod 600 storperf/resources/ssh/storperf_rsa
 EXPOSE 5000
 
 # Entry point
-CMD [ "python", "./rest_server.py" ]
+CMD [ "python3", "./rest_server.py" ]
index 623c144..4b5e6aa 100644 (file)
@@ -32,12 +32,12 @@ class Converter(object):
 
     def resurse_to_flat_dictionary(self, json, prefix=None):
         if type(json) == dict:
-            for k, v in json.items():
+            for k, v in list(json.items()):
                 if prefix is None:
-                    key = k.decode("utf-8").replace(" ", "_")
+                    key = k.replace(" ", "_")
                 else:
-                    key = prefix + "." + k.decode("utf-8").replace(" ", "_")
-                if hasattr(v, '__iter__'):
+                    key = prefix + "." + k.replace(" ", "_")
+                if type(v) is list or type(v) is dict:
                     self.resurse_to_flat_dictionary(v, key)
                 else:
                     self.flat_dictionary[key] = str(v).replace(" ", "_")
@@ -45,7 +45,7 @@ class Converter(object):
             index = 0
             for v in json:
                 index += 1
-                if hasattr(v, '__iter__'):
+                if type(v) is list or type(v) is dict:
                     self.resurse_to_flat_dictionary(
                         v, prefix + "." + str(index))
                 else:
index b196709..13503b2 100644 (file)
@@ -40,19 +40,19 @@ class CarbonMetricTransmitter():
                     message = "%s %s %s\n" \
                         % (key, value, timestamp)
                     self.logger.debug("Metric: " + message.strip())
-                    carbon_socket.send(message)
+                    carbon_socket.send(message.encode('utf-8'))
                 except ValueError:
                     self.logger.debug("Ignoring non numeric metric %s %s"
                                       % (key, value))
 
             message = "%s.commit-marker %s %s\n" \
                 % (commit_marker, timestamp, timestamp)
-            carbon_socket.send(message)
+            carbon_socket.send(message.encode('utf-8'))
             self.logger.debug("Marker %s" % message.strip())
             self.logger.info("Sent metrics to %s:%s with timestamp %s"
                              % (self.host, self.port, timestamp))
 
-        except Exception, e:
+        except Exception as e:
             self.logger.error("While notifying carbon %s:%s %s"
                               % (self.host, self.port, e))
 
index b029a35..c3632e4 100644 (file)
@@ -220,7 +220,7 @@ class JobDB(object):
 
             db = sqlite3.connect(JobDB.db_name)
             cursor = db.cursor()
-            for param, value in params.iteritems():
+            for param, value in params.items():
                 cursor.execute(
                     """insert into job_params
                                (job_id,
index c665598..2437763 100644 (file)
@@ -45,6 +45,8 @@ class FIOInvoker(object):
         self.json_body = ""
         try:
             for line in iter(stdout.readline, b''):
+                if type(line) == bytes:
+                    line = line.decode('utf-8')
                 if line.startswith("fio"):
                     line = ""
                     continue
@@ -78,7 +80,8 @@ class FIOInvoker(object):
     def stderr_handler(self, stderr):
         self.logger.debug("Started")
         for line in iter(stderr.readline, b''):
-            self.logger.error("FIO Error: %s", line.rstrip())
+            if len(line) > 0:
+                self.logger.error("FIO Error: %s", line.rstrip())
             self.stderr.append(line.rstrip())
 
             # Sometime, FIO gets stuck and will give us this message:
@@ -137,10 +140,12 @@ class FIOInvoker(object):
 
         ssh = self._ssh_client()
 
-        command = "sudo killall fio"
-
-        self.logger.debug("Executing on %s: %s" % (self.remote_host, command))
-        (_, stdout, stderr) = ssh.exec_command(command)
+        kill_commands = ['sudo killall fio',
+                         'sudo pkill fio']
+        for command in kill_commands:
+            self.logger.debug("Executing on %s: %s" %
+                              (self.remote_host, command))
+            (_, stdout, stderr) = ssh.exec_command(command)
 
         for line in stdout.readlines():
             self.logger.debug(line.strip())
index 3de8478..afcd018 100644 (file)
@@ -7,26 +7,25 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-import StringIO
+
+from _io import StringIO
 from datetime import datetime
-import json
-import logging
 from multiprocessing.pool import ThreadPool
-import os
-import socket
-from time import sleep
-import uuid
-
-import paramiko
 from scp import SCPClient
 from snaps.config.stack import StackConfig
 from snaps.openstack.create_stack import OpenStackHeatStack
 from snaps.openstack.os_credentials import OSCreds
 from snaps.openstack.utils import heat_utils, cinder_utils, glance_utils
 from snaps.thread_utils import worker_pool
-
 from storperf.db.job_db import JobDB
 from storperf.test_executor import TestExecutor
+from time import sleep
+import json
+import logging
+import os
+import paramiko
+import socket
+import uuid
 
 
 class ParameterError(Exception):
index 4b5bbd4..cb7e478 100644 (file)
@@ -318,7 +318,7 @@ class TestExecutor(object):
             workload = current_workload['workload']
             self._thread_gate = ThreadGate(
                 len(self.slaves) * min(1, self.volume_count),
-                workload.options['status-interval'])
+                float(workload.options['status-interval']))
 
             self.current_workload = current_workload['name']
 
@@ -362,7 +362,7 @@ class TestExecutor(object):
         workloads = []
 
         if self._custom_workloads:
-            for workload_name in self._custom_workloads.iterkeys():
+            for workload_name in self._custom_workloads.keys():
                 real_name = workload_name
                 if real_name.startswith('_'):
                     real_name = real_name.replace('_', '')
@@ -380,7 +380,7 @@ class TestExecutor(object):
                 workload.id = self.job_db.job_id
 
                 workload_params = self._custom_workloads[workload_name]
-                for param, value in workload_params.iteritems():
+                for param, value in workload_params.items():
                     if param == "readwrite":
                         param = "rw"
                     if param in workload.fixed_options:
index 7468fea..5aa596e 100644 (file)
@@ -57,7 +57,7 @@ class _base_workload(object):
 
         self.setup()
 
-        for key, value in self.options.iteritems():
+        for key, value in self.options.items():
             if value is not None:
                 args.append('--' + key + "=" + str(value))
             else:
index f5a78d1..7ea515b 100644 (file)
@@ -11,7 +11,7 @@ import json
 from time import strptime
 import unittest
 
-import mock
+from unittest import mock
 
 from storperf.carbon import converter
 from storperf.carbon.emitter import CarbonMetricTransmitter
@@ -69,9 +69,15 @@ class CarbonMetricTransmitterTest(unittest.TestCase):
         emitter.carbon_port = self.listen_port
         emitter.transmit_metrics(result, None)
 
+        element = ""
+        for element in data:
+            element = element.decode('utf-8')
+            if element.startswith("host.run-name"):
+                break
+
         self.assertEqual("host.run-name.key 123.0 975542400\n",
-                         data[1],
-                         data[1])
+                         element,
+                         data)
 
     @mock.patch("socket.socket")
     @mock.patch("time.gmtime")
@@ -90,9 +96,14 @@ class CarbonMetricTransmitterTest(unittest.TestCase):
         emitter.carbon_port = self.listen_port
         emitter.transmit_metrics(result, None)
 
+        element = ""
+        for element in data:
+            element = element.decode('utf-8')
+            if element.startswith("None.commit-marker"):
+                break
         self.assertEqual("None.commit-marker 975542400 975542400\n",
-                         data[1],
-                         data[1])
+                         element,
+                         data)
 
     @mock.patch("socket.socket")
     def test_connect_fails(self, mock_socket):
index d5fbbfc..2fabfd4 100644 (file)
@@ -9,8 +9,7 @@
 
 import unittest
 
-import mock
-
+from unittest import mock
 from storperf.db.graphite_db import GraphiteDB
 
 
index 25fda1f..5201963 100644 (file)
@@ -11,8 +11,7 @@ import os
 import sqlite3
 import unittest
 
-import mock
-
+from unittest import mock
 from storperf.db.job_db import JobDB
 from storperf.workloads.rr import rr
 
index 4672651..3a30500 100644 (file)
@@ -7,11 +7,11 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-from StringIO import StringIO
 import json
 import unittest
 
 from storperf.fio.fio_invoker import FIOInvoker
+from io import BytesIO
 
 
 class Test(unittest.TestCase):
@@ -34,7 +34,7 @@ class Test(unittest.TestCase):
         self.fio_invoker.register(self.event)
         string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
 
-        output = StringIO(string + "\n")
+        output = BytesIO((string + "\n").encode('utf-8'))
         self.fio_invoker.stdout_handler(output)
 
         self.assertEqual(self.simple_dictionary, self.metric)
@@ -43,7 +43,7 @@ class Test(unittest.TestCase):
         self.fio_invoker.register(self.event)
         string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
         terminating = "fio: terminating on signal 2\n"
-        output = StringIO(terminating + string + "\n")
+        output = BytesIO((terminating + string + "\n").encode('utf-8'))
         self.fio_invoker.stdout_handler(output)
 
         self.assertEqual(self.simple_dictionary, self.metric)
@@ -52,7 +52,7 @@ class Test(unittest.TestCase):
         self.fio_invoker.register(self.event)
         string = "{'key': 'value'}"
 
-        output = StringIO(string + "\n")
+        output = BytesIO((string + "\n").encode('utf-8'))
         self.fio_invoker.stdout_handler(output)
 
         self.assertEqual(None, self.metric)
@@ -61,7 +61,7 @@ class Test(unittest.TestCase):
         self.fio_invoker.register(self.event)
         string = "{'key':\n}"
 
-        output = StringIO(string + "\n")
+        output = BytesIO((string + "\n").encode('utf-8'))
         self.fio_invoker.stdout_handler(output)
 
         self.assertEqual(None, self.metric)
@@ -71,7 +71,7 @@ class Test(unittest.TestCase):
         string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
 
         self.fio_invoker.terminated = True
-        output = StringIO(string + "\n")
+        output = BytesIO((string + "\n").encode('utf-8'))
         self.fio_invoker.stdout_handler(output)
 
         self.assertEqual(None, self.metric)
@@ -81,7 +81,7 @@ class Test(unittest.TestCase):
         self.fio_invoker.register(self.event)
         string = json.dumps(self.simple_dictionary, indent=4, sort_keys=True)
 
-        output = StringIO(string + "\n")
+        output = BytesIO((string + "\n").encode('utf-8'))
         self.fio_invoker.stdout_handler(output)
 
         self.assertEqual(self.simple_dictionary, self.metric)
index 03009d1..1edac6d 100644 (file)
@@ -9,7 +9,7 @@
 
 import unittest
 
-import mock
+from unittest.mock import patch
 
 from storperf.storperf_master import StorPerfMaster
 
@@ -17,8 +17,8 @@ from storperf.storperf_master import StorPerfMaster
 class StorPerfMasterTest(unittest.TestCase):
 
     def setUp(self):
-        with mock.patch("storperf.storperf_master.OSCreds"), \
-                mock.patch(
+        with patch("storperf.storperf_master.OSCreds"), \
+                patch(
                     "storperf.storperf_master.OpenStackHeatStack") as oshs:
             oshs.return_value.get_stack.return_value = None
 
index 35150dd..7e8cbcc 100644 (file)
@@ -10,7 +10,7 @@
 import os
 import unittest
 
-import mock
+from unittest import mock
 
 from storperf.utilities.data_handler import DataHandler
 
@@ -311,10 +311,10 @@ class DataHandlerTest(unittest.TestCase):
     def test_pass_criteria(self):
         metadata = {
             "details": {
-              "steady_state": {
-                "_warm_up.queue-depth.8.block-size.16384": False,
-                "rw.queue-depth.4.block-size.16384": True
-              }
+                "steady_state": {
+                    "_warm_up.queue-depth.8.block-size.16384": False,
+                    "rw.queue-depth.4.block-size.16384": True
+                }
             },
         }
         criteria = self.data_handler._determine_criteria(metadata)
@@ -325,11 +325,11 @@ class DataHandlerTest(unittest.TestCase):
     def test_fail_criteria(self):
         metadata = {
             "details": {
-              "steady_state": {
-                "_warm_up.queue-depth.8.block-size.16384": False,
-                "rw.queue-depth.4.block-size.16384": True,
-                "rw.queue-depth.8.block-size.16384": False
-              }
+                "steady_state": {
+                    "_warm_up.queue-depth.8.block-size.16384": False,
+                    "rw.queue-depth.4.block-size.16384": True,
+                    "rw.queue-depth.8.block-size.16384": False
+                }
             },
         }
         criteria = self.data_handler._determine_criteria(metadata)
index ff28dd1..6d017ae 100644 (file)
 
 
 ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
 FROM multiarch/alpine:$ARCH-$ALPINE_VERSION
 MAINTAINER Mark Beierl <mark.beierl@dell.com>
-LABEL version="0.1" description="OPNFV Storperf Reporting Container"
+LABEL version="8.0" description="OPNFV Storperf Reporting Container"
 
 ARG BRANCH=master
 
 RUN ulimit -n 1024
 
-RUN apk add --update python py-pip
+RUN apk add --update python3=3.7.5-r1
 
 COPY . /home/opnfv/storperf-reporting
 WORKDIR /home/opnfv/storperf-reporting/src
 
-RUN pip install -r /home/opnfv/storperf-reporting/requirements.txt
+RUN python3 -m pip install -r /home/opnfv/storperf-reporting/requirements.txt
 
-CMD ["python", "app.py"]
+CMD ["python3", "app.py"]
 
 EXPOSE 5000
index 5d58a30..9f82890 100644 (file)
@@ -13,7 +13,7 @@
 ##
 
 ARG ARCH=x86_64
-ARG ALPINE_VERSION=v3.6
+ARG ALPINE_VERSION=v3.10
 FROM node:10-alpine
 
 RUN ulimit -n 1024