Fix pep8 errors for python files in benchmarks,data,dashboard 59/16759/2
authorzhifeng.jiang <jiang.zhifeng@zte.com.cn>
Tue, 12 Jul 2016 14:36:43 +0000 (22:36 +0800)
committerzhifeng jiang <jiang.zhifeng@zte.com.cn>
Wed, 13 Jul 2016 01:51:02 +0000 (01:51 +0000)
JIRA:QTIP-89

Change-Id: I3465221f0bdc9a8eb7c4e26069f7367fb1add729
Signed-off-by: zhifeng.jiang <jiang.zhifeng@zte.com.cn>
benchmarks/playbooks/result_transform/dpi/dpi_transform.py
benchmarks/playbooks/result_transform/fio/fio_result_transform.py
benchmarks/playbooks/result_transform/iperf/iperf_transform.py
benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
benchmarks/playbooks/result_transform/ssl/ssl_transform.py
benchmarks/playbooks/result_transform/ubench_transform.py
dashboard/pushtoDB.py
data/report/Qtip_Report.py
data/report/get_indices.py
data/report/get_results.py
data/report/qtip_graph.py

index b95e0e2..622030c 100644 (file)
@@ -1,5 +1,4 @@
 import os
-import json
 import pickle
 import datetime
 
@@ -46,4 +45,4 @@ result = {}
 result['DPI_benchmark(M pps)'] = round(dpi_result_pps, 3)
 result['DPI_benchmark(Gb/s)'] = round(dpi_result_bps, 3)
 with open('./result_temp', 'w+') as result_file:
-    pickle.dump(result, result_file)
\ No newline at end of file
+    pickle.dump(result, result_file)
index f9410a6..9929aa1 100755 (executable)
@@ -4,22 +4,20 @@ import os
 import datetime
 
 with open("fio_result.json") as fio_raw:
-    fio_data=json.load(fio_raw)
+    fio_data = json.load(fio_raw)
 
-r_iops=[];
-r_io_bytes=[];
-r_io_runtime=[];
-r_lat=[];
-w_iops=[];
-w_io_bytes=[];
-w_io_runtime=[];
-w_lat=[];
+r_iops = []
+r_io_bytes = []
+r_io_runtime = []
+r_lat = []
+w_iops = []
+w_io_bytes = []
+w_io_runtime = []
+w_lat = []
 
+total_jobs = len(fio_data["jobs"])
 
-
-total_jobs=len(fio_data["jobs"])
-
-for x in range (0,int(total_jobs)):
+for x in range(0, int(total_jobs)):
     r_iops.append(fio_data["jobs"][x]["read"]["iops"])
     r_io_bytes.append(fio_data["jobs"][x]["read"]["io_bytes"])
     r_io_runtime.append(fio_data["jobs"][x]["read"]["runtime"])
@@ -29,29 +27,24 @@ for x in range (0,int(total_jobs)):
     w_io_runtime.append(fio_data["jobs"][x]["write"]["runtime"])
     w_lat.append(fio_data["jobs"][x]["write"]["lat"]["mean"])
 
+FIO_result_dict = {}
 
+for x in range(0, total_jobs):
+    FIO_result_dict['Job_' + str(x)] = {}
+    FIO_result_dict['Job_' + str(x)]['read'] = {}
+    FIO_result_dict['Job_' + str(x)]['read']['Total_IO_Bytes'] = r_io_bytes[x]
+    FIO_result_dict['Job_' + str(x)]['read']['IO/sec'] = r_iops[x]
+    FIO_result_dict['Job_' + str(x)]['read']['IO_runtime (millisec)'] = r_io_runtime[x]
+    FIO_result_dict['Job_' + str(x)]['read']['mean_IO_latenchy (microsec)'] = r_lat[x]
 
-FIO_result_dict={};
-
-for x in range (0,total_jobs):
-    FIO_result_dict['Job_'+str(x)]={};
-    FIO_result_dict['Job_'+str(x)]['read']={};
-    FIO_result_dict['Job_'+str(x)]['read']['Total_IO_Bytes']=r_io_bytes[x]
-    FIO_result_dict['Job_'+str(x)]['read']['IO/sec']=r_iops[x]
-    FIO_result_dict['Job_'+str(x)]['read']['IO_runtime (millisec)']=r_io_runtime[x]
-    FIO_result_dict['Job_'+str(x)]['read']['mean_IO_latenchy (microsec)']=r_lat[x]
-    
-    FIO_result_dict['Job_'+str(x)]['write']={};
-    FIO_result_dict['Job_'+str(x)]['write']['Total_IO_Bytes']=w_io_bytes[x]
-    FIO_result_dict['Job_'+str(x)]['write']['IO/sec']=w_iops[x]
-    FIO_result_dict['Job_'+str(x)]['write']['IO_runtime (millisec)']=w_io_runtime[x]
-    FIO_result_dict['Job_'+str(x)]['write']['mean_IO_latenchy (microsec)']=w_lat[x]
-
-
+    FIO_result_dict['Job_' + str(x)]['write'] = {}
+    FIO_result_dict['Job_' + str(x)]['write']['Total_IO_Bytes'] = w_io_bytes[x]
+    FIO_result_dict['Job_' + str(x)]['write']['IO/sec'] = w_iops[x]
+    FIO_result_dict['Job_' + str(x)]['write']['IO_runtime (millisec)'] = w_io_runtime[x]
+    FIO_result_dict['Job_' + str(x)]['write']['mean_IO_latenchy (microsec)'] = w_lat[x]
 
 host_name = (os.popen("hostname").read().rstrip())
 report_time = str(datetime.datetime.utcnow().isoformat())
-os.system("mv fio_result.json "+str(host_name)+"-"+report_time+".log")
-with open('./result_temp','w+')as out_fio_result:
-    pickle.dump(FIO_result_dict,out_fio_result)
-
+os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
+with open('./result_temp', 'w+') as out_fio_result:
+    pickle.dump(FIO_result_dict, out_fio_result)
index 39c5956..8df5a79 100644 (file)
@@ -1,30 +1,29 @@
 import json\r
 import datetime\r
 import pickle\r
-with open('iperf_raw.json','r') as ifile:\r
-    raw_iperf_data=json.loads(ifile.read().rstrip())\r
-    \r
-    \r
-bits_sent= raw_iperf_data['end']['sum_sent']['bits_per_second']\r
-bits_received= raw_iperf_data['end']['sum_received']['bits_per_second']\r
-total_byte_sent=raw_iperf_data['end']['sum_sent']['bytes']\r
-total_byte_received=raw_iperf_data['end']['sum_received']['bytes']\r
-cpu_host_total_percent=raw_iperf_data['end']['cpu_utilization_percent']['host_total']\r
-cpu_remote_total_percent=raw_iperf_data['end']['cpu_utilization_percent']['remote_total']\r
+with open('iperf_raw.json', 'r') as ifile:\r
+    raw_iperf_data = json.loads(ifile.read().rstrip())\r
 \r
-result={}\r
+bits_sent = raw_iperf_data['end']['sum_sent']['bits_per_second']\r
+bits_received = raw_iperf_data['end']['sum_received']['bits_per_second']\r
+total_byte_sent = raw_iperf_data['end']['sum_sent']['bytes']\r
+total_byte_received = raw_iperf_data['end']['sum_received']['bytes']\r
+cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']\r
+cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']\r
+\r
+result = {}\r
 time_stamp = str(datetime.datetime.utcnow().isoformat())\r
 \r
-result['1. Version']=raw_iperf_data['start']['version']\r
-result['2. Bandwidth']={}\r
+result['1. Version'] = raw_iperf_data['start']['version']\r
+result['2. Bandwidth'] = {}\r
 result['2. Bandwidth']['1. throughput Sender (b/s)'] = bits_sent\r
 result['2. Bandwidth']['2. throughput Received (b/s)'] = bits_received\r
-result['3. CPU']={}\r
-result['3. CPU']['1. CPU host total (%)']=cpu_host_total_percent\r
-result['3. CPU']['2. CPU remote total (%)']=cpu_remote_total_percent\r
+result['3. CPU'] = {}\r
+result['3. CPU']['1. CPU host total (%)'] = cpu_host_total_percent\r
+result['3. CPU']['2. CPU remote total (%)'] = cpu_remote_total_percent\r
 \r
-with open('iperf_raw-'+time_stamp+'.log','w+') as ofile:\r
+with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:\r
     ofile.write(json.dumps(raw_iperf_data))\r
-    \r
+\r
 with open('./result_temp', 'w+') as result_file:\r
-    pickle.dump(result,result_file)
\ No newline at end of file
+    pickle.dump(result, result_file)\r
index aed68ac..c3f03dd 100644 (file)
@@ -1,9 +1,7 @@
 import os
-import json
 import pickle
 import datetime
 
-
 intmem_copy = os.popen("cat Intmem | grep 'BatchRun   Copy' | awk '{print $4}'").read().rstrip()
 intmem_scale = os.popen("cat Intmem | grep 'BatchRun   Scale' | awk '{print $4}'").read().rstrip()
 intmem_add = os.popen("cat Intmem | grep 'BatchRun   Add' | awk '{print $4}'").read().rstrip()
@@ -22,35 +20,27 @@ floatmem_average = os.popen("cat Floatmem | grep 'BatchRun  AVERAGE' | awk '{pri
 print floatmem_copy
 print floatmem_average
 
-
 hostname = os.popen("hostname").read().rstrip()
 time_stamp = str(datetime.datetime.utcnow().isoformat())
 
-
 os.system("mv Intmem " + hostname + "-" + time_stamp + ".log")
 os.system("cp  Floatmem >> " + hostname + "-" + time_stamp + ".log")
 
+result = {}
 
-result = {};
-
-result['1. INTmem bandwidth'] = {};
-result['1. INTmem bandwidth']['1. Copy (MB/s)']=intmem_copy
-result['1. INTmem bandwidth']['2. Add (MB/s)']=intmem_add
-result['1. INTmem bandwidth']['3. Scale (MB/s)']=intmem_scale
-result['1. INTmem bandwidth']['4. Triad (MB/s)']=intmem_triad
-result['1. INTmem bandwidth']['5. Average (MB/s)']=intmem_average
-
-
-result['2. FLOATmem bandwidth'] = {};
-result['2. FLOATmem bandwidth']['1. Copy (MB/s)']=floatmem_copy
-result['2. FLOATmem bandwidth']['2. Add (MB/s)']=floatmem_add
-result['2. FLOATmem bandwidth']['3. Scale (MB/s)']=floatmem_scale
-result['2. FLOATmem bandwidth']['4. Triad (MB/s)']=floatmem_triad
-result['2. FLOATmem bandwidth']['5. Average (MB/s)']=floatmem_average
-
+result['1. INTmem bandwidth'] = {}
+result['1. INTmem bandwidth']['1. Copy (MB/s)'] = intmem_copy
+result['1. INTmem bandwidth']['2. Add (MB/s)'] = intmem_add
+result['1. INTmem bandwidth']['3. Scale (MB/s)'] = intmem_scale
+result['1. INTmem bandwidth']['4. Triad (MB/s)'] = intmem_triad
+result['1. INTmem bandwidth']['5. Average (MB/s)'] = intmem_average
 
+result['2. FLOATmem bandwidth'] = {}
+result['2. FLOATmem bandwidth']['1. Copy (MB/s)'] = floatmem_copy
+result['2. FLOATmem bandwidth']['2. Add (MB/s)'] = floatmem_add
+result['2. FLOATmem bandwidth']['3. Scale (MB/s)'] = floatmem_scale
+result['2. FLOATmem bandwidth']['4. Triad (MB/s)'] = floatmem_triad
+result['2. FLOATmem bandwidth']['5. Average (MB/s)'] = floatmem_average
 
 with open('./result_temp', 'w+') as result_file:
     pickle.dump(result, result_file)
-
-
index 6e63225..029135a 100644 (file)
@@ -1,10 +1,7 @@
 import os
-import json
 import pickle
 import datetime
 
-#total_cpu=os.popen("cat $HOME/tempD/nDPI/example/result.txt | tail -1").read()
-
 openssl_version = os.popen("cat RSA_dump | head -1").read().rstrip()
 rsa_512_sps = os.popen(
     "cat RSA_dump | grep  '512 bits ' | awk '{print $6}' ").read().rstrip()
@@ -23,7 +20,6 @@ rsa_4096_sps = os.popen(
 rsa_4096_vps = os.popen(
     "cat RSA_dump | grep  '4096 bits ' | awk '{print $7}' ").read().rstrip()
 
-
 aes_16B = os.popen(
     "cat AES-128-CBC_dump | grep  'aes-128-cbc  ' | awk '{print $2}' ").read().rstrip()
 aes_64B = os.popen(
@@ -35,16 +31,12 @@ aes_1024B = os.popen(
 aes_8192B = os.popen(
     "cat AES-128-CBC_dump | grep  'aes-128-cbc  ' | awk '{print $6}' ").read().rstrip()
 
-
 hostname = os.popen("hostname").read().rstrip()
 time_stamp = str(datetime.datetime.utcnow().isoformat())
 
-
 os.system("mv RSA_dump " + hostname + "-" + time_stamp + ".log")
 os.system("cat AES-128-CBC_dump >> " + hostname + "-" + time_stamp + ".log")
 
-
-
 result = {}
 
 result['1. Version'] = [openssl_version]
@@ -64,4 +56,3 @@ result['3. AES-128-cbc throughput']['5. 8192 Bytes block (B/sec)'] = aes_8192B
 
 with open('./result_temp', 'w+') as result_file:
     pickle.dump(result, result_file)
-
index f15943d..3c8ba1d 100644 (file)
@@ -1,7 +1,6 @@
 import os
 import json
 import pickle
-import datetime
 
 total_cpu = os.popen(
     "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $1;}' | awk 'NR==1'").read().rstrip()
index 75c1d61..d5458b1 100644 (file)
@@ -2,21 +2,25 @@ import requests
 import json
 import datetime
 import os
+import sys
 TEST_DB = 'http://testresults.opnfv.org/test/api/v1'
 
-suite_list = [('compute_result.json', 'compute_test_suite'),('network_result.json', 'network_test_suite'),('storage_result.json', 'storage_test_suite')]
-payload_list = { }
+suite_list = [('compute_result.json', 'compute_test_suite'),
+              ('network_result.json', 'network_test_suite'),
+              ('storage_result.json', 'storage_test_suite')]
+payload_list = {}
 
-def push_results_to_db(db_url, case_name, payload,logger=None, pod_name="dell-pod1"):
+
+def push_results_to_db(db_url, case_name, payload, logger=None, pod_name="dell-pod1"):
 
     url = db_url + "/results"
-    creation_date= str(datetime.datetime.utcnow().isoformat())
+    creation_date = str(datetime.datetime.utcnow().isoformat())
     installer = os.environ['INSTALLER_TYPE']
     pod_name = os.environ['NODE_NAME']
 
     params = {"project_name": "qtip", "case_name": case_name,
               "pod_name": pod_name, "installer": installer, "start_date": creation_date,
-              "version": "test" , "details": payload}
+              "version": "test", "details": payload}
 
     headers = {'Content-Type': 'application/json'}
     print pod_name
@@ -31,13 +35,15 @@ def push_results_to_db(db_url, case_name, payload,logger=None, pod_name="dell-po
         print "Error:", sys.exc_info()[0]
         return False
 
+
 def populate_payload(suite_list):
 
     global payload_list
-    for k,v in suite_list:
+    for k, v in suite_list:
+
+        if os.path.isfile('results/' + str(k)):
+            payload_list[k] = v
 
-        if os.path.isfile('results/'+str(k)):
-            payload_list[k]=v
 
 def main():
 
@@ -45,10 +51,10 @@ def main():
     populate_payload(suite_list)
     if payload_list:
         print payload_list
-        for suite,case in payload_list.items():
-            with open('results/'+suite,'r') as result_file:
-                j=json.load(result_file)
-            push_results_to_db(TEST_DB, case , j)
+        for suite, case in payload_list.items():
+            with open('results/' + suite, 'r') as result_file:
+                j = json.load(result_file)
+            push_results_to_db(TEST_DB, case, j)
     elif not payload_list:
         print 'Results not found'
 
index 9f2226c..cd20d57 100644 (file)
-from reportlab.pdfgen import canvas
 from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image
-from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
+from reportlab.lib.styles import getSampleStyleSheet
 from reportlab.lib.units import inch
 from reportlab.lib.pagesizes import letter
-from reportlab.platypus import ListFlowable, ListItem
-import qtip_graph  as graph
+import qtip_graph as graph
 import get_indices as results
 from get_results import report_concat
 from get_results import generate_result
 
-def dump_result(Stor,directory, testcase):
+
+def dump_result(Stor, directory, testcase):
     try:
-        lower_s=testcase.lower()
-        Stor.append(Paragraph(testcase,Style['h3']))
-        l1=report_concat(directory,lower_s)
-        l=1
+        lower_s = testcase.lower()
+        Stor.append(Paragraph(testcase, Style['h3']))
+        l1 = report_concat(directory, lower_s)
+        l = 1
         for a in l1:
-            Stor.append(Paragraph(testcase+" result_"+str(l),Style['h5']))
-            raw_string=generate_result(a,0)
-            replaced_string=raw_string.replace('\n', '<br/> ').replace(' ','&nbsp;')
-            Stor.append(Paragraph(replaced_string,Style['BodyText']))
-            l=l+1
+            Stor.append(Paragraph(testcase + " result_" + str(l), Style['h5']))
+            raw_string = generate_result(a, 0)
+            replaced_string = raw_string.replace('\n', '<br/> ').replace(' ', '&nbsp;')
+            Stor.append(Paragraph(replaced_string, Style['BodyText']))
+            l = l + 1
     except OSError:
         print "Results for {0} not found".format(testcase)
 
-doc = SimpleDocTemplate("../../results/QTIP_results.pdf",pagesize=letter,
-                        rightMargin=72,leftMargin=72,
-                        topMargin=72,bottomMargin=18)
-Stor=[]
-Style=getSampleStyleSheet()
-Title="QTIP Benchmark Suite"
-Stor.append(Paragraph(Title,Style['Title']))
-H1="Results"
-Stor.append(Spacer(0,36))
+doc = SimpleDocTemplate("../../results/QTIP_results.pdf", pagesize=letter,
+                        rightMargin=72, leftMargin=72,
+                        topMargin=72, bottomMargin=18)
+Stor = []
+Style = getSampleStyleSheet()
+Title = "QTIP Benchmark Suite"
+Stor.append(Paragraph(Title, Style['Title']))
+H1 = "Results"
+Stor.append(Spacer(0, 36))
 Stor.append(Paragraph(H1, Style['h2']))
-compute=0
-storage=0
-network=0
+compute = 0
+storage = 0
+network = 0
 try:
-    compute=results.get_index('compute_result')
+    compute = results.get_index('compute_result')
 except IOError:
     pass
 
 try:
-    storage=results.get_index('storage_result')
+    storage = results.get_index('storage_result')
 except IOError:
     pass
 try:
-    network=results.get_index('network_result')
+    network = results.get_index('network_result')
 except IOError:
     pass
 
-Stor.append(Paragraph("Compute Suite:   %f" %compute, Style['h5']))
-Stor.append(Paragraph("Storage Suite:   %f" %storage, Style['h5']))
-Stor.append(Paragraph("Network Suite:   %f" %network, Style['h5']))
-graph.plot_indices(compute,storage,network)
-qtip_graph=('qtip_graph.jpeg')
-im=Image(qtip_graph, 5*inch,4*inch)
+Stor.append(Paragraph("Compute Suite:   %f" % compute, Style['h5']))
+Stor.append(Paragraph("Storage Suite:   %f" % storage, Style['h5']))
+Stor.append(Paragraph("Network Suite:   %f" % network, Style['h5']))
+graph.plot_indices(compute, storage, network)
+qtip_graph = ('qtip_graph.jpeg')
+im = Image(qtip_graph, 5 * inch, 4 * inch)
 Stor.append(im)
 Stor.append(Spacer(0, 12))
 Stor.append(Paragraph("Reference POD", Style['h5']))
-ptext="The Dell OPNFV Lab POD3  has been taken as the reference POD against which the reference results have been collected. The POD consists of 6 identical servers. The details of such a server are:"
-Stor.append(Paragraph(ptext,Style['Normal']))
-ptext="<bullet>&bull;</bullet>Server Type: Dell PowerEdge R630 Server"
-Stor.append(Paragraph(ptext,Style['Bullet']))
-ptext="<bullet>&bull;</bullet>CPU: Intel  Xeon E5-2698 @ 2300 MHz"
+ptext = "The Dell OPNFV Lab POD3  has been taken as the reference POD against which the reference results have been collected. The POD consists of 6 identical servers. The details of such a server are:"
+Stor.append(Paragraph(ptext, Style['Normal']))
+ptext = "<bullet>&bull;</bullet>Server Type: Dell PowerEdge R630 Server"
+Stor.append(Paragraph(ptext, Style['Bullet']))
+ptext = "<bullet>&bull;</bullet>CPU: Intel  Xeon E5-2698 @ 2300 MHz"
 Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="<bullet>&bull;</bullet>RAM: 128GB"
+ptext = "<bullet>&bull;</bullet>RAM: 128GB"
 Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="<bullet>&bull;</bullet>Storage SSD: 420GB"
+ptext = "<bullet>&bull;</bullet>Storage SSD: 420GB"
 Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="<bullet>&bull;</bullet>Network Card: Intel 2P X520/2P I350 rNDC"
+ptext = "<bullet>&bull;</bullet>Network Card: Intel 2P X520/2P I350 rNDC"
 Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="Servers interconnected through a DELL S4810 switch using a 10Gbps physical link"
+ptext = "Servers interconnected through a DELL S4810 switch using a 10Gbps physical link"
 Stor.append(Paragraph(ptext, Style["Bullet"]))
 Stor.append(Spacer(0, 12))
-ptext="For Further  Details of the Reference POD hardware, please visit: https://wiki.opnfv.org/reference_pod_hardware_details"
-Stor.append(Paragraph(ptext,Style['Normal']))
+ptext = "For Further  Details of the Reference POD hardware, please visit: https://wiki.opnfv.org/reference_pod_hardware_details"
+Stor.append(Paragraph(ptext, Style['Normal']))
 Stor.append(Spacer(0, 12))
-ptext="For Details of the Reference POD Results,  please visit: https://wiki.opnfv.org/reference_pod_qtip_results"
+ptext = "For Details of the Reference POD Results,  please visit: https://wiki.opnfv.org/reference_pod_qtip_results"
 Stor.append(Spacer(0, 12))
-Stor.append(Paragraph(ptext,Style['Normal']))
+Stor.append(Paragraph(ptext, Style['Normal']))
 Stor.append(Paragraph("RAW Results", Style['h1']))
 Stor.append(Paragraph("Compute Results", Style['h2']))
 
-dump_result(Stor,"../../results/dhrystone/","Dhrystone_bm")
-dump_result(Stor,"../../results/dhrystone/","Dhrystone_vm")
-
-dump_result(Stor,"../../results/whetstone/","Whetstone_bm")
-dump_result(Stor,"../../results/whetstone/","Whetstone_vm")
+dump_result(Stor, "../../results/dhrystone/", "Dhrystone_bm")
+dump_result(Stor, "../../results/dhrystone/", "Dhrystone_vm")
 
-dump_result(Stor,"../../results/ramspeed/","Ramspeed_bm")
-dump_result(Stor,"../../results/ramspeed/","Ramspeed_vm")
+dump_result(Stor, "../../results/whetstone/", "Whetstone_bm")
+dump_result(Stor, "../../results/whetstone/", "Whetstone_vm")
 
-dump_result(Stor,"../../results/ssl/","SSL_bm")
-dump_result(Stor,"../../results/ssl/","SSL_vm")
+dump_result(Stor, "../../results/ramspeed/", "Ramspeed_bm")
+dump_result(Stor, "../../results/ramspeed/", "Ramspeed_vm")
 
-#dump_result(Stor,"../../results/dpi/","DPI_bm")
-#dump_result(Stor,"../../results/dpi/","DPI_vm")
+dump_result(Stor, "../../results/ssl/", "SSL_bm")
+dump_result(Stor, "../../results/ssl/", "SSL_vm")
 
 Stor.append(Paragraph("Network Results", Style['h2']))
-dump_result(Stor,"../../results/iperf/","IPERF_bm")
-dump_result(Stor,"../../results/iperf/","IPERF_vm")
-dump_result(Stor,"../../results/iperf/","IPERF_vm_2")
+dump_result(Stor, "../../results/iperf/", "IPERF_bm")
+dump_result(Stor, "../../results/iperf/", "IPERF_vm")
+dump_result(Stor, "../../results/iperf/", "IPERF_vm_2")
 
 Stor.append(Paragraph("Storage Results", Style['h2']))
-dump_result(Stor,"../../results/fio/","fio_bm")
-dump_result(Stor,"../../results/fio/","fio_vm")
+dump_result(Stor, "../../results/fio/", "fio_bm")
+dump_result(Stor, "../../results/fio/", "fio_vm")
 
 
 doc.build(Stor)
-#canvas.save()
index e23fdb8..91219c0 100644 (file)
@@ -1,8 +1,8 @@
 import json
 
+
 def get_index(suite):
-    with open ('../../results/'+suite+'.json') as result_file:
-         result_djson=json.load(result_file)
-         index=result_djson['index']
-        
+    with open('../../results/' + suite + '.json') as result_file:
+        result_djson = json.load(result_file)
+        index = result_djson['index']
     return index
index 01fb808..23fd538 100644 (file)
@@ -2,48 +2,49 @@ import os
 import json
 
 
-def report_concat (targ_dir, testcase):
-    machine_temp=[];
-    machines=[];
-    diction={};
+def report_concat(targ_dir, testcase):
+    machine_temp = []
+    machines = []
 
     for file in os.listdir(targ_dir):
         if file.endswith(".json"):
             machine_temp.append(file)
 
-    l=len(machine_temp)
+    l = len(machine_temp)
 
-    for x in range (0,l):
-        file_t=machine_temp[x]
-        with open (targ_dir+file_t) as result_file:
-            result_djson=json.load(result_file)
+    for x in range(0, l):
+        file_t = machine_temp[x]
+        with open(targ_dir + file_t) as result_file:
+            result_djson = json.load(result_file)
             if result_djson['1  Testcase Name'] == str(testcase):
                 machines.append(result_djson)
     return machines
 
+
 def space_count(l):
-    spc=''
+    spc = ''
     for x in range(l):
-        spc=spc+' '
+        spc = spc + ' '
     return spc
 
 
-def custom_dict(list1,list2,k):
-    string_1=''
-    for num_1 in range (0,len(list1)):
-        string_1=string_1+space_count(k)+str(list1[num_1][0])+"="+str(list2[num_1])+"\n"
+def custom_dict(list1, list2, k):
+    string_1 = ''
+    for num_1 in range(0, len(list1)):
+        string_1 = string_1 + space_count(k) + str(list1[num_1][0]) + "=" + str(list2[num_1]) + "\n"
     return string_1
 
-def generate_result(dict_a,k):
-    list_1=[]
-    list_2=[]
-    count=0
-    for i,j in sorted(dict_a.iteritems()):
+
+def generate_result(dict_a, k):
+    list_1 = []
+    list_2 = []
+    count = 0
+    for i, j in sorted(dict_a.iteritems()):
         list_1.append([])
         list_1[count].append(i)
         if (str(type(dict_a.get(i)))) == "<type 'dict'>":
-            list_2.append(str("\n"+generate_result(dict_a.get(i),int(k+1))))
+            list_2.append(str("\n" + generate_result(dict_a.get(i), int(k + 1))))
         else:
             list_2.append(dict_a.get(i))
-        count=count+1
-    return custom_dict(list_1,list_2,k)
+        count = count + 1
+    return custom_dict(list_1, list_2, k)
index d7e6414..acbda40 100644 (file)
@@ -1,29 +1,29 @@
 import matplotlib
 matplotlib.use('Agg')
 import matplotlib.pyplot as plt
 import numpy as np
 
-def plot_indices(a,b,c):
-    N=3
-    ind= np.arange(N)
-    y_axis = (a,b,c )
-    width=0.35
-    f=plt.figure()
-    ax=f.gca()
+
+def plot_indices(a, b, c):
+    N = 3
+    ind = np.arange(N)
+    y_axis = (a, b, c)
+    width = 0.35
+    f = plt.figure()
+    ax = f.gca()
     ax.set_autoscale_on(True)
-    my_bars=ax.bar(ind,y_axis,width, color='b')
+    my_bars = ax.bar(ind, y_axis, width, color='b')
     ax.set_ylabel('Index Score*')
     ax.set_xlabel('Suite')
     ax.set_title(' QTIP benchmark scores')
     ax.axis('on')
-    my_bars=ax.bar(ind,y_axis,width)
-    ax.set_xticks(ind+width/2)
-    ax.set_xticklabels(['Compute','Storage','Network'])
-    ax.axis([0,3,0,1.25])
-    f.text(0.7,0.01,'* With Comparison to Refernece POD', fontsize=9)
+    my_bars = ax.bar(ind, y_axis, width)
+    ax.set_xticks(ind + width / 2)
+    ax.set_xticklabels(['Compute', 'Storage', 'Network'])
+    ax.axis([0, 3, 0, 1.25])
+    f.text(0.7, 0.01, '* With Comparison to Refernece POD', fontsize=9)
 
     for rect in my_bars:
         height = rect.get_height()
-        ax.text(rect.get_x() + rect.get_width()/2., 1.05*height, height , ha='center', va='bottom')
-
+        ax.text(rect.get_x() + rect.get_width() / 2., 1.05 * height, height, ha='center', va='bottom')
     f.savefig('qtip_graph.jpeg')