diff --git a/tests/ten/ten_per_002/run.py b/tests/ten/ten_per_002/run.py
index 92b3ec09..c6685bc9 100644
--- a/tests/ten/ten_per_002/run.py
+++ b/tests/ten/ten_per_002/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
diff --git a/tests/ten/ten_per_003/run.py b/tests/ten/ten_per_003/run.py
index d2e07f91..b2679390 100644
--- a/tests/ten/ten_per_003/run.py
+++ b/tests/ten/ten_per_003/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
diff --git a/tests/ten/ten_per_004/run.py b/tests/ten/ten_per_004/run.py
index a4ba0d84..782be672 100644
--- a/tests/ten/ten_per_004/run.py
+++ b/tests/ten/ten_per_004/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
diff --git a/tests/ten/ten_per_005/run.py b/tests/ten/ten_per_005/run.py
index 8330bff8..0a5288a7 100644
--- a/tests/ten/ten_per_005/run.py
+++ b/tests/ten/ten_per_005/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
diff --git a/tests/ten/ten_per_006/run.py b/tests/ten/ten_per_006/run.py
index 33d162e4..db2245d1 100644
--- a/tests/ten/ten_per_006/run.py
+++ b/tests/ten/ten_per_006/run.py
@@ -1,8 +1,8 @@
-import os, time, sys, re
+import os, time, sys
 import numpy as np
 from datetime import datetime
 from collections import OrderedDict
-from pysys.constants import PASSED, FAILED
+from pysys.constants import PASSED
 from ten.test.basetest import TenNetworkTest
 from ten.test.utils.gnuplot import GnuplotHelper
diff --git a/tests/ten/ten_per_010/run.py b/tests/ten/ten_per_010/run.py
index f4aa03e6..2607406e 100644
--- a/tests/ten/ten_per_010/run.py
+++ b/tests/ten/ten_per_010/run.py
@@ -29,16 +29,14 @@ def execute(self):
             self.run_client('client_%s' % i, network, self.ITERATIONS, start_ns, out_dir, signal)
         with open(signal, 'w') as sig: sig.write('go')
 
-        txs_sent = 0
         for i in range(0, clients):
             stdout = os.path.join(out_dir, 'client_%s.out' % i)
             self.waitForGrep(file=stdout, expr='Client client_%s completed' % i, timeout=300)
             self.ratio_failures(file=stdout)
-            txs_sent += self.txs_sent(stdout)
         end_ns = time.perf_counter_ns()
 
-        bulk_throughput = float(txs_sent) / float((end_ns-start_ns)/1e9)
-        avg_latency, mode_latency = self.process_latency(clients, out_dir)
+        num_data, avg_latency, mode_latency = self.process_latency(clients, out_dir)
+        bulk_throughput = float(num_data) / float((end_ns-start_ns)/1e9)
         throughput = self.process_throughput(clients, out_dir, start_ns, end_ns)
         self.log.info('Bulk rate throughput %.2f (requests/sec)' % bulk_throughput)
         self.log.info('Approx. throughput %.2f (requests/sec)' % throughput)
@@ -87,7 +85,8 @@ def process_latency(self, num_clients, out_dir):
             with open(os.path.join(out_dir, 'client_%s_latency.log' % i), 'r') as fp:
                 for line in fp.readlines(): data.append(float(line.strip()))
         data.sort()
-        avg_latency = (sum(data) / len(data))
+        num_data = len(data)
+        avg_latency = (sum(data) / num_data)
 
         bins = self.bin_array(data)
         max_value = 0
@@ -99,7 +98,7 @@ def process_latency(self, num_clients, out_dir):
                 mode_latency = b
             fp.write('%.2f %d\n' % (b, v))
         fp.flush()
-        return avg_latency, mode_latency
+        return num_data, avg_latency, mode_latency
 
     def process_throughput(self, num_clients, out_dir, start, end):
         client_bins = [] # bins for a given client
diff --git a/tests/ten/ten_per_011/run.py b/tests/ten/ten_per_011/run.py
index 22b73364..b250dd73 100644
--- a/tests/ten/ten_per_011/run.py
+++ b/tests/ten/ten_per_011/run.py
@@ -44,19 +44,17 @@ def execute(self):
             self.run_client('client_%s' % i, network, self.ITERATIONS, storage, start_ns, out_dir, signal)
         with open(signal, 'w') as sig: sig.write('go')
 
-        txs_sent = 0
         for i in range(0, clients):
             stdout = os.path.join(out_dir, 'client_%s.out' % i)
             self.waitForGrep(file=stdout, expr='Client client_%s completed' % i, timeout=300)
             self.ratio_failures(file=stdout)
-            txs_sent += self.txs_sent(stdout)
 
         # stop transacting to set the storage value
         hprocess.stop()
         end_ns = time.perf_counter_ns()
 
-        bulk_throughput = float(txs_sent) / float((end_ns - start_ns) / 1e9)
-        avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        num_data, avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        bulk_throughput = float(num_data) / float((end_ns - start_ns) / 1e9)
         throughput = self.process_throughput(clients, out_dir, start_ns, end_ns)
         self.log.info('Bulk rate throughput %.2f (requests/sec)' % bulk_throughput)
         self.log.info('Approx. throughput %.2f (requests/sec)' % throughput)
@@ -128,7 +126,8 @@ def process_latency(self, num_clients, out_dir):
             with open(os.path.join(out_dir, 'client_%s_latency.log' % i), 'r') as fp:
                 for line in fp.readlines(): data.append(float(line.strip()))
         data.sort()
-        avg_latency = (sum(data) / len(data))
+        num_data = len(data)
+        avg_latency = (sum(data) / num_data)
         nnth_percentile = np.percentile(data, 99)
 
         bins = self.bin_array(data)
@@ -141,7 +140,7 @@ def process_latency(self, num_clients, out_dir):
                 mode_latency = b
             fp.write('%.2f %d\n' % (b, v))
         fp.flush()
-        return avg_latency, mode_latency, nnth_percentile
+        return num_data, avg_latency, mode_latency, nnth_percentile
 
     def process_throughput(self, num_clients, out_dir, start, end):
         client_bins = [] # bins for a given client
diff --git a/tests/ten/ten_per_012/run.py b/tests/ten/ten_per_012/run.py
index 2a01a1d5..52f5ba45 100644
--- a/tests/ten/ten_per_012/run.py
+++ b/tests/ten/ten_per_012/run.py
@@ -44,19 +44,17 @@ def execute(self):
             self.run_client('client_%s' % i, network, self.ITERATIONS, storage, funds_needed, start_ns, out_dir, signal)
         with open(signal, 'w') as sig: sig.write('go')
 
-        txs_sent = 0
         for i in range(0, clients):
             stdout = os.path.join(out_dir, 'client_%s.out' % i)
             self.waitForGrep(file=stdout, expr='Client client_%s completed' % i, timeout=600)
             self.ratio_failures(file=stdout)
-            txs_sent += self.txs_sent(stdout)
 
         # stop transacting to set the storage value
         hprocess.stop()
         end_ns = time.perf_counter_ns()
 
-        bulk_throughput = float(txs_sent) / float((end_ns - start_ns) / 1e9)
-        avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        num_data, avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        bulk_throughput = float(num_data) / float((end_ns - start_ns) / 1e9)
         throughput = self.process_throughput(clients, out_dir, start_ns, end_ns)
         self.log.info('Bulk rate throughput %.2f (requests/sec)' % bulk_throughput)
         self.log.info('Approx. throughput %.2f (requests/sec)' % throughput)
@@ -128,7 +126,8 @@ def process_latency(self, num_clients, out_dir):
             with open(os.path.join(out_dir, 'client_%s_latency.log' % i), 'r') as fp:
                 for line in fp.readlines(): data.append(float(line.strip()))
         data.sort()
-        avg_latency = (sum(data) / len(data))
+        num_data = len(data)
+        avg_latency = (sum(data) / num_data)
         nnth_percentile = np.percentile(data, 99)
 
         bins = self.bin_array(data)
@@ -141,7 +140,7 @@ def process_latency(self, num_clients, out_dir):
                 mode_latency = b
             fp.write('%.2f %d\n' % (b, v))
         fp.flush()
-        return avg_latency, mode_latency, nnth_percentile
+        return num_data, avg_latency, mode_latency, nnth_percentile
 
     def process_throughput(self, num_clients, out_dir, start, end):
         client_bins = [] # bins for a given client