
Commit

Correct num data points
moraygrieve committed Dec 11, 2024
1 parent 228a11f commit 5efadf1
Showing 8 changed files with 21 additions and 24 deletions.
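
In short: bulk throughput was previously computed from a separately accumulated txs_sent counter, while the latency statistics were computed from the samples read back out of the client latency logs. This commit makes process_latency also return the number of samples it read (num_data) and uses that as the throughput numerator, so the rate and the latency statistics describe the same set of data points. A minimal stand-alone sketch of the corrected pattern (file names, sample values, and the 3-second window below are illustrative only, not taken from the repository):

import os, tempfile, time

def process_latency(latency_files):
    # Gather every latency sample across all client logs into one sorted list.
    data = []
    for path in latency_files:
        with open(path) as fp:
            data.extend(float(line.strip()) for line in fp if line.strip())
    data.sort()
    num_data = len(data)                 # this count now also drives the throughput calculation
    avg_latency = sum(data) / num_data
    return num_data, avg_latency

# Fabricate one client's latency log purely for demonstration.
tmp = tempfile.NamedTemporaryFile('w', suffix='_latency.log', delete=False)
tmp.write('0.12\n0.15\n0.11\n')
tmp.close()

start_ns = time.perf_counter_ns()
end_ns = start_ns + int(3e9)             # pretend the measured window was 3 seconds

num_data, avg_latency = process_latency([tmp.name])
bulk_throughput = float(num_data) / float((end_ns - start_ns) / 1e9)
print('%d samples, avg latency %.2f, bulk %.2f requests/sec' % (num_data, avg_latency, bulk_throughput))
os.unlink(tmp.name)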
2 changes: 1 addition & 1 deletion tests/ten/ten_per_002/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
2 changes: 1 addition & 1 deletion tests/ten/ten_per_003/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
2 changes: 1 addition & 1 deletion tests/ten/ten_per_004/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
2 changes: 1 addition & 1 deletion tests/ten/ten_per_005/run.py
@@ -1,4 +1,4 @@
-import os, time, re
+import os, time
 from datetime import datetime
 from collections import OrderedDict
 from pysys.constants import PASSED
4 changes: 2 additions & 2 deletions tests/ten/ten_per_006/run.py
@@ -1,8 +1,8 @@
-import os, time, sys, re
+import os, time, sys
 import numpy as np
 from datetime import datetime
 from collections import OrderedDict
-from pysys.constants import PASSED, FAILED
+from pysys.constants import PASSED
 from ten.test.basetest import TenNetworkTest
 from ten.test.utils.gnuplot import GnuplotHelper
 
11 changes: 5 additions & 6 deletions tests/ten/ten_per_010/run.py
@@ -29,16 +29,14 @@ def execute(self):
             self.run_client('client_%s' % i, network, self.ITERATIONS, start_ns, out_dir, signal)
 
         with open(signal, 'w') as sig: sig.write('go')
-        txs_sent = 0
         for i in range(0, clients):
             stdout = os.path.join(out_dir, 'client_%s.out' % i)
             self.waitForGrep(file=stdout, expr='Client client_%s completed' % i, timeout=300)
             self.ratio_failures(file=stdout)
-            txs_sent += self.txs_sent(stdout)
 
         end_ns = time.perf_counter_ns()
-        bulk_throughput = float(txs_sent) / float((end_ns-start_ns)/1e9)
-        avg_latency, mode_latency = self.process_latency(clients, out_dir)
+        num_data, avg_latency, mode_latency = self.process_latency(clients, out_dir)
+        bulk_throughput = float(num_data) / float((end_ns-start_ns)/1e9)
         throughput = self.process_throughput(clients, out_dir, start_ns, end_ns)
         self.log.info('Bulk rate throughput %.2f (requests/sec)' % bulk_throughput)
         self.log.info('Approx. throughput %.2f (requests/sec)' % throughput)
@@ -87,7 +85,8 @@ def process_latency(self, num_clients, out_dir):
             with open(os.path.join(out_dir, 'client_%s_latency.log' % i), 'r') as fp:
                 for line in fp.readlines(): data.append(float(line.strip()))
         data.sort()
-        avg_latency = (sum(data) / len(data))
+        num_data = len(data)
+        avg_latency = (sum(data) / num_data)
 
         bins = self.bin_array(data)
         max_value = 0
@@ -99,7 +98,7 @@ def process_latency(self, num_clients, out_dir):
                 mode_latency = b
             fp.write('%.2f %d\n' % (b, v))
         fp.flush()
-        return avg_latency, mode_latency
+        return num_data, avg_latency, mode_latency
 
     def process_throughput(self, num_clients, out_dir, start, end):
         client_bins = [] # bins for a given client
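
For orientation, the mode_latency seen above comes from binning the sorted samples and taking the densest bin. The test's bin_array helper is not shown in this diff, so the version below is a hypothetical stand-in built on numpy, illustrating how (num_data, avg_latency, mode_latency) now travel together out of one pass over the data:

import numpy as np

def bin_array(data, nbins=20):
    # Hypothetical stand-in for the test helper: histogram the samples,
    # yielding (bin_start, count) pairs.
    counts, edges = np.histogram(data, bins=nbins)
    return list(zip(edges[:-1], counts))

def latency_stats(samples):
    data = sorted(samples)
    num_data = len(data)
    avg_latency = sum(data) / num_data
    max_value, mode_latency = 0, None
    for b, v in bin_array(data):
        if v > max_value:                # densest bin approximates the mode
            max_value, mode_latency = v, b
    return num_data, avg_latency, mode_latency

print(latency_stats([0.10, 0.12, 0.12, 0.13, 0.40]))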
11 changes: 5 additions & 6 deletions tests/ten/ten_per_011/run.py
@@ -44,19 +44,17 @@ def execute(self):
             self.run_client('client_%s' % i, network, self.ITERATIONS, storage, start_ns, out_dir, signal)
 
         with open(signal, 'w') as sig: sig.write('go')
-        txs_sent = 0
         for i in range(0, clients):
             stdout = os.path.join(out_dir, 'client_%s.out' % i)
             self.waitForGrep(file=stdout, expr='Client client_%s completed' % i, timeout=300)
             self.ratio_failures(file=stdout)
-            txs_sent += self.txs_sent(stdout)
 
         # stop transacting to set the storage value
         hprocess.stop()
 
         end_ns = time.perf_counter_ns()
-        bulk_throughput = float(txs_sent) / float((end_ns - start_ns) / 1e9)
-        avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        num_data, avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        bulk_throughput = float(num_data) / float((end_ns - start_ns) / 1e9)
         throughput = self.process_throughput(clients, out_dir, start_ns, end_ns)
         self.log.info('Bulk rate throughput %.2f (requests/sec)' % bulk_throughput)
         self.log.info('Approx. throughput %.2f (requests/sec)' % throughput)
@@ -128,7 +126,8 @@ def process_latency(self, num_clients, out_dir):
             with open(os.path.join(out_dir, 'client_%s_latency.log' % i), 'r') as fp:
                 for line in fp.readlines(): data.append(float(line.strip()))
         data.sort()
-        avg_latency = (sum(data) / len(data))
+        num_data = len(data)
+        avg_latency = (sum(data) / num_data)
         nnth_percentile = np.percentile(data, 99)
 
         bins = self.bin_array(data)
@@ -141,7 +140,7 @@ def process_latency(self, num_clients, out_dir):
                 mode_latency = b
             fp.write('%.2f %d\n' % (b, v))
         fp.flush()
-        return avg_latency, mode_latency, nnth_percentile
+        return num_data, avg_latency, mode_latency, nnth_percentile
 
     def process_throughput(self, num_clients, out_dir, start, end):
         client_bins = [] # bins for a given client
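
ten_per_011 and ten_per_012 additionally report a 99th percentile (nnth_percentile) alongside the mean and mode, and the same num_data count now rides along in the return tuple. The percentile step itself is plain numpy (sample values below are illustrative):

import numpy as np

data = sorted([0.10, 0.11, 0.12, 0.13, 0.35, 0.90])
num_data = len(data)
nnth_percentile = np.percentile(data, 99)    # 99th percentile over the same samples
print(num_data, nnth_percentile)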
11 changes: 5 additions & 6 deletions tests/ten/ten_per_012/run.py
@@ -44,19 +44,17 @@ def execute(self):
             self.run_client('client_%s' % i, network, self.ITERATIONS, storage, funds_needed, start_ns, out_dir, signal)
 
         with open(signal, 'w') as sig: sig.write('go')
-        txs_sent = 0
         for i in range(0, clients):
             stdout = os.path.join(out_dir, 'client_%s.out' % i)
             self.waitForGrep(file=stdout, expr='Client client_%s completed' % i, timeout=600)
             self.ratio_failures(file=stdout)
-            txs_sent += self.txs_sent(stdout)
 
         # stop transacting to set the storage value
         hprocess.stop()
 
         end_ns = time.perf_counter_ns()
-        bulk_throughput = float(txs_sent) / float((end_ns - start_ns) / 1e9)
-        avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        num_data, avg_latency, mode_latency, nnth_percentile = self.process_latency(clients, out_dir)
+        bulk_throughput = float(num_data) / float((end_ns - start_ns) / 1e9)
         throughput = self.process_throughput(clients, out_dir, start_ns, end_ns)
         self.log.info('Bulk rate throughput %.2f (requests/sec)' % bulk_throughput)
         self.log.info('Approx. throughput %.2f (requests/sec)' % throughput)
@@ -128,7 +126,8 @@ def process_latency(self, num_clients, out_dir):
             with open(os.path.join(out_dir, 'client_%s_latency.log' % i), 'r') as fp:
                 for line in fp.readlines(): data.append(float(line.strip()))
         data.sort()
-        avg_latency = (sum(data) / len(data))
+        num_data = len(data)
+        avg_latency = (sum(data) / num_data)
         nnth_percentile = np.percentile(data, 99)
 
         bins = self.bin_array(data)
@@ -141,7 +140,7 @@ def process_latency(self, num_clients, out_dir):
                 mode_latency = b
             fp.write('%.2f %d\n' % (b, v))
         fp.flush()
-        return avg_latency, mode_latency, nnth_percentile
+        return num_data, avg_latency, mode_latency, nnth_percentile
 
     def process_throughput(self, num_clients, out_dir, start, end):
         client_bins = [] # bins for a given client
