Skip to content

Commit b4e6d74

Browse files
author
Alberto Sonnino
committed
Merge branch 'optimistic-sync' into main
2 parents 12e485f + 6455c7b commit b4e6d74

41 files changed

Lines changed: 811 additions & 721 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

benchmark/aws/instance.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -163,7 +163,7 @@ def create_instances(self, instances):
163163
client.run_instances(
164164
ImageId=self._get_ami(client),
165165
InstanceType=self.settings.instance_type,
166-
KeyName='aws',
166+
KeyName=self.settings.key_name,
167167
MaxCount=instances,
168168
MinCount=instances,
169169
SecurityGroups=[self.SECURITY_GROUP_NAME],

benchmark/aws/remote.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -75,12 +75,11 @@ def install(self):
7575
hosts = self.manager.hosts(flat=True)
7676
try:
7777
g = Group(*hosts, user='ubuntu', connect_kwargs=self.connect)
78-
output = g.run(' && '.join(cmd), hide=True)
79-
self._check_stderr(output)
78+
g.run(' && '.join(cmd), hide=True)
8079
Print.heading(f'Initialized testbed of {len(hosts)} nodes')
81-
except GroupException as e:
82-
error = FabricError(e)
83-
raise BenchError('Failed to install repo on testbed', error)
80+
except (GroupException, ExecutionError) as e:
81+
e = FabricError(e) if isinstance(e, GroupException) else e
82+
raise BenchError('Failed to install repo on testbed', e)
8483

8584
def kill(self, hosts=[], delete_logs=False):
8685
assert isinstance(hosts, list)

benchmark/aws/settings.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,17 +6,22 @@ class SettingsError(Exception):
66

77

88
class Settings:
9-
def __init__(self, key_path, consensus_port, mempool_port, front_port, repo_name,
9+
def __init__(self, key_name, key_path, consensus_port, mempool_port, front_port, repo_name,
1010
repo_url, branch, instance_type, aws_regions):
11-
regions = aws_regions if isinstance(aws_regions, list) else [aws_regions]
12-
inputs_str = [key_path, repo_name, repo_url, branch, instance_type] + regions
11+
regions = aws_regions if isinstance(
12+
aws_regions, list) else [aws_regions]
13+
inputs_str = [
14+
key_name, key_path, repo_name, repo_url, branch, instance_type
15+
]
16+
inputs_str += regions
1317
inputs_int = [consensus_port, mempool_port, front_port]
1418
ok = all(isinstance(x, str) for x in inputs_str)
1519
ok &= all(isinstance(x, int) for x in inputs_int)
1620
ok &= len(regions) > 0
1721
if not ok:
1822
raise SettingsError('Invalid settings types')
1923

24+
self.key_name = key_name
2025
self.key_path = key_path
2126

2227
self.consensus_port = consensus_port
@@ -37,7 +42,8 @@ def load(cls, filename):
3742
data = load(f)
3843

3944
return cls(
40-
data['key_path'],
45+
data['key']['name'],
46+
data['key']['path'],
4147
data['ports']['consensus'],
4248
data['ports']['mempool'],
4349
data['ports']['front'],
@@ -51,4 +57,4 @@ def load(cls, filename):
5157
raise SettingsError(str(e))
5258

5359
except KeyError as e:
54-
raise SettingsError(f'Malformed settings: missing key {e}')
60+
raise SettingsError(f'Malformed settings: missing key {e}')

benchmark/benchmark/aggregator.py

Lines changed: 25 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,14 @@ def __init__(self, nodes, rate, tx_size):
1414
self.nodes = nodes
1515
self.rate = rate
1616
self.tx_size = tx_size
17+
self.max_latency = 'any'
1718

1819
def __str__(self):
1920
return (
2021
f' Committee size: {self.nodes} nodes\n'
21-
f' Input rate: {self.rate} txs\n'
22+
f' Input rate: {self.rate} tx/s\n'
2223
f' Transaction size: {self.tx_size} B\n'
24+
f' Max latency: {self.max_latency} ms\n'
2325
)
2426

2527
def __eq__(self, other):
@@ -104,7 +106,10 @@ def print(self):
104106
'-----------------------------------------\n'
105107
)
106108
filename = PathMaker.agg_file(
107-
setup.nodes, setup.rate, setup.tx_size
109+
setup.nodes,
110+
setup.rate,
111+
setup.tx_size,
112+
max_latency=setup.max_latency
108113
)
109114
with open(filename, 'w') as f:
110115
f.write(string)
@@ -123,23 +128,26 @@ def _print_latency(self):
123128

124129
return organized
125130

126-
def _print_tps(self, max_latency=4000):
131+
def _print_tps(self, max_latencies=[2_000, 5_000]):
127132
records = deepcopy(self.records)
128133
organized = defaultdict(list)
129-
for setup, result in records.items():
130-
if result.mean_latency <= max_latency:
131-
nodes = setup.nodes
132-
setup.nodes = 'x'
133-
setup.rate = 'any'
134-
135-
new_point = all(nodes != x[0] for x in organized[setup])
136-
highest_tps = False
137-
for w, r in organized[setup]:
138-
if result.mean_tps > r.mean_tps and nodes == w:
139-
organized[setup].remove((w, r))
140-
highest_tps = True
141-
if new_point or highest_tps:
142-
organized[setup] += [(nodes, result)]
134+
for max_latency in max_latencies:
135+
for setup, result in records.items():
136+
setup = deepcopy(setup)
137+
if result.mean_latency <= max_latency:
138+
nodes = setup.nodes
139+
setup.nodes = 'x'
140+
setup.rate = 'any'
141+
setup.max_latency = max_latency
142+
143+
new_point = all(nodes != x[0] for x in organized[setup])
144+
highest_tps = False
145+
for w, r in organized[setup]:
146+
if result.mean_tps > r.mean_tps and nodes == w:
147+
organized[setup].remove((w, r))
148+
highest_tps = True
149+
if new_point or highest_tps:
150+
organized[setup] += [(nodes, result)]
143151

144152
[v.sort(key=lambda x: x[0]) for v in organized.values()]
145153
return organized

benchmark/benchmark/config.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,7 @@ def __init__(self, json):
9595
inputs += [json['consensus']['max_payload_size']]
9696
inputs += [json['consensus']['min_block_delay']]
9797
inputs += [json['mempool']['queue_capacity']]
98+
inputs += [json['consensus']['sync_retry_delay']]
9899
inputs += [json['mempool']['max_payload_size']]
99100
inputs += [json['mempool']['min_block_delay']]
100101
except KeyError as e:

benchmark/benchmark/logs.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,9 @@
11
from datetime import datetime
22
from glob import glob
3-
from itertools import repeat
43
from multiprocessing import Pool
54
from os.path import join
65
from re import findall, search
7-
from statistics import mean, median_grouped, stdev
6+
from statistics import mean
87

98
from benchmark.utils import Print
109

benchmark/benchmark/plot.py

Lines changed: 19 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from re import findall, search
1+
from re import findall, search, split
22
import matplotlib.pyplot as plt
33
from matplotlib.ticker import StrMethodFormatter
44
from glob import glob
@@ -15,6 +15,7 @@ def __init__(self, filenames):
1515
if not filenames:
1616
raise PlotError('No data to plot')
1717

18+
filenames.sort(key=self._natural_keys)
1819
self.results = []
1920
try:
2021
for filename in filenames:
@@ -23,6 +24,10 @@ def __init__(self, filenames):
2324
except OSError as e:
2425
raise PlotError(f'Failed to load log files: {e}')
2526

27+
def _natural_keys(self, text):
28+
def try_cast(text): return int(text) if text.isdigit() else text
29+
return [try_cast(c) for c in split('(\d+)', text)]
30+
2631
def _tps(self, data):
2732
values = findall(r' TPS: (\d+) \+/- (\d+)', data)
2833
values = [(int(x), int(y)) for x, y in values]
@@ -46,7 +51,7 @@ def _bps2tps(self, x):
4651
size = int(search(r'Transaction size: (\d+)', data).group(1))
4752
return x * 10**6 / size
4853

49-
def _plot(self, x_label, y_label, y_axis, z_axis, filename):
54+
def _plot(self, x_label, y_label, y_axis, z_axis, type):
5055
plt.figure()
5156
for result in self.results:
5257
y_values, y_err = y_axis(result)
@@ -58,12 +63,14 @@ def _plot(self, x_label, y_label, y_axis, z_axis, filename):
5863
x_values, y_values, yerr=y_err, # uplims=True, lolims=True,
5964
marker='o', label=z_axis(result), linestyle='dotted'
6065
)
66+
# if type == 'latency':
67+
# plt.yscale('log')
6168

6269
plt.xlim(xmin=0)
6370
plt.ylim(bottom=0)
6471
plt.xlabel(x_label)
6572
plt.ylabel(y_label[0])
66-
plt.legend(loc='upper right')
73+
plt.legend(loc='upper left')
6774
ax = plt.gca()
6875
ax.xaxis.set_major_formatter(StrMethodFormatter('{x:,.0f}'))
6976
ax.yaxis.set_major_formatter(StrMethodFormatter('{x:,.0f}'))
@@ -75,7 +82,7 @@ def _plot(self, x_label, y_label, y_axis, z_axis, filename):
7582
secaxy.yaxis.set_major_formatter(StrMethodFormatter('{x:,.0f}'))
7683

7784
for x in ['pdf', 'png']:
78-
plt.savefig(PathMaker.plot_file(filename, x), bbox_inches='tight')
85+
plt.savefig(PathMaker.plot_file(type, x), bbox_inches='tight')
7986

8087
@staticmethod
8188
def nodes(data):
@@ -86,13 +93,18 @@ def nodes(data):
8693
def tx_size(data):
8794
return search(r'Transaction size: .*', data).group(0)
8895

96+
@staticmethod
97+
def max_latency(data):
98+
x = search(r'Max latency: (\d+)', data).group(1)
99+
return f'Max latency: {float(x) / 1000:,.0f} s'
100+
89101
@classmethod
90102
def plot_robustness(cls, z_axis):
91103
assert hasattr(z_axis, '__call__')
92104
x_label = 'Input rate (tx/s)'
93105
y_label = ['Throughput (tx/s)', 'Throughput (MB/s)']
94106

95-
files = glob(PathMaker.agg_file(r'[0-9]*', 'x', r'*'))
107+
files = glob(PathMaker.agg_file(r'[0-9]*', 'x', r'*', 'any'))
96108
ploter = cls(files)
97109
ploter._plot(x_label, y_label, ploter._tps, z_axis, 'robustness')
98110

@@ -102,7 +114,7 @@ def plot_latency(cls, z_axis):
102114
x_label = 'Throughput (tx/s)'
103115
y_label = ['Latency (ms)']
104116

105-
files = glob(PathMaker.agg_file(r'[0-9]*', 'any', r'*'))
117+
files = glob(PathMaker.agg_file(r'[0-9]*', 'any', r'*', 'any'))
106118
ploter = cls(files)
107119
ploter._plot(x_label, y_label, ploter._latency, z_axis, 'latency')
108120

@@ -112,6 +124,6 @@ def plot_tps(cls, z_axis):
112124
x_label = 'Committee size'
113125
y_label = ['Throughput (tx/s)', 'Throughput (MB/s)']
114126

115-
files = glob(PathMaker.agg_file('x', 'any', r'*'))
127+
files = glob(PathMaker.agg_file('x', 'any', r'*', r'*'))
116128
ploter = cls(files)
117129
ploter._plot(x_label, y_label, ploter._tps, z_axis, 'tps')

benchmark/benchmark/utils.py

Lines changed: 20 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
from os.path import join
2+
3+
14
class BenchError(Exception):
25
def __init__(self, message, error):
36
assert isinstance(error, Exception)
@@ -9,11 +12,11 @@ def __init__(self, message, error):
912
class PathMaker:
1013
@staticmethod
1114
def binary_path():
12-
return '../target/release/'
15+
return join('..', 'target', 'release')
1316

1417
@staticmethod
1518
def node_crate_path():
16-
return '../node'
19+
return join('..', 'node')
1720

1821
@staticmethod
1922
def committee_file():
@@ -35,37 +38,42 @@ def db_path(i):
3538

3639
@staticmethod
3740
def logs_path():
38-
return './logs'
41+
return 'logs'
3942

4043
@staticmethod
4144
def node_log_file(i):
4245
assert isinstance(i, int) and i >= 0
43-
return f'{PathMaker.logs_path()}/node-{i}.log'
46+
return join(PathMaker.logs_path(), f'node-{i}.log')
4447

4548
@staticmethod
4649
def client_log_file(i):
4750
assert isinstance(i, int) and i >= 0
48-
return f'{PathMaker.logs_path()}/client-{i}.log'
51+
return join(PathMaker.logs_path(), f'client-{i}.log')
4952

5053
@staticmethod
5154
def results_path():
52-
return './results'
55+
return 'results'
5356

5457
@staticmethod
5558
def result_file(nodes, rate, tx_size):
56-
return f'{PathMaker.results_path()}/bench-{nodes}-{rate}-{tx_size}.txt'
59+
return join(
60+
PathMaker.results_path(), f'bench-{nodes}-{rate}-{tx_size}.txt'
61+
)
5762

5863
@staticmethod
5964
def plots_path():
60-
return './plots'
65+
return 'plots'
6166

6267
@staticmethod
63-
def agg_file(nodes, rate, tx_size):
64-
return f'{PathMaker.plots_path()}/agg-{nodes}-{rate}-{tx_size}.txt'
68+
def agg_file(nodes, rate, tx_size, max_latency):
69+
return join(
70+
PathMaker.plots_path(),
71+
f'agg-{nodes}-{rate}-{tx_size}-{max_latency}.txt'
72+
)
6573

6674
@staticmethod
6775
def plot_file(name, ext):
68-
return f'{PathMaker.plots_path()}/{name}.{ext}'
76+
return join(PathMaker.plots_path(), f'{name}.{ext}')
6977

7078

7179
class Color:
@@ -110,6 +118,7 @@ def error(e):
110118

111119
def progress_bar(iterable, prefix='', suffix='', decimals=1, length=30, fill='█', print_end='\r'):
112120
total = len(iterable)
121+
113122
def printProgressBar(iteration):
114123
formatter = '{0:.'+str(decimals)+'f}'
115124
percent = formatter.format(100 * (iteration / float(total)))

0 commit comments

Comments
 (0)