Skip to content

Commit

Permalink
fix(mocknet): various improvements (#3017)
Browse files Browse the repository at this point in the history
* Support for sharded mocknet (these machine names have the `sharded-` prefix)
* Single function for downloading logs from all nodes (helpful in gathering logs for debugging issues)
* Use a single block hash for all transactions in the load test instead of constantly pinging the status endpoint (this reduces the load on the node which is not related to receiving and processing transactions, which is the core functionality we are benchmarking)
* Ensure `Metrics` can handle the case where `near_block_processing_time` is missing (which occasionally happens for some reason)
  • Loading branch information
birchmd authored Jul 22, 2020
1 parent 4ff207c commit 04649e3
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 40 deletions.
24 changes: 17 additions & 7 deletions pytest/lib/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@
import requests
import time

# Upper bounds (as strings, matching the Prometheus `le` label values) of the
# near_block_processing_time histogram buckets. Used to build a zeroed default
# histogram when the metric is absent from a node's /metrics output.
BLOCK_TIME_BINS = [
'0.005', '0.01', '0.025', '0.05', '0.1', '0.25', '0.5', '1', '2.5', '5',
'10', '+Inf'
]


def fold(collection, key, f, default):
if key in collection:
Expand Down Expand Up @@ -39,14 +44,19 @@ def from_url(cls, metrics_url):
total_transactions = fold_sample('near_transaction_processed')
blocks_per_second = fold_sample('near_blocks_per_minute') / 60.0

block_processing_time_samples = prometheus_metrics[
'near_block_processing_time'].samples
def extract_block_processing_time(m):
    """Turn a near_block_processing_time histogram metric into a dict.

    Maps 'le <bound>' to the integer cumulative count of that bucket;
    samples carrying no 'le' label (e.g. _sum/_count) are skipped.
    """
    return {
        f"le {sample.labels['le']}": int(sample.value)
        for sample in m.samples
        if 'le' in sample.labels
    }

block_processing_time = {}
for sample in block_processing_time_samples:
if 'le' in sample.labels:
bound = sample.labels['le']
block_processing_time[f'le {bound}'] = int(sample.value)
block_processing_time = fold(
prometheus_metrics, 'near_block_processing_time',
extract_block_processing_time,
dict(map(lambda bin: ('le ' + bin, 0), BLOCK_TIME_BINS)))

return cls(total_blocks, memory_usage, total_transactions,
block_processing_time, timestamp, blocks_per_second)
Expand Down
17 changes: 13 additions & 4 deletions pytest/lib/mocknet.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,15 +45,16 @@
'''


def get_node(i):
n = GCloudNode(f'{NODE_BASE_NAME}{i}')
def get_node(i, prefix=''):
    """Return a GCloudNode for mocknet machine *i*, configured for SSH.

    Pass prefix='sharded-' to access the sharded mocknet machines.
    """
    node = GCloudNode(f'{prefix}{NODE_BASE_NAME}{i}')
    node.machine.username = NODE_USERNAME
    node.machine.ssh_key_path = NODE_SSH_KEY_PATH
    return node


def get_nodes():
return pmap(get_node, range(NUM_NODES))
def get_nodes(prefix=''):
    """Construct all NUM_NODES mocknet nodes in parallel.

    The prefix is forwarded to get_node ('sharded-' selects sharded mocknet).
    """
    return pmap(lambda index: get_node(index, prefix=prefix),
                range(NUM_NODES))


def create_target_dir(machine):
Expand Down Expand Up @@ -115,6 +116,14 @@ def start_load_test_helpers(nodes):
nodes)


def get_log(node):
    """Download a node's near.log into ./logs/<machine-name>.log.

    Creates the local ./logs directory if it does not exist yet, so the
    download does not fail on a fresh checkout.
    """
    import os
    m = node.machine
    # NOTE(review): presumably m.download does not create missing local
    # directories — creating ./logs up front makes this robust either way.
    os.makedirs('./logs', exist_ok=True)
    target_file = f'./logs/{m.name}.log'
    # Plain string literal: the remote path is constant (was a no-op f-string).
    m.download('/home/ubuntu/near.log', target_file)

def get_logs(nodes):
    """Download near.log from every given node, in parallel."""
    pmap(get_log, nodes)

def get_epoch_length_in_blocks(node):
m = node.machine
target_dir = create_target_dir(m)
Expand Down
55 changes: 26 additions & 29 deletions pytest/tests/mocknet/load_testing_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,80 +69,75 @@ def get_latest_block_hash():
return base58.b58decode(last_block_hash.encode('utf8'))


def send_transfer(source_account, dest_index, base_block_hash):
    """Send a 100-token transfer from source_account to account dest_index.

    base_block_hash is shared across all load-test transactions so that we do
    not ping the status endpoint for a fresh hash on every send.
    """
    alice = source_account
    bob = load_testing_account_id(dest_index)
    alice_nonce = get_nonce_for_pk(alice.account_id, alice.pk)
    transfer_amount = 100  # fixed typo: local was previously `tranfer_amount`
    tx = sign_payment_tx(alice, bob, transfer_amount, alice_nonce + 1,
                         base_block_hash)
    send_tx(tx)


def deploy_contract(source_account, base_block_hash):
    """Deploy the load-testing WASM contract to source_account's account."""
    wasm_binary = utils.load_binary_file(WASM_FILENAME)
    account_nonce = get_nonce_for_pk(source_account.account_id,
                                     source_account.pk)
    deploy_tx = sign_deploy_contract_tx(source_account, wasm_binary,
                                        account_nonce + 1, base_block_hash)
    send_tx(deploy_tx)


def call_contract(source_account, base_block_hash):
    """Invoke 'do_work' on the contract deployed to source_account itself."""
    account_nonce = get_nonce_for_pk(source_account.account_id,
                                     source_account.pk)
    call_tx = sign_function_call_tx(source_account, source_account.account_id,
                                    'do_work', [], 300000000000000, 0,
                                    account_nonce + 1, base_block_hash)
    send_tx(call_tx)


def create_account(source_account, base_block_hash):
    """Create a random 10-letter account with a 100-token balance.

    The new account reuses source_account's key pair as its full-access key.
    """
    account_nonce = get_nonce_for_pk(source_account.account_id,
                                     source_account.pk)
    fresh_account_id = ''.join(
        random.choice(string.ascii_lowercase) for _ in range(10))
    fresh_key = Key(fresh_account_id, source_account.pk, source_account.sk)
    create_tx = sign_create_account_with_full_access_key_and_balance_tx(
        source_account, fresh_account_id, fresh_key, 100, account_nonce + 1,
        base_block_hash)
    send_tx(create_tx)


def stake(source_account, base_block_hash):
    """Submit a staking transaction of 1 token from source_account."""
    account_nonce = get_nonce_for_pk(source_account.account_id,
                                     source_account.pk)
    stake_tx = sign_staking_tx(source_account, source_account, 1,
                               account_nonce + 1, base_block_hash)
    send_tx(stake_tx)


def random_transaction(account_and_index, base_block_hash):
    """Send one uniformly random transaction type from the given account.

    account_and_index is a (key, index) pair from test_accounts; the shared
    base_block_hash is forwarded to whichever sender is chosen.
    """
    account, index = account_and_index
    choice = random.randint(0, 3)
    if choice == 0:
        # Wrap the destination like send_transfers does, so the last test
        # account does not target the out-of-range index NUM_ACCOUNTS.
        send_transfer(account, (index + 1) % NUM_ACCOUNTS, base_block_hash)
    elif choice == 1:
        call_contract(account, base_block_hash)
    elif choice == 2:
        create_account(account, base_block_hash)
    else:
        stake(account, base_block_hash)


def send_transfers(base_block_hash):
    """Have every test account transfer to its ring successor, in parallel."""

    def transfer_to_next(account_and_index):
        account, index = account_and_index
        send_transfer(account, (index + 1) % NUM_ACCOUNTS, base_block_hash)

    pmap(transfer_to_next, test_accounts)


def send_random_transactions(base_block_hash):
    """Fire one random transaction per test account, in parallel."""

    def send_one(account_and_index):
        random_transaction(account_and_index, base_block_hash)

    pmap(send_one, test_accounts)


def throttle_txns(send_txns, total_tx_sent, elapsed_time, max_tps):
def throttle_txns(send_txns, total_tx_sent, elapsed_time, max_tps, base_block_hash):
start_time = time.time()
send_txns()
send_txns(base_block_hash)
duration = time.time() - start_time
total_tx_sent += NUM_ACCOUNTS
elapsed_time += duration
Expand All @@ -167,6 +162,8 @@ def throttle_txns(send_txns, total_tx_sent, elapsed_time, max_tps):
NUM_ACCOUNTS)
]

base_block_hash = get_latest_block_hash()

start_time = time.time()

# begin with only transfers for TPS measurement
Expand All @@ -175,19 +172,19 @@ def throttle_txns(send_txns, total_tx_sent, elapsed_time, max_tps):
while time.time() - start_time < TRANSFER_ONLY_TIMEOUT:
(total_tx_sent,
elapsed_time) = throttle_txns(send_transfers, total_tx_sent,
elapsed_time, MAX_TPS_PER_NODE)
elapsed_time, MAX_TPS_PER_NODE, base_block_hash)

# Ensure load testing contract is deployed to all accounts before
# starting to send random transactions (ensures we do not try to
# call the contract before it is deployed).
delay = CONTRACT_DEPLOY_TIME / NUM_ACCOUNTS
for x in test_accounts:
deploy_contract(x[0])
deploy_contract(x[0], base_block_hash)
time.sleep(delay)

# send all sorts of transactions
start_time = time.time()
while time.time() - start_time < ALL_TX_TIMEOUT:
(total_tx_sent,
elapsed_time) = throttle_txns(send_random_transactions, total_tx_sent,
elapsed_time, MAX_TPS_PER_NODE)
elapsed_time, MAX_TPS_PER_NODE, base_block_hash)

0 comments on commit 04649e3

Please sign in to comment.