Problem: gen-tx doesn't run in parallel for a single node (#1645)
* Problem: gen-tx doesn't run in parallel for a single node

Solution:
- use multiprocessing library to do parallel tx gen

* Update CHANGELOG.md

Signed-off-by: yihuang <huang@crypto.com>

* cleanup

---------

Signed-off-by: yihuang <huang@crypto.com>
yihuang authored Oct 17, 2024
1 parent 3b38bcc commit f3746f6
Showing 3 changed files with 49 additions and 10 deletions.
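
The Solution above boils down to the standard fan-out/fan-in pattern: split the account range into one chunk per CPU, sign each chunk's transactions in a separate worker process, and merge the results. A minimal, self-contained sketch of that pattern follows; the names sign_chunk and parallel_gen are illustrative only, not the benchmark's actual API:

    import multiprocessing
    import os


    def sign_chunk(chunk):
        # Stand-in for the real per-account signing work done in each worker.
        start, end = chunk
        return ["signed-tx-%d" % i for i in range(start, end)]


    def parallel_gen(num_accounts: int):
        # Split range(0, num_accounts) into one (start, end) chunk per CPU.
        n = os.cpu_count()
        k, m = divmod(num_accounts, n)
        chunks = [(i * k + min(i, m), (i + 1) * k + min(i + 1, m)) for i in range(n)]
        # Fan out to worker processes, then flatten the per-chunk results.
        with multiprocessing.Pool() as pool:
            results = pool.map(sign_chunk, chunks)
        return [tx for part in results for tx in part]


    if __name__ == "__main__":
        print(len(parallel_gen(10)))  # 10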
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -4,7 +4,8 @@

 ### Improvements

-* (testground)[#1644 ](https://github.com/crypto-org-chain/cronos/pull/1644) load generator retry with backoff on error.
+* [#1645](https://github.com/crypto-org-chain/cronos/pull/1645) Gen test tx in parallel even in single node.
+* (testground)[#1644](https://github.com/crypto-org-chain/cronos/pull/1644) load generator retry with backoff on error.

 *Oct 14, 2024*

48 changes: 39 additions & 9 deletions testground/benchmark/benchmark/transaction.py
@@ -1,4 +1,8 @@
 import asyncio
+import itertools
+import multiprocessing
+import os
+from collections import namedtuple
 from pathlib import Path

 import aiohttp
@@ -7,7 +11,7 @@
 import ujson

 from .erc20 import CONTRACT_ADDRESS
-from .utils import gen_account
+from .utils import gen_account, split

 GAS_PRICE = 1000000000
 CHAIN_ID = 777
@@ -48,17 +52,43 @@ def erc20_transfer_tx(nonce: int):
     }


+Job = namedtuple(
+    "Job", ["chunk", "global_seq", "num_accounts", "num_txs", "tx_type", "create_tx"]
+)
+
+
+def _do_job(job: Job):
+    accounts = [gen_account(job.global_seq, i + 1) for i in range(*job.chunk)]
+    acct_txs = []
+    total = 0
+    for acct in accounts:
+        txs = []
+        for i in range(job.num_txs):
+            txs.append(acct.sign_transaction(job.create_tx(i)).rawTransaction.hex())
+            total += 1
+            if total % 1000 == 0:
+                print("generated", total, "txs for node", job.global_seq)
+        acct_txs.append(txs)
+    return acct_txs
+
+
 def gen(global_seq, num_accounts, num_txs, tx_type: str) -> [str]:
-    accounts = [gen_account(global_seq, i + 1) for i in range(num_accounts)]
-    txs = []
+    chunks = split(num_accounts, os.cpu_count())
     create_tx = TX_TYPES[tx_type]
-    for i in range(num_txs):
-        for acct in accounts:
-            txs.append(acct.sign_transaction(create_tx(i)).rawTransaction.hex())
-            if len(txs) % 1000 == 0:
-                print("generated", len(txs), "txs for node", global_seq)
+    jobs = [
+        Job(chunk, global_seq, num_accounts, num_txs, tx_type, create_tx)
+        for chunk in chunks
+    ]
+
+    with multiprocessing.Pool() as pool:
+        acct_txs = pool.map(_do_job, jobs)
+
+    # mix the account txs together, ordered by nonce.
+    all_txs = []
+    for txs in itertools.zip_longest(*itertools.chain(*acct_txs)):
+        all_txs += txs

-    return txs
+    return all_txs


 def save(txs: [str], datadir: Path, global_seq: int):
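A note on the merge step at the end of gen: pool.map returns one list per chunk, each holding per-account lists of signed transactions in nonce order. itertools.chain(*acct_txs) flattens away the chunk level, and zip_longest(*...) then interleaves the accounts so every account's nonce-0 transaction precedes any nonce-1 transaction. A small illustration with made-up values (every account signs the same number of transactions, so zip_longest's None fill value never appears):

    import itertools

    # Two chunks, each holding per-account tx lists already ordered by nonce.
    acct_txs = [
        [["a0", "a1"], ["b0", "b1"]],  # chunk 1: accounts a and b
        [["c0", "c1"]],                # chunk 2: account c
    ]

    all_txs = []
    for txs in itertools.zip_longest(*itertools.chain(*acct_txs)):
        all_txs += txs

    print(all_txs)  # ['a0', 'b0', 'c0', 'a1', 'b1', 'c1']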
8 changes: 8 additions & 0 deletions testground/benchmark/benchmark/utils.py
@@ -171,3 +171,11 @@ def block(height):

 def block_txs(height):
     return block(height)["result"]["block"]["data"]["txs"]
+
+
+def split(a: int, n: int):
+    """
+    Split range(0, a) into n parts
+    """
+    k, m = divmod(a, n)
+    return [(i * k + min(i, m), (i + 1) * k + min(i + 1, m)) for i in range(n)]
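
For reference, split distributes the remainder over the leading chunks, so the pieces differ in length by at most one and together cover range(0, a) exactly. For example:

    >>> split(10, 3)
    [(0, 4), (4, 7), (7, 10)]
    >>> split(7, 4)
    [(0, 2), (2, 4), (4, 6), (6, 7)]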
