Benchmarking code (enable with --bench command-line switch)
jtoomim committed Sep 16, 2017
1 parent d2c6edc commit 9692d6e
Showing 4 changed files with 48 additions and 3 deletions.
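Every function touched by this commit follows the same pattern: record time.time() before and after the section of interest and, when the new module-level p2pool.BENCH flag is set, print the elapsed wall-clock time in milliseconds. A minimal sketch of that pattern, factored into a context manager for brevity — hypothetical, since the commit itself inlines the t0/t1 pairs at each call site, and it assumes the p2pool package (which holds the BENCH flag) is importable:

```python
import time
from contextlib import contextmanager

import p2pool  # module-level flags; p2pool.BENCH is the switch this commit adds


@contextmanager
def bench(label):
    # Hypothetical helper, not part of this commit: the commit writes
    # t0 = time.time() / t1 = time.time() pairs at each call site instead.
    t0 = time.time()
    try:
        yield
    finally:
        t1 = time.time()
        if p2pool.BENCH:
            print "%8.3f ms for %s" % ((t1 - t0) * 1000., label)
```

A call site would then read `with bench('p2p.py:handle_shares'): ...` in place of the explicit pair.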
16 changes: 15 additions & 1 deletion p2pool/bitcoin/helper.py
@@ -71,15 +71,29 @@ def go():
except jsonrpc.Error_for_code(-32601): # Method not found
print >>sys.stderr, 'Error: Bitcoin version too old! Upgrade to v0.5 or newer!'
raise deferral.RetrySilentlyException()
t0 = time.time()
packed_transactions = [(x['data'] if isinstance(x, dict) else x).decode('hex') for x in work['transactions']]
t1 = time.time()

if 'height' not in work:
work['height'] = (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
elif p2pool.DEBUG:
assert work['height'] == (yield bitcoind.rpc_getblock(work['previousblockhash']))['height'] + 1
unpacked_transactions = map(bitcoin_data.tx_type.unpack, packed_transactions)
t2 = time.time()
txhashes = map(bitcoin_data.hash256, packed_transactions)
t3 = time.time()
if p2pool.BENCH: print "Decoding transactions took %2.0f ms, Unpacking %2.0f ms, hashing %2.0f ms" % ((t1 - t0)*1000., (t2-t1)*1000., (t3-t2)*1000.)
import random
# quick sanity check: repacking a few random transactions should reproduce the original bytes
for i in range(min(10, len(unpacked_transactions))):
    n = random.randint(0, len(unpacked_transactions)-1)
    packed = bitcoin_data.tx_type.pack(unpacked_transactions[n])
    assert packed == packed_transactions[n]

defer.returnValue(dict(
version=work['version'],
previous_block=int(work['previousblockhash'], 16),
transactions=map(bitcoin_data.tx_type.unpack, packed_transactions),
transactions=unpacked_transactions,
transaction_hashes=map(bitcoin_data.hash256, packed_transactions),
transaction_fees=[x.get('fee', None) if isinstance(x, dict) else None for x in work['transactions']],
subsidy=work['coinbasevalue'],
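In helper.py the getblocktemplate processing is split into separately timed stages — hex-decoding the raw transactions, unpacking them with bitcoin_data.tx_type, and double-SHA256 hashing them — so the BENCH printout shows which stage dominates. A runnable stand-in for the same idea, using hashlib and dummy payloads instead of p2pool's bitcoin_data (the unpack stage is omitted; everything here is illustrative):

```python
import hashlib
import time

BENCH = True  # stands in for p2pool.BENCH
raw_hex_txs = ['00' * 60] * 1000  # dummy hex payloads, not real transactions

t0 = time.time()
packed = [x.decode('hex') for x in raw_hex_txs]  # stage 1: hex decode (Python 2 str API)
t1 = time.time()
hashes = [hashlib.sha256(hashlib.sha256(x).digest()).digest() for x in packed]  # stage 2: double SHA-256
t2 = time.time()
if BENCH:
    print "decoding took %2.0f ms, hashing took %2.0f ms" % ((t1 - t0) * 1000., (t2 - t1) * 1000.)
```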
4 changes: 4 additions & 0 deletions p2pool/main.py
@@ -456,6 +456,9 @@ def run():
parser.add_argument('--debug',
help='enable debugging mode',
action='store_const', const=True, default=False, dest='debug')
parser.add_argument('--bench',
help='enable CPU performance profiling mode',
action='store_const', const=True, default=False, dest='bench')
parser.add_argument('--rconsole',
help='enable rconsole debugging mode (requires rfoo)',
action='store_const', const=True, default=False, dest='rconsole')
@@ -555,6 +558,7 @@ def run():
defer.setDebugging(True)
else:
p2pool.DEBUG = False
p2pool.BENCH = args.bench

net_name = args.net_name + ('_testnet' if args.testnet else '')
net = networks.nets[net_name]
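The measurements are switched on by passing --bench on the p2pool command line next to existing options such as --debug; main.py then copies args.bench into the module-level p2pool.BENCH flag that the other files test. The action='store_const', const=True, default=False form mirrors the neighbouring --debug option and behaves like argparse's store_true, as this self-contained sketch shows (option wiring only; nothing outside the diff is implied):

```python
import argparse

parser = argparse.ArgumentParser()
# Equivalent to the commit's store_const/const=True/default=False form,
# which is kept in that shape only to match the surrounding options.
parser.add_argument('--bench', action='store_true',
                    help='enable CPU performance profiling mode')

args = parser.parse_args(['--bench'])
assert args.bench is True

args = parser.parse_args([])
assert args.bench is False
```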
22 changes: 22 additions & 0 deletions p2pool/p2p.py
@@ -206,6 +206,7 @@ def remove_from_remote_view_of_my_known_txs(removed):
self.connection_lost_event.watch(lambda: self.node.known_txs_var.removed.unwatch(watch_id1))

def update_remote_view_of_my_known_txs(before, after):
t0 = time.time()
added = set(after) - set(before)
removed = set(before) - set(after)
if added:
@@ -217,12 +218,15 @@ def update_remote_view_of_my_known_txs(before, after):
key = max(self.known_txs_cache) + 1 if self.known_txs_cache else 0
self.known_txs_cache[key] = dict((h, before[h]) for h in removed)
reactor.callLater(20, self.known_txs_cache.pop, key)
t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for update_remote_view_of_my_known_txs" % ((t1-t0)*1000.)
watch_id2 = self.node.known_txs_var.transitioned.watch(update_remote_view_of_my_known_txs)
self.connection_lost_event.watch(lambda: self.node.known_txs_var.transitioned.unwatch(watch_id2))

self.send_have_tx(tx_hashes=self.node.known_txs_var.value.keys())

def update_remote_view_of_my_mining_txs(before, after):
t0 = time.time()
added = set(after) - set(before)
removed = set(before) - set(after)
if removed:
@@ -232,6 +236,9 @@ def update_remote_view_of_my_mining_txs(before, after):
self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(after[x]) for x in added)
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for update_remote_view_of_my_mining_txs" % ((t1-t0)*1000.)

watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))

@@ -300,6 +307,7 @@ def handle_getaddrs(self, count):
('shares', pack.ListType(p2pool_data.share_type)),
])
def handle_shares(self, shares):
t0 = time.time()
result = []
for wrappedshare in shares:
if wrappedshare['type'] < p2pool_data.Share.VERSION: continue
@@ -326,8 +334,12 @@ def handle_shares(self, shares):
result.append((share, txs))

self.node.handle_shares(result, self)
t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for %i shares in handle_shares (%3.3f ms/share)" % ((t1-t0)*1000., len(shares), (t1-t0)*1000./ max(1, len(shares)))


def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):
t0 = time.time()
tx_hashes = set()
for share in shares:
if share.VERSION >= 13:
@@ -368,6 +380,9 @@ def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):
self.send_forget_tx(tx_hashes=hashes_to_send)

self.remote_remembered_txs_size -= new_tx_size
t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for %i shares in sendShares (%3.3f ms/share)" % ((t1-t0)*1000., len(shares), (t1-t0)*1000./ max(1, len(shares)))



message_sharereq = pack.ComposedType([
@@ -416,15 +431,20 @@ def handle_have_tx(self, tx_hashes):
('tx_hashes', pack.ListType(pack.IntType(256))),
])
def handle_losing_tx(self, tx_hashes):
t0 = time.time()
#assert self.remote_tx_hashes.issuperset(tx_hashes)
self.remote_tx_hashes.difference_update(tx_hashes)
t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for %i txs in handle_losing_tx (%3.3f ms/tx)" % ((t1-t0)*1000., len(tx_hashes), (t1-t0)*1000./ max(1, len(tx_hashes)))



message_remember_tx = pack.ComposedType([
('tx_hashes', pack.ListType(pack.IntType(256))),
('txs', pack.ListType(bitcoin_data.tx_type)),
])
def handle_remember_tx(self, tx_hashes, txs):
t0 = time.time()
for tx_hash in tx_hashes:
if tx_hash in self.remembered_txs:
print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
@@ -465,6 +485,8 @@ def handle_remember_tx(self, tx_hashes, txs):
self.node.known_txs_var.add(added_known_txs)
if self.remembered_txs_size >= self.max_remembered_txs_size:
raise PeerMisbehavingError('too much transaction data stored')
t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for %i txs in p2p.py:handle_remember_tx (%3.3f ms/tx)" % ((t1-t0)*1000., len(tx_hashes), ((t1-t0)*1000. / max(1,len(tx_hashes)) ))
message_forget_tx = pack.ComposedType([
('tx_hashes', pack.ListType(pack.IntType(256))),
])
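The p2p.py handlers report a total and a per-item figure (ms/share, ms/tx), dividing by max(1, len(...)) so the per-item number stays defined for an empty batch. A tiny sketch of that normalization (the helper name is illustrative, not from the diff):

```python
def per_item_ms(dt_seconds, n_items):
    # max(1, ...) avoids a ZeroDivisionError on empty batches, as in the diff
    return dt_seconds * 1000. / max(1, n_items)

print "%8.3f ms total, %3.3f ms/share" % (0.042 * 1000., per_item_ms(0.042, 17))
```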
9 changes: 7 additions & 2 deletions p2pool/work.py
@@ -239,6 +239,7 @@ def get_local_addr_rates(self):

def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target, worker_ip=None):
global print_throttle
t0 = time.time()
if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
if self.node.best_share_var.value is None and self.node.net.PERSIST:
@@ -398,6 +399,7 @@ def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target
received_header_hashes = set()

def got_response(header, user, coinbase_nonce):
t0 = time.time()
assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
@@ -504,7 +506,10 @@ def _(err):
self.recent_shares_ts_work.pop(0)
self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))

t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for work.py:got_response()" % ((t1-t0)*1000.)

return on_time

t1 = time.time()
if p2pool.BENCH: print "%8.3f ms for work.py:get_work()" % ((t1-t0)*1000.)
return ba, got_response

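In work.py both get_work() and the nested got_response() callback keep a t0 of their own; the assignment inside the closure creates a new local rather than rebinding the outer variable, so get_work() still reports only the time spent building the work, while got_response() — called later, when a miner submits a result — times itself independently. A small sketch of that scoping behaviour (function names here are illustrative):

```python
import time

def outer():
    t0 = time.time()              # outer timer, as in get_work()

    def inner():
        t0 = time.time()          # a new local t0, as in got_response();
        return t0                 # it does not rebind outer()'s t0

    inner()
    return time.time() - t0       # still measures the full outer() duration

print "%8.3f ms for outer()" % (outer() * 1000.)
```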