blockchain: parameterise magic number 2016 as CHUNK_SIZE

SomberNight
2025-06-08 20:06:47 +00:00
parent 27599ac537
commit 57bb98dc7f
2 changed files with 28 additions and 27 deletions


@@ -38,6 +38,7 @@ if TYPE_CHECKING:
 _logger = get_logger(__name__)
 HEADER_SIZE = 80 # bytes
+CHUNK_SIZE = 2016 # num headers in a difficulty retarget period
 # see https://github.com/bitcoin/bitcoin/blob/feedb9c84e72e4fff489810a2bbeec09bcda5763/src/chainparams.cpp#L76
 MAX_TARGET = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff # compact: 0x1d00ffff
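For context, 2016 is Bitcoin's difficulty-retarget interval: at the 10-minute target block spacing, one chunk of CHUNK_SIZE headers spans two weeks, and at HEADER_SIZE bytes per header it occupies 161,280 bytes in the headers file. A minimal standalone sketch of that arithmetic (not part of the diff):

# Illustrative arithmetic only; constants copied from the hunk above.
HEADER_SIZE = 80    # bytes per serialized block header
CHUNK_SIZE = 2016   # headers per difficulty retarget period

TARGET_SPACING_MIN = 10                                  # Bitcoin's target block interval, in minutes
assert CHUNK_SIZE * TARGET_SPACING_MIN == 14 * 24 * 60   # one retarget period == two weeks
assert CHUNK_SIZE * HEADER_SIZE == 161_280               # on-disk size of one full chunk
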
@@ -175,7 +176,7 @@ _CHAINWORK_CACHE = {
 def init_headers_file_for_best_chain():
     b = get_best_chain()
     filename = b.path()
-    length = HEADER_SIZE * len(constants.net.CHECKPOINTS) * 2016
+    length = HEADER_SIZE * len(constants.net.CHECKPOINTS) * CHUNK_SIZE
     if not os.path.exists(filename) or os.path.getsize(filename) < length:
         with open(filename, 'wb') as f:
             if length > 0:
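The preallocation above reserves one full chunk of headers per checkpoint. A rough, hypothetical example (the real count comes from constants.net.CHECKPOINTS):

HEADER_SIZE = 80
CHUNK_SIZE = 2016
num_checkpoints = 400  # hypothetical checkpoint count, for illustration only

length = HEADER_SIZE * num_checkpoints * CHUNK_SIZE
assert length == 64_512_000  # ~61.5 MiB preallocated for the checkpointed region
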
@@ -321,7 +322,7 @@ class Blockchain(Logger):
     def verify_chunk(self, index: int, data: bytes) -> None:
         num = len(data) // HEADER_SIZE
-        start_height = index * 2016
+        start_height = index * CHUNK_SIZE
         prev_hash = self.get_hash(start_height - 1)
         target = self.get_target(index-1)
         for i in range(num):
@@ -331,7 +332,7 @@ class Blockchain(Logger):
             except MissingHeader:
                 expected_header_hash = None
             raw_header = data[i*HEADER_SIZE : (i+1)*HEADER_SIZE]
-            header = deserialize_header(raw_header, index*2016 + i)
+            header = deserialize_header(raw_header, index*CHUNK_SIZE + i)
             self.verify_header(header, prev_hash, target, expected_header_hash)
             prev_hash = hash_header(header)
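In verify_chunk, the i-th header of chunk `index` therefore lives at absolute height index * CHUNK_SIZE + i, which is what the deserialize_header call passes along. A small standalone illustration of that mapping:

CHUNK_SIZE = 2016

def header_height(index: int, i: int) -> int:
    # absolute block height of the i-th header inside chunk `index`
    return index * CHUNK_SIZE + i

assert header_height(0, 0) == 0        # genesis is the first header of chunk 0
assert header_height(1, 0) == 2016     # chunk 1 starts where chunk 0 ends
assert header_height(1, 2015) == 4031  # last header of chunk 1
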
@@ -358,7 +359,7 @@ class Blockchain(Logger):
             main_chain.save_chunk(index, chunk)
             return
-        delta_height = (index * 2016 - self.forkpoint)
+        delta_height = (index * CHUNK_SIZE - self.forkpoint)
         delta_bytes = delta_height * HEADER_SIZE
         # if this chunk contains our forkpoint, only save the part after forkpoint
         # (the part before is the responsibility of the parent)
@@ -509,7 +510,7 @@ class Blockchain(Logger):
     def get_hash(self, height: int) -> str:
         def is_height_checkpoint():
             within_cp_range = height <= constants.net.max_checkpoint()
-            at_chunk_boundary = (height+1) % 2016 == 0
+            at_chunk_boundary = (height+1) % CHUNK_SIZE == 0
             return within_cp_range and at_chunk_boundary
         if height == -1:
@@ -517,7 +518,7 @@ class Blockchain(Logger):
         elif height == 0:
             return constants.net.GENESIS
         elif is_height_checkpoint():
-            index = height // 2016
+            index = height // CHUNK_SIZE
             h, t = self.checkpoints[index]
             return h
         else:
@@ -536,8 +537,8 @@ class Blockchain(Logger):
             h, t = self.checkpoints[index]
             return t
         # new target
-        first = self.read_header(index * 2016)
-        last = self.read_header(index * 2016 + 2015)
+        first = self.read_header(index * CHUNK_SIZE)
+        last = self.read_header((index+1) * CHUNK_SIZE - 1)
         if not first or not last:
             raise MissingHeader()
         bits = last.get('bits')
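Note that the `last` line is slightly rewritten rather than substituted verbatim: (index+1) * CHUNK_SIZE - 1 is the same height as the old index * 2016 + 2015 once CHUNK_SIZE == 2016, so get_target still reads the first and last headers of the same retarget period. A quick standalone check:

CHUNK_SIZE = 2016

for index in range(10):
    old_last = index * 2016 + 2015           # expression before this commit
    new_last = (index + 1) * CHUNK_SIZE - 1  # expression after this commit
    assert old_last == new_last              # same header is read either way
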
@@ -591,7 +592,7 @@ class Blockchain(Logger):
     def chainwork_of_header_at_height(self, height: int) -> int:
         """work done by single header at given height"""
-        chunk_idx = height // 2016 - 1
+        chunk_idx = height // CHUNK_SIZE - 1
         target = self.get_target(chunk_idx)
         work = ((2 ** 256 - target - 1) // (target + 1)) + 1
         return work
@@ -604,23 +605,23 @@ class Blockchain(Logger):
             # On testnet/regtest, difficulty works somewhat different.
             # It's out of scope to properly implement that.
             return height
-        last_retarget = height // 2016 * 2016 - 1
+        last_retarget = height // CHUNK_SIZE * CHUNK_SIZE - 1
         cached_height = last_retarget
         while _CHAINWORK_CACHE.get(self.get_hash(cached_height)) is None:
             if cached_height <= -1:
                 break
-            cached_height -= 2016
+            cached_height -= CHUNK_SIZE
         assert cached_height >= -1, cached_height
         running_total = _CHAINWORK_CACHE[self.get_hash(cached_height)]
         while cached_height < last_retarget:
-            cached_height += 2016
+            cached_height += CHUNK_SIZE
             work_in_single_header = self.chainwork_of_header_at_height(cached_height)
-            work_in_chunk = 2016 * work_in_single_header
+            work_in_chunk = CHUNK_SIZE * work_in_single_header
             running_total += work_in_chunk
             _CHAINWORK_CACHE[self.get_hash(cached_height)] = running_total
-        cached_height += 2016
+        cached_height += CHUNK_SIZE
         work_in_single_header = self.chainwork_of_header_at_height(cached_height)
-        work_in_last_partial_chunk = (height % 2016 + 1) * work_in_single_header
+        work_in_last_partial_chunk = (height % CHUNK_SIZE + 1) * work_in_single_header
         return running_total + work_in_last_partial_chunk
     def can_connect(self, header: dict, check_height: bool=True) -> bool:
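The chunk-wise accumulation in get_chainwork works because all CHUNK_SIZE headers of a retarget period share one target, so a full chunk contributes CHUNK_SIZE times the per-header work. A standalone sketch of the same work formula, evaluated at MAX_TARGET:

MAX_TARGET = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
CHUNK_SIZE = 2016

def work_of_header(target: int) -> int:
    # same formula as chainwork_of_header_at_height: roughly 2**256 / (target + 1)
    return ((2 ** 256 - target - 1) // (target + 1)) + 1

assert work_of_header(MAX_TARGET) == 2 ** 32                   # a minimum-difficulty header counts as ~2^32 hashes
work_per_full_chunk = CHUNK_SIZE * work_of_header(MAX_TARGET)  # what one whole chunk adds to the cache
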
@@ -638,7 +639,7 @@ class Blockchain(Logger):
         if prev_hash != header.get('prev_block_hash'):
             return False
         try:
-            target = self.get_target(height // 2016 - 1)
+            target = self.get_target(height // CHUNK_SIZE - 1)
         except MissingHeader:
             return False
         try:
@@ -661,9 +662,9 @@ class Blockchain(Logger):
     def get_checkpoints(self):
         # for each chunk, store the hash of the last block and the target after the chunk
         cp = []
-        n = self.height() // 2016
+        n = self.height() // CHUNK_SIZE
         for index in range(n):
-            h = self.get_hash((index+1) * 2016 -1)
+            h = self.get_hash((index+1) * CHUNK_SIZE -1)
             target = self.get_target(index)
             cp.append((h, target))
         return cp
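get_checkpoints stores one (hash, target) pair per completed chunk, keyed by the chunk's last header; that is the same boundary condition is_height_checkpoint tests with (height+1) % CHUNK_SIZE == 0. A small standalone illustration:

CHUNK_SIZE = 2016

def is_chunk_boundary(height: int) -> bool:
    # True for the last header of each chunk: heights 2015, 4031, 6047, ...
    return (height + 1) % CHUNK_SIZE == 0

assert [h for h in range(10_000) if is_chunk_boundary(h)] == [2015, 4031, 6047, 8063]
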


@@ -55,7 +55,7 @@ from . import x509
 from . import pem
 from . import version
 from . import blockchain
-from .blockchain import Blockchain, HEADER_SIZE
+from .blockchain import Blockchain, HEADER_SIZE, CHUNK_SIZE
 from . import bitcoin
 from . import constants
 from .i18n import _
@@ -771,17 +771,17 @@ class Interface(Logger):
                             ) -> Optional[Tuple[bool, int]]:
         if not is_non_negative_integer(height):
             raise Exception(f"{repr(height)} is not a block height")
-        index = height // 2016
+        index = height // CHUNK_SIZE
         if can_return_early and index in self._requested_chunks:
             return None
         self.logger.info(f"requesting chunk from height {height}")
-        size = 2016
+        size = CHUNK_SIZE
         if tip is not None:
-            size = min(size, tip - index * 2016 + 1)
+            size = min(size, tip - index * CHUNK_SIZE + 1)
         size = max(size, 0)
         try:
             self._requested_chunks.add(index)
-            res = await self.session.send_request('blockchain.block.headers', [index * 2016, size])
+            res = await self.session.send_request('blockchain.block.headers', [index * CHUNK_SIZE, size])
         finally:
             self._requested_chunks.discard(index)
         assert_dict_contains_field(res, field_name='count')
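The clamping above requests a full CHUNK_SIZE headers except for the chunk containing the server's tip, where only the headers up to the tip are asked for. A standalone sketch of that calculation (an illustrative copy, not the real method):

from typing import Optional

CHUNK_SIZE = 2016

def chunk_request_size(height: int, tip: Optional[int] = None) -> int:
    # mirrors the size clamping in request_chunk
    index = height // CHUNK_SIZE
    size = CHUNK_SIZE
    if tip is not None:
        size = min(size, tip - index * CHUNK_SIZE + 1)
    return max(size, 0)

assert chunk_request_size(0, tip=100_000) == 2016       # far below the tip: a full chunk
assert chunk_request_size(100_800, tip=101_000) == 201  # tip inside this chunk: partial request
assert chunk_request_size(100_800, tip=100_000) == 0    # tip below the chunk: nothing to request
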
@@ -792,9 +792,9 @@ class Interface(Logger):
         assert_hex_str(res['hex'])
         if len(res['hex']) != HEADER_SIZE * 2 * res['count']:
             raise RequestCorrupted('inconsistent chunk hex and count')
-        # we never request more than 2016 headers, but we enforce those fit in a single response
-        if res['max'] < 2016:
-            raise RequestCorrupted(f"server uses too low 'max' count for block.headers: {res['max']} < 2016")
+        # we never request more than CHUNK_SIZE headers, but we enforce those fit in a single response
+        if res['max'] < CHUNK_SIZE:
+            raise RequestCorrupted(f"server uses too low 'max' count for block.headers: {res['max']} < {CHUNK_SIZE}")
         if res['count'] != size:
             raise RequestCorrupted(f"expected {size} headers but only got {res['count']}")
         conn = self.blockchain.connect_chunk(index, res['hex'])
@@ -965,7 +965,7 @@ class Interface(Logger):
                     continue
                 util.trigger_callback('blockchain_updated')
                 util.trigger_callback('network_updated')
-                height = (height // 2016 * 2016) + num_headers
+                height = (height // CHUNK_SIZE * CHUNK_SIZE) + num_headers
                 assert height <= next_height+1, (height, self.tip)
                 last = ChainResolutionMode.CATCHUP
             else:
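After a chunk connects, the local height is advanced by flooring to the start of the chunk that was requested and adding the number of headers the server actually returned; the parameterised expression keeps that flooring tied to CHUNK_SIZE. A standalone sketch:

CHUNK_SIZE = 2016

def advance_height(height: int, num_headers: int) -> int:
    # floor to the start of the chunk containing `height`, then count the connected headers
    return (height // CHUNK_SIZE * CHUNK_SIZE) + num_headers

assert advance_height(100_000, 2016) == 98_784 + 2016    # a full chunk was connected
assert advance_height(100_000, 1_217) == 98_784 + 1_217  # a partial chunk near the tip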