diff --git a/src/ape/types/__init__.py b/src/ape/types/__init__.py
index ee66c53e90..ba6776d556 100644
--- a/src/ape/types/__init__.py
+++ b/src/ape/types/__init__.py
@@ -262,18 +262,20 @@ class ContractLog(BaseContractLog):
 
     @validator("block_number", "log_index", "transaction_index", pre=True)
     def validate_hex_ints(cls, value):
-        if not isinstance(value, int):
+        if value is None:
+            # Should only happen for optionals.
+            return value
+
+        elif not isinstance(value, int):
             return to_int(value)
 
         return value
 
     @validator("contract_address", pre=True)
     def validate_address(cls, value):
-        from ape import convert
-
-        return convert(value, AddressType)
+        return cls.conversion_manager.convert(value, AddressType)
 
-    # NOTE: This class has an overrided `__getattr__` method, but `block` is a reserved keyword
+    # NOTE: This class has an overridden `__getattr__` method, but `block` is a reserved keyword
     # in most smart contract languages, so it is safe to use. Purposely avoid adding
     # `.datetime` and `.timestamp` in case they are used as event arg names.
     @cached_property
@@ -299,8 +301,7 @@ def __getattr__(self, item: str) -> Any:
         """
 
         try:
-            normal_attribute = self.__getattribute__(item)
-            return normal_attribute
+            return self.__getattribute__(item)
         except AttributeError:
             pass
 
diff --git a/src/ape/utils/abi.py b/src/ape/utils/abi.py
index 74fe6370cc..9e1bdca651 100644
--- a/src/ape/utils/abi.py
+++ b/src/ape/utils/abi.py
@@ -3,10 +3,12 @@
 from typing import Any, Dict, List, Optional, Tuple, Union
 
 from eth_abi import decode, grammar
+from eth_abi.exceptions import DecodingError, InsufficientDataBytes
 from eth_utils import decode_hex
 from ethpm_types import HexBytes
 from ethpm_types.abi import ABIType, ConstructorABI, EventABI, EventABIType, MethodABI
 
+from ape.logging import logger
 from ape.types import AddressType
 
 ARRAY_PATTERN = re.compile(r"[(*\w,? )]*\[\d*]")
@@ -344,21 +346,85 @@ def __init__(self, abi: EventABI):
     def event_name(self):
         return self.abi.name
 
-    def decode(self, topics: List[str], data: str) -> Dict:
+    def decode(self, topics: List[str], data: str, use_hex_on_fail: bool = False) -> Dict:
         decoded = {}
         for abi, topic_value in zip(self.topic_abi_types, topics[1:]):
             # reference types as indexed arguments are written as a hash
             # https://docs.soliditylang.org/en/v0.8.15/contracts.html#events
             abi_type = "bytes32" if is_dynamic_sized_type(abi.type) else abi.canonical_type
-            value = decode([abi_type], decode_hex(topic_value))[0]
-            decoded[abi.name] = self.decode_value(abi_type, value)
+            hex_value = decode_hex(topic_value)
+
+            try:
+                value = decode([abi_type], hex_value)[0]
+            except InsufficientDataBytes as err:
+                warning_message = f"Failed to decode log topic '{self.event_name}'."
+
+                # Try again with strict=False
+                try:
+                    value = decode([abi_type], hex_value, strict=False)[0]
+                except Exception:
+                    # Even with strict=False, we failed to decode.
+                    # This should be a rare occasion, if it ever happens.
+                    logger.warn_from_exception(err, warning_message)
+                    if use_hex_on_fail:
+                        if abi.name not in decoded:
+                            # This allows logs to still be findable on the receipt.
+                            decoded[abi.name] = hex_value
+
+                    else:
+                        raise DecodingError(str(err)) from err
+
+                else:
+                    # This happens when providers accidentally leave off trailing zeroes.
+                    warning_message = (
+                        f"{warning_message} "
+                        "However, we are able to get a value using decode(strict=False)"
+                    )
+                    logger.warn_from_exception(err, warning_message)
+                    decoded[abi.name] = self.decode_value(abi_type, value)
+
+            else:
+                # The data was formatted correctly and we were able to decode logs.
+                decoded[abi.name] = self.decode_value(abi_type, value)
 
         data_abi_types = [abi.canonical_type for abi in self.data_abi_types]
         hex_data = decode_hex(data) if isinstance(data, str) else data
-        data_values = decode(data_abi_types, hex_data)
-        for abi, value in zip(self.data_abi_types, data_values):
-            decoded[abi.name] = self.decode_value(abi.canonical_type, value)
+        try:
+            data_values = decode(data_abi_types, hex_data)
+        except InsufficientDataBytes as err:
+            warning_message = f"Failed to decode log data '{self.event_name}'."
+
+            # Try again with strict=False
+            try:
+                data_values = decode(data_abi_types, hex_data, strict=False)
+            except Exception:
+                # Even with strict=False, we failed to decode.
+                # This should be a rare occasion, if it ever happens.
+                logger.warn_from_exception(err, warning_message)
+                if use_hex_on_fail:
+                    for abi in self.data_abi_types:
+                        if abi.name not in decoded:
+                            # This allows logs to still be findable on the receipt.
+                            decoded[abi.name] = hex_data
+
+                else:
+                    raise DecodingError(str(err)) from err
+
+            else:
+                # This happens when providers accidentally leave off trailing zeroes.
+                warning_message = (
+                    f"{warning_message} "
+                    "However, we are able to get a value using decode(strict=False)"
+                )
+                logger.warn_from_exception(err, warning_message)
+                for abi, value in zip(self.data_abi_types, data_values):
+                    decoded[abi.name] = self.decode_value(abi.canonical_type, value)
+
+        else:
+            # The data was formatted correctly and we were able to decode logs.
+            for abi, value in zip(self.data_abi_types, data_values):
+                decoded[abi.name] = self.decode_value(abi.canonical_type, value)
 
         return decoded
 
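
A minimal, standalone sketch of the retry path added to `LogInputABICollection.decode`
above (not part of the patch; the values are made up). It assumes eth-abi >= 4.0, where
`decode()` accepts the `strict` keyword, and shows why the lenient retry can recover a
value when a provider trims trailing zero bytes from log data:

    from eth_abi import decode
    from eth_abi.exceptions import InsufficientDataBytes

    # Two uint256 values; the second is zero, so its entire 32-byte word is
    # zero bytes and a lossy provider may drop it from the payload.
    payload = (1).to_bytes(32, "big") + (0).to_bytes(32, "big")
    trimmed = payload.rstrip(b"\x00")  # only the first 32 bytes remain

    assert decode(["uint256", "uint256"], payload) == (1, 0)

    try:
        # Default (strict) decoding rejects the short payload.
        decode(["uint256", "uint256"], trimmed)
    except InsufficientDataBytes:
        pass

    # The lenient retry treats the missing bytes as zeroes and recovers both values.
    assert decode(["uint256", "uint256"], trimmed, strict=False) == (1, 0)
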
diff --git a/src/ape_ethereum/ecosystem.py b/src/ape_ethereum/ecosystem.py
index 0cafad5aa1..162e6d2dba 100644
--- a/src/ape_ethereum/ecosystem.py
+++ b/src/ape_ethereum/ecosystem.py
@@ -4,7 +4,7 @@
 
 from eth_abi import decode, encode
 from eth_abi.exceptions import InsufficientDataBytes, NonEmptyPaddingBytes
-from eth_typing import Hash32
+from eth_typing import Hash32, HexStr
 from eth_utils import (
     encode_hex,
     humanize_hash,
@@ -28,7 +28,6 @@
     ConversionError,
     DecodingError,
 )
-from ape.logging import logger
 from ape.types import (
     AddressType,
     AutoGasLimit,
@@ -200,12 +199,11 @@ def validate_ints(cls, value):
 
 
 class Ethereum(EcosystemAPI):
-    name: str = "ethereum"
-
     """
-    Default transaction type should be overidden id chain doesn't support EIP-1559
+    Default transaction type should be overridden if the chain doesn't support EIP-1559
     """
 
+    name: str = "ethereum"
     fee_token_symbol: str = "ETH"
 
     @property
@@ -288,7 +286,7 @@ def str_to_slot(text):
             ProxyType.OpenZeppelin: str_to_slot("org.zeppelinos.proxy.implementation"),
             ProxyType.UUPS: str_to_slot("PROXIABLE"),
         }
-        for type, slot in slots.items():
+        for _type, slot in slots.items():
             try:
                 # TODO perf: use a batch call here when ape adds support
                 storage = self.provider.get_storage_at(address, slot)
@@ -300,10 +298,10 @@ def str_to_slot(text):
 
             target = self.conversion_manager.convert(storage[-20:], AddressType)
             # read `target.implementation()`
-            if type == ProxyType.Beacon:
+            if _type == ProxyType.Beacon:
                 target = ContractCall(IMPLEMENTATION_ABI, target)(skip_trace=True)
 
-            return ProxyInfo(type=type, target=target)
+            return ProxyInfo(type=_type, target=target)
 
         # safe >=1.1.0 provides `masterCopy()`, which is also stored in slot 0
         # detect safe-specific bytecode of push32 keccak256("masterCopy()")
@@ -654,26 +652,27 @@ def decode_logs(self, logs: List[Dict], *events: EventABI) -> Iterator["Contract
             encode_hex(keccak(text=abi.selector)): LogInputABICollection(abi) for abi in events
         }
 
+        def get_abi(_topic: HexStr) -> Optional[LogInputABICollection]:
+            return abi_inputs[_topic] if _topic in abi_inputs else None
+
         for log in logs:
             if log.get("anonymous"):
                 raise NotImplementedError(
                     "decoding anonymous logs is not supported with this method"
                 )
             topics = log["topics"]
-            # web3.py converts topics to hexbytes, data is always a hexstr
+            # web3.py converts topics to HexBytes, data is always a HexStr
             if isinstance(log["topics"][0], bytes):
                 topics = [encode_hex(t) for t in log["topics"]]
-            try:
-                abi = abi_inputs[topics[0]]
-            except KeyError:
+
+            elif not topics:
                 continue
-            try:
-                event_arguments = abi.decode(topics, log["data"])
-            except InsufficientDataBytes:
-                logger.debug("failed to decode log data for %s", log, exc_info=True)
+
+            if not (abi := get_abi(topics[0])):
                 continue
+
+            event_arguments = abi.decode(topics, log["data"], use_hex_on_fail=True)
+
             # Since LogABICollection does not have access to the Ecosystem,
             # the rest of the decoding must happen here.
             _types = [x.canonical_type for x in abi.abi.inputs]
diff --git a/src/ape_ethereum/transactions.py b/src/ape_ethereum/transactions.py
index 2d3c70db4f..655bb5d1ca 100644
--- a/src/ape_ethereum/transactions.py
+++ b/src/ape_ethereum/transactions.py
@@ -270,22 +270,64 @@ def decode_logs(
                 for address, contract in contract_types.items()
             }
 
+            def get_default_log(
+                _log: Dict, logs: ContractLogContainer, name: Optional[str] = None
+            ) -> ContractLog:
+                # For when we fail to decode.
+                if not name:
+                    name = "UnknownLog"
+                    index = _log.get("logIndex")
+                    if index is not None:
+                        name = f"{name}_WithIndex_{index}"
+
+                return ContractLog(
+                    block_hash=self.block.hash,
+                    block_number=self.block_number,
+                    event_arguments={"__root__": _log["data"]},
+                    event_name=f"<{name}>",
+                    log_index=logs[-1].log_index + 1 if logs else 0,
+                    transaction_hash=self.txn_hash,
+                    transaction_index=logs[-1].transaction_index if logs else None,
+                )
+
             decoded_logs: ContractLogContainer = ContractLogContainer()
             for log in self.logs:
-                contract_address = log["address"]
-                if contract_address not in selectors:
-                    continue
-
-                try:
-                    selector = encode_hex(log["topics"][0])
-                    event_abi = selectors[contract_address][selector]
-                except KeyError:
-                    # Likely a library log
-                    if library_log := self._decode_ds_note(log):
+                if contract_address := log.get("address"):
+                    if contract_address in selectors and (topics := log.get("topics")):
+                        selector = encode_hex(topics[0])
+                        if selector in selectors[contract_address]:
+                            event_abi = selectors[contract_address][selector]
+                            decoded_logs.extend(
+                                self.provider.network.ecosystem.decode_logs([log], event_abi)
+                            )
+
+                        elif library_log := self._decode_ds_note(log):
+                            decoded_logs.append(library_log)
+
+                        else:
+                            # Search for selector in other spots:
+                            name = f"UnknownLogWithSelector_{selector}"
+                            obj = get_default_log(log, decoded_logs, name=name)
+                            decoded_logs.append(obj)
+
+                    elif library_log := self._decode_ds_note(log):
                         decoded_logs.append(library_log)
+
+                    else:
+                        name = f"UnknownLogAtAddress_{contract_address}"
+                        index = log.get("logIndex")
+                        if index is not None:
+                            name = f"{name}_AndLogIndex_{index}"
+
+                        obj = get_default_log(log, decoded_logs, name=name)
+                        decoded_logs.append(obj)
+
+                elif library_log := self._decode_ds_note(log):
+                    decoded_logs.append(library_log)
+
                 else:
-                    decoded_logs.extend(
-                        self.provider.network.ecosystem.decode_logs([log], event_abi)
-                    )
+                    obj = get_default_log(log, decoded_logs)
+                    decoded_logs.append(obj)
 
             return decoded_logs
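
A small usage sketch of the effect of the `decode_logs` change above (the contract and
account names here are hypothetical, not from this patch): logs that cannot be matched
to a known ABI now surface as placeholder `ContractLog` entries with names like
`<UnknownLog...>` instead of being silently skipped, so callers that want only fully
decoded events can filter them out by name:

    receipt = contract.myMethod(sender=owner)  # hypothetical transaction
    known = [ev for ev in receipt.events if not ev.event_name.startswith("<Unknown")]
    unknown = [ev for ev in receipt.events if ev.event_name.startswith("<Unknown")]
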
""" - receipt = contract_with_call_depth.emitLogWithSameInterfaceFromMultipleContracts(sender=owner) + tx = contract_with_call_depth.emitLogWithSameInterfaceFromMultipleContracts(sender=owner) - assert contract_with_call_depth.OneOfMany(addr=owner.address) in receipt.events - assert middle_contract.OneOfMany(addr=contract_with_call_depth.address) in receipt.events - assert leaf_contract.OneOfMany(addr=contract_with_call_depth.address) in receipt.events + assert contract_with_call_depth.OneOfMany(addr=owner.address) in tx.events + assert middle_contract.OneOfMany(addr=contract_with_call_depth.address) in tx.events + assert leaf_contract.OneOfMany(addr=contract_with_call_depth.address) in tx.events # Ensure each contract's event appears only once - result_a = receipt.events.filter(contract_with_call_depth.OneOfMany) + result_a = tx.events.filter(contract_with_call_depth.OneOfMany) assert result_a == [contract_with_call_depth.OneOfMany(addr=owner.address)] - result_b = receipt.events.filter(middle_contract.OneOfMany) + result_b = tx.events.filter(middle_contract.OneOfMany) assert result_b == [middle_contract.OneOfMany(addr=contract_with_call_depth.address)] - result_c = receipt.events.filter(leaf_contract.OneOfMany) + result_c = tx.events.filter(leaf_contract.OneOfMany) assert result_c == [leaf_contract.OneOfMany(addr=contract_with_call_depth.address)] diff --git a/tests/functional/utils/test_abi.py b/tests/functional/utils/test_abi.py new file mode 100644 index 0000000000..a8269c65aa --- /dev/null +++ b/tests/functional/utils/test_abi.py @@ -0,0 +1,82 @@ +import pytest +from ethpm_types import HexBytes +from ethpm_types.abi import EventABI, EventABIType + +from ape.utils.abi import LogInputABICollection + + +@pytest.fixture +def event_abi(): + return EventABI( + type="event", + name="NodeOperatorAdded", + inputs=[ + EventABIType( + name="nodeOperatorId", + type="uint256", + components=None, + internalType=None, + indexed=False, + ), + EventABIType( + name="name", type="string", components=None, internalType=None, indexed=False + ), + EventABIType( + name="rewardAddress", + type="address", + components=None, + internalType=None, + indexed=False, + ), + EventABIType( + name="stakingLimit", + type="uint64", + components=None, + internalType=None, + indexed=False, + ), + ], + anonymous=False, + ) + + +@pytest.fixture +def collection(event_abi): + return LogInputABICollection(event_abi) + + +@pytest.fixture +def topics(): + return ["0xc52ec0ad7872dae440d886040390c13677df7bf3cca136d8d81e5e5e7dd62ff1"] + + +@pytest.fixture +def log_data_missing_trailing_zeroes(): + return HexBytes( + "0x000000000000000000000000000000000000000000000000000000000000001e" + "000000000000000000000000000000000000000000000000000000000000008000" + "00000000000000000000005a8b929edbf3ce44526465dd2087ec7efb59a5610000" + "000000000000000000000000000000000000000000000000000000000000000000" + "000000000000000000000000000000000000000000000000000000000b4c61756e" + "63686e6f646573" + ) + + +def test_decoding_with_strict(collection, topics, log_data_missing_trailing_zeroes, caplog): + """ + This test is for a time where Alchemy gave us log data when it was missing trailing zeroes. + When using strict=False, it was able to properly decode. In this case, in Ape, we warn + the user and still proceed to decode the log. 
+ """ + actual = collection.decode(topics, log_data_missing_trailing_zeroes) + expected = { + "name": "Launchnodes", + "nodeOperatorId": 30, + "rewardAddress": "0x5a8b929edbf3ce44526465dd2087ec7efb59a561", + "stakingLimit": 0, + } + assert actual == expected + assert ( + "However, we are able to get a value using decode(strict=False)" + in caplog.records[-1].message + )
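
A side note on the fixture data in the new test above (illustrative, not part of the
patch): the truncated payload ends mid-word -- the ASCII bytes of the string value are
present, but the zero padding that should complete its final 32-byte word was dropped
by the provider, which is exactly the shortfall that `decode(strict=False)` tolerates:

    from ethpm_types import HexBytes

    # The trailing bytes of the payload spell the decoded `name` value.
    assert HexBytes("0x4c61756e63686e6f646573").decode("ascii") == "Launchnodes"

    # The string-length word just before it holds 0x0b == 11 == len("Launchnodes").
    assert len("Launchnodes") == 0x0B
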