From 1fe39e5715ea236cc4dd4faa97cfce621024ffeb Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Tue, 21 May 2024 18:04:26 +0100 Subject: [PATCH 01/17] feat: collaborative indexing through txid sharing --- rust/agents/relayer/src/relayer.rs | 2 + .../hyperlane-cosmos/src/interchain_gas.rs | 2 +- rust/chains/hyperlane-cosmos/src/mailbox.rs | 4 +- .../hyperlane-cosmos/src/merkle_tree_hook.rs | 2 +- .../src/contracts/interchain_gas.rs | 2 +- .../src/contracts/mailbox.rs | 45 ++- .../src/contracts/merkle_tree_hook.rs | 2 +- .../hyperlane-fuel/src/interchain_gas.rs | 2 +- rust/chains/hyperlane-fuel/src/mailbox.rs | 4 +- .../hyperlane-sealevel/src/interchain_gas.rs | 2 +- rust/chains/hyperlane-sealevel/src/mailbox.rs | 4 +- .../src/merkle_tree_hook.rs | 4 +- .../src/contract_sync/cursors/rate_limited.rs | 2 +- .../cursors/sequence_aware/forward.rs | 2 +- rust/hyperlane-base/src/contract_sync/mod.rs | 259 +++++++++++++----- rust/hyperlane-base/src/settings/base.rs | 2 + rust/hyperlane-core/src/traits/cursor.rs | 4 +- rust/hyperlane-core/src/traits/indexer.rs | 12 +- 18 files changed, 265 insertions(+), 91 deletions(-) diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index 581620f2f7..8e4cee24ae 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -130,6 +130,8 @@ impl BaseAgent for Relayer { let contract_sync_metrics = Arc::new(ContractSyncMetrics::new(&core_metrics)); + // each of these `contract_syncs` will return a receiver of txid alongside + // `contract_syncs` will also take a hashmap of domain -> recvs as an argument let message_syncs = settings .contract_syncs::( settings.origin_chains.iter(), diff --git a/rust/chains/hyperlane-cosmos/src/interchain_gas.rs b/rust/chains/hyperlane-cosmos/src/interchain_gas.rs index 4ba2ca87ab..4444a56eaa 100644 --- a/rust/chains/hyperlane-cosmos/src/interchain_gas.rs +++ 
b/rust/chains/hyperlane-cosmos/src/interchain_gas.rs @@ -202,7 +202,7 @@ impl CosmosInterchainGasPaymasterIndexer { #[async_trait] impl Indexer for CosmosInterchainGasPaymasterIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-cosmos/src/mailbox.rs b/rust/chains/hyperlane-cosmos/src/mailbox.rs index 7f686cb85c..833b92b89f 100644 --- a/rust/chains/hyperlane-cosmos/src/mailbox.rs +++ b/rust/chains/hyperlane-cosmos/src/mailbox.rs @@ -350,7 +350,7 @@ impl CosmosMailboxIndexer { #[async_trait] impl Indexer for CosmosMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -397,7 +397,7 @@ impl Indexer for CosmosMailboxIndexer { #[async_trait] impl Indexer for CosmosMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs b/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs index c8e798096c..54acdf80f0 100644 --- a/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs @@ -283,7 +283,7 @@ impl CosmosMerkleTreeHookIndexer { #[async_trait] impl Indexer for CosmosMerkleTreeHookIndexer { /// Fetch list of logs between `range` of blocks - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 8ed514c836..ec2f572de3 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -86,7 +86,7 @@ where { /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - 
async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs index fd6a6b2808..05753f07da 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs @@ -7,9 +7,10 @@ use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; -use ethers::abi::{AbiEncode, Detokenize}; +use ethers::abi::{AbiEncode, Detokenize, RawLog}; use ethers::prelude::Middleware; -use ethers_contract::builders::ContractCall; +use ethers_contract::{builders::ContractCall, ContractError, EthEvent, LogMeta as EthersLogMeta}; +use ethers_core::types::H256 as EthersH256; use futures_util::future::join_all; use tracing::instrument; @@ -25,6 +26,7 @@ use crate::interfaces::arbitrum_node_interface::ArbitrumNodeInterface; use crate::interfaces::i_mailbox::{ IMailbox as EthereumMailboxInternal, ProcessCall, IMAILBOX_ABI, }; +use crate::interfaces::mailbox::DispatchFilter; use crate::tx::{call_with_lag, fill_tx_gas_params, report_tx}; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider, TransactionOverrides}; @@ -134,7 +136,7 @@ where /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -157,6 +159,41 @@ where events.sort_by(|a, b| a.0.inner().nonce.cmp(&b.0.inner().nonce)); Ok(events) } + + async fn fetch_logs_by_tx_hash( + &self, + tx_hash: H256, + ) -> ChainResult, LogMeta)>> { + let ethers_tx_hash: EthersH256 = tx_hash.into(); + let receipt = self + .provider + .get_transaction_receipt(ethers_tx_hash) + .await + .map_err(|err| ContractError::::MiddlewareError(err))?; + let Some(receipt) = receipt else { + return Ok(vec![]); + }; + + let logs: Vec<_> = 
receipt + .logs + .into_iter() + .filter_map(|log| { + let raw_log = RawLog { + topics: log.topics.clone(), + data: log.data.to_vec(), + }; + let log_meta: EthersLogMeta = (&log).into(); + let dispatch_filter = DispatchFilter::decode_log(&raw_log).ok(); + dispatch_filter.map(|event| { + ( + HyperlaneMessage::from(event.message.to_vec()).into(), + log_meta.into(), + ) + }) + }) + .collect(); + Ok(logs) + } } #[async_trait] @@ -183,7 +220,7 @@ where /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs index a94ceff325..534c9a9a07 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs @@ -108,7 +108,7 @@ where { /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-fuel/src/interchain_gas.rs b/rust/chains/hyperlane-fuel/src/interchain_gas.rs index d969210a60..3385872c35 100644 --- a/rust/chains/hyperlane-fuel/src/interchain_gas.rs +++ b/rust/chains/hyperlane-fuel/src/interchain_gas.rs @@ -35,7 +35,7 @@ pub struct FuelInterchainGasPaymasterIndexer {} #[async_trait] impl Indexer for FuelInterchainGasPaymasterIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-fuel/src/mailbox.rs b/rust/chains/hyperlane-fuel/src/mailbox.rs index 035fe6e6d3..5e8f0cf059 100644 --- a/rust/chains/hyperlane-fuel/src/mailbox.rs +++ b/rust/chains/hyperlane-fuel/src/mailbox.rs @@ -126,7 +126,7 
@@ pub struct FuelMailboxIndexer {} #[async_trait] impl Indexer for FuelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -140,7 +140,7 @@ impl Indexer for FuelMailboxIndexer { #[async_trait] impl Indexer for FuelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-sealevel/src/interchain_gas.rs b/rust/chains/hyperlane-sealevel/src/interchain_gas.rs index 4945833818..beebcb9db4 100644 --- a/rust/chains/hyperlane-sealevel/src/interchain_gas.rs +++ b/rust/chains/hyperlane-sealevel/src/interchain_gas.rs @@ -246,7 +246,7 @@ impl SealevelInterchainGasPaymasterIndexer { #[async_trait] impl Indexer for SealevelInterchainGasPaymasterIndexer { #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-sealevel/src/mailbox.rs b/rust/chains/hyperlane-sealevel/src/mailbox.rs index 3fc8393d14..beb4e86c37 100644 --- a/rust/chains/hyperlane-sealevel/src/mailbox.rs +++ b/rust/chains/hyperlane-sealevel/src/mailbox.rs @@ -646,7 +646,7 @@ impl SequenceAwareIndexer for SealevelMailboxIndexer { #[async_trait] impl Indexer for SealevelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -670,7 +670,7 @@ impl Indexer for SealevelMailboxIndexer { #[async_trait] impl Indexer for SealevelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, _range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs b/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs index 9fe48053c8..8c1132addf 100644 --- a/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs +++ 
b/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs @@ -83,11 +83,11 @@ pub struct SealevelMerkleTreeHookIndexer(SealevelMailboxIndexer); #[async_trait] impl Indexer for SealevelMerkleTreeHookIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { - let messages = Indexer::::fetch_logs(&self.0, range).await?; + let messages = Indexer::::fetch_logs_in_range(&self.0, range).await?; let merkle_tree_insertions = messages .into_iter() .map(|(m, meta)| (message_to_merkle_tree_insertion(m.inner()).into(), meta)) diff --git a/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs b/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs index d85b3618f6..86383dd740 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs @@ -234,7 +234,7 @@ pub(crate) mod test { #[async_trait] impl Indexer<()> for Indexer { - async fn fetch_logs(&self, range: RangeInclusive) -> ChainResult , LogMeta)>>; + async fn fetch_logs_in_range(&self, range: RangeInclusive) -> ChainResult , LogMeta)>>; async fn get_finalized_block_number(&self) -> ChainResult; } } diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs index aef515b2b6..78df74e881 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs @@ -475,7 +475,7 @@ pub(crate) mod test { where T: Sequenced + Debug, { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, _range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index b97e3e5f4b..ffc59eb4e6 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ 
b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -1,3 +1,4 @@ +use std::ops::RangeInclusive; use std::{ collections::HashSet, fmt::Debug, hash::Hash, marker::PhantomData, sync::Arc, time::Duration, }; @@ -8,9 +9,12 @@ use derive_new::new; use hyperlane_core::{ utils::fmt_sync_time, ContractSyncCursor, CursorAction, HyperlaneDomain, HyperlaneLogStore, HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, - SequenceAwareIndexer, + SequenceAwareIndexer, H256, }; +use hyperlane_core::{Indexed, LogMeta}; pub use metrics::ContractSyncMetrics; +use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; +use tokio::sync::broadcast::{Receiver as BroadcastReceiver, Sender as BroadcastSender}; use tokio::time::sleep; use tracing::{debug, info, warn}; @@ -24,20 +28,46 @@ use cursors::ForwardBackwardSequenceAwareSyncCursor; const SLEEP_DURATION: Duration = Duration::from_secs(5); +#[derive(Debug)] +pub enum BroadcastChannel { + Transmit(BroadcastSender), + Receive(BroadcastReceiver), +} + +impl BroadcastChannel { + pub fn transmit(&self, tx_id: H256) { + if let BroadcastChannel::Transmit(sender) = self { + if let Err(err) = sender.send(tx_id) { + warn!(?err, "Error broadcasting txid"); + } + } + } + + pub async fn receive(&mut self) -> Option { + if let BroadcastChannel::Receive(recv) = self { + recv.recv().await.ok() + } else { + None + } + } +} + /// Entity that drives the syncing of an agent's db with on-chain data. /// Extracts chain-specific data (emitted checkpoints, messages, etc) from an /// `indexer` and fills the agent's db with this data. 
-#[derive(Debug, new, Clone)] +#[derive(Debug, new)] pub struct ContractSync, I: Indexer> { domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics, + tx_id_channel: BroadcastChannel, _phantom: PhantomData, } impl ContractSync where + T: Debug + Send + Sync + Clone + Eq + Hash + 'static, D: HyperlaneLogStore, I: Indexer + 'static, { @@ -45,82 +75,149 @@ where pub fn domain(&self) -> &HyperlaneDomain { &self.domain } -} -impl ContractSync -where - T: Debug + Send + Sync + Clone + Eq + Hash + 'static, - D: HyperlaneLogStore, - I: Indexer + 'static, -{ + async fn get_receive_tx_channel(&self) -> Option> { + match &self.tx_id_channel { + BroadcastChannel::Transmit(tx) => Some(tx.subscribe()), + _ => None, + } + } + /// Sync logs and write them to the LogStore - #[tracing::instrument(name = "ContractSync", fields(domain=self.domain().name()), skip(self, cursor))] - pub async fn sync(&self, label: &'static str, mut cursor: Box>) { + #[tracing::instrument(name = "ContractSync", fields(domain=self.domain().name()), skip(self, opts))] + pub async fn sync(&self, label: &'static str, mut opts: SyncOptions) { let chain_name = self.domain.as_ref(); - let indexed_height = self + let indexed_height_metric = self .metrics .indexed_height .with_label_values(&[label, chain_name]); - let stored_logs = self + let stored_logs_metric = self .metrics .stored_events .with_label_values(&[label, chain_name]); loop { - indexed_height.set(cursor.latest_queried_block() as i64); + // in here, we check to see whether the recv end of the channel received any txid to query receipts for + // the recv end is defined as an Option + + // what's below is to be turned into an async function that is only called if `cursor` + // is Some(...). + // any sleeps should occur in this loop. We don't want to block in either the recv arm or in the + // cursor.next_action() arm. 
+ let logs_found_from_receiver = if let Some(recv) = opts.tx_id_recv.as_mut() { + self.fetch_logs_from_receiver(recv, &stored_logs_metric) + .await + } else { + None + }; + + let logs_found_with_cursor = if let Some(cursor) = opts.cursor.as_mut() { + self.fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) + .await + } else { + None + }; + + // If we didn't find any logs, sleep for a while + if logs_found_from_receiver.unwrap_or_default() + + logs_found_with_cursor.unwrap_or_default() + == 0 + { + sleep(SLEEP_DURATION).await; + } + } + } - let (action, eta) = match cursor.next_action().await { - Ok((action, eta)) => (action, eta), + async fn fetch_logs_from_receiver( + &self, + recv: &mut BroadcastReceiver, + stored_logs_metric: &GenericCounter, + ) -> Option { + if let Ok(tx_id) = recv.recv().await { + // query receipts for tx_id + let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { + Ok(logs) => logs, Err(err) => { - warn!(?err, "Error getting next action"); - sleep(SLEEP_DURATION).await; - continue; + warn!(?err, ?tx_id, "Error fetching logs for tx id"); + vec![] } }; - let sleep_duration = match action { - // Use `loop` but always break - this allows for returning a value - // from the loop (the sleep duration) - #[allow(clippy::never_loop)] - CursorAction::Query(range) => loop { - debug!(?range, "Looking for for events in index range"); - - let logs = match self.indexer.fetch_logs(range.clone()).await { - Ok(logs) => logs, - Err(err) => { - warn!(?err, "Error fetching logs"); - break SLEEP_DURATION; - } - }; - let deduped_logs = HashSet::<_>::from_iter(logs); - let logs = Vec::from_iter(deduped_logs); - - info!( - ?range, - num_logs = logs.len(), - estimated_time_to_sync = fmt_sync_time(eta), - "Found log(s) in index range" - ); - // Store deliveries - let stored = match self.db.store_logs(&logs).await { - Ok(stored) => stored, - Err(err) => { - warn!(?err, "Error storing logs in db"); - break SLEEP_DURATION; - } - }; - 
// Report amount of deliveries stored into db - stored_logs.inc_by(stored as u64); - // Update cursor - if let Err(err) = cursor.update(logs, range).await { - warn!(?err, "Error updating cursor"); - break SLEEP_DURATION; - }; - break Default::default(); - }, - CursorAction::Sleep(duration) => duration, - }; - sleep(sleep_duration).await; + let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let logs_found = logs.len() as u64; + info!(num_logs = logs_found, ?tx_id, "Found log(s) for tx id"); + return Some(logs_found); } + None + } + + async fn fetch_logs_with_cursor( + &self, + cursor: &mut Box>, + stored_logs_metric: &GenericCounter, + indexed_height_metric: &GenericGauge, + ) -> Option { + indexed_height_metric.set(cursor.latest_queried_block() as i64); + let (action, eta) = match cursor.next_action().await { + Ok((action, eta)) => (action, eta), + Err(err) => { + warn!(?err, "Error getting next action"); + return None; + } + }; + match action { + // Use `loop` but always break - this allows for returning a value + // from the loop (the sleep duration) + #[allow(clippy::never_loop)] + CursorAction::Query(range) => { + debug!(?range, "Looking for for events in index range"); + + let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { + Ok(logs) => logs, + Err(err) => { + warn!(?err, ?range, "Error fetching logs in range"); + return None; + } + }; + + let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let logs_found = logs.len() as u64; + info!( + ?range, + num_logs = logs_found, + estimated_time_to_sync = fmt_sync_time(eta), + "Found log(s) in index range" + ); + + // Update cursor + if let Err(err) = cursor.update(logs, range).await { + warn!(?err, "Error updating cursor"); + return None; + }; + return Some(logs_found); + } + CursorAction::Sleep => return None, + }; + } + + async fn dedupe_and_store_logs( + &self, + logs: Vec<(Indexed, LogMeta)>, + stored_logs_metric: &GenericCounter, + ) -> 
Vec<(Indexed, LogMeta)> { + let deduped_logs = HashSet::<_>::from_iter(logs); + let logs = Vec::from_iter(deduped_logs); + + // Store deliveries + let stored = match self.db.store_logs(&logs).await { + Ok(stored) => stored, + Err(err) => { + warn!(?err, "Error storing logs in db"); + Default::default() + } + }; + // Report amount of deliveries stored into db + stored_logs_metric.inc_by(stored as u64); + logs } } @@ -141,10 +238,30 @@ pub trait ContractSyncer: Send + Sync { async fn cursor(&self, index_settings: IndexSettings) -> Box>; /// Syncs events from the indexer using the provided cursor - async fn sync(&self, label: &'static str, cursor: Box>); + async fn sync(&self, label: &'static str, opts: SyncOptions); /// The domain of this syncer fn domain(&self) -> &HyperlaneDomain; + + /// If this syncer is also a broadcaster, return the channel to receive txids + async fn get_receive_tx_channel(&self) -> Option>; + + /// Set the channel to receive txids + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) {} + + async fn receive_tx_to_index(&self) -> Option { + None + } +} + +struct SyncOptions +// where +// T: Debug + Send + Sync + Clone + Eq + Hash + 'static, +{ + // Keep as optional fields for now to run them simultaneously. + // Might want to refactor into an enum later. 
+ cursor: Option>>, + tx_id_recv: Option>, } #[async_trait] @@ -172,13 +289,17 @@ where ) } - async fn sync(&self, label: &'static str, cursor: Box>) { - ContractSync::sync(self, label, cursor).await; + async fn sync(&self, label: &'static str, opts: SyncOptions) { + ContractSync::sync(self, label, opts).await } fn domain(&self) -> &HyperlaneDomain { ContractSync::domain(self) } + + async fn get_receive_tx_channel(&self) -> Option> { + ContractSync::get_receive_tx_channel(self).await + } } /// Log store for sequence aware cursors @@ -207,11 +328,15 @@ where ) } - async fn sync(&self, label: &'static str, cursor: Box>) { - ContractSync::sync(self, label, cursor).await; + async fn sync(&self, label: &'static str, opts: SyncOptions) { + ContractSync::sync(self, label, opts).await; } fn domain(&self) -> &HyperlaneDomain { ContractSync::domain(self) } + + async fn get_receive_tx_channel(&self) -> Option> { + ContractSync::get_receive_tx_channel(self).await + } } diff --git a/rust/hyperlane-base/src/settings/base.rs b/rust/hyperlane-base/src/settings/base.rs index 59b8fa11a0..eafb8c5667 100644 --- a/rust/hyperlane-base/src/settings/base.rs +++ b/rust/hyperlane-base/src/settings/base.rs @@ -172,6 +172,7 @@ impl Settings { db.clone() as SequenceAwareLogStore<_>, indexer, sync_metrics.clone(), + None, ))) } @@ -196,6 +197,7 @@ impl Settings { db.clone() as WatermarkLogStore<_>, indexer, sync_metrics.clone(), + None, ))) } diff --git a/rust/hyperlane-core/src/traits/cursor.rs b/rust/hyperlane-core/src/traits/cursor.rs index cfe92b8dc4..48d05c4402 100644 --- a/rust/hyperlane-core/src/traits/cursor.rs +++ b/rust/hyperlane-core/src/traits/cursor.rs @@ -36,14 +36,14 @@ pub enum CursorAction { /// Direct the contract_sync task to query a block range (inclusive) Query(RangeInclusive), /// Direct the contract_sync task to sleep for a duration - Sleep(Duration), + Sleep, } impl fmt::Debug for CursorAction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { 
CursorAction::Query(range) => write!(f, "Query({:?})", range), - CursorAction::Sleep(duration) => write!(f, "Sleep({:?})", duration), + CursorAction::Sleep => write!(f, "Sleep"), } } } diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index 3db7e4f570..3acac87ea6 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ b/rust/hyperlane-core/src/traits/indexer.rs @@ -11,7 +11,7 @@ use async_trait::async_trait; use auto_impl::auto_impl; use serde::Deserialize; -use crate::{ChainResult, Indexed, LogMeta}; +use crate::{ChainResult, Indexed, LogMeta, H256}; /// Indexing mode. #[derive(Copy, Debug, Default, Deserialize, Clone)] @@ -29,13 +29,21 @@ pub enum IndexMode { #[auto_impl(&, Box, Arc,)] pub trait Indexer: Send + Sync + Debug { /// Fetch list of logs between blocks `from` and `to`, inclusive. - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>>; /// Get the chain's latest block number that has reached finality async fn get_finalized_block_number(&self) -> ChainResult; + + /// Fetch list of logs emitted in a transaction with the given hash. + async fn fetch_logs_by_tx_hash( + &self, + _tx_hash: H256, + ) -> ChainResult, LogMeta)>> { + Err(eyre::eyre!("fetch_logs_by_tx_hash not implemented").into()) + } } /// Interface for indexing data in sequence. 
From 71b2b0fe8b4156c53f6736e10d62efdd510479cd Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Wed, 22 May 2024 13:53:09 +0100 Subject: [PATCH 02/17] feat(cooperative-indexing): loop between `recv` and `cursor` with `FuturesUnordered` --- rust/agents/relayer/src/msg/op_queue.rs | 11 +- rust/hyperlane-base/src/contract_sync/mod.rs | 125 ++++++++++--------- rust/hyperlane-core/src/traits/cursor.rs | 4 +- 3 files changed, 78 insertions(+), 62 deletions(-) diff --git a/rust/agents/relayer/src/msg/op_queue.rs b/rust/agents/relayer/src/msg/op_queue.rs index ef8c2ad2d3..c71af566b0 100644 --- a/rust/agents/relayer/src/msg/op_queue.rs +++ b/rust/agents/relayer/src/msg/op_queue.rs @@ -41,7 +41,7 @@ impl OpQueue { } /// Pop multiple elements at once from the queue and update metrics - #[instrument(skip(self), ret, fields(queue_label=%self.queue_metrics_label), level = "debug")] + #[instrument(skip(self), fields(queue_label=%self.queue_metrics_label), level = "debug")] pub async fn pop_many(&mut self, limit: usize) -> Vec { self.process_retry_requests().await; let mut queue = self.queue.lock().await; @@ -55,6 +55,15 @@ impl OpQueue { break; } } + // This function is called very often by the op_submitter tasks, so only log when there are operations to pop + // to avoid spamming the logs + if !popped.is_empty() { + debug!( + queue_label = %self.queue_metrics_label, + operations = popped, + "Popped OpQueue operations" + ); + } popped } diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index ffc59eb4e6..c2ce67772a 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -1,4 +1,4 @@ -use std::ops::RangeInclusive; +use std::pin::Pin; use std::{ collections::HashSet, fmt::Debug, hash::Hash, marker::PhantomData, sync::Arc, time::Duration, }; @@ -6,6 +6,8 @@ use std::{ use axum::async_trait; use cursors::*; use derive_new::new; 
+use futures::stream::FuturesUnordered; +use futures::{Future, StreamExt}; use hyperlane_core::{ utils::fmt_sync_time, ContractSyncCursor, CursorAction, HyperlaneDomain, HyperlaneLogStore, HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, @@ -104,25 +106,26 @@ where // is Some(...). // any sleeps should occur in this loop. We don't want to block in either the recv arm or in the // cursor.next_action() arm. - let logs_found_from_receiver = if let Some(recv) = opts.tx_id_recv.as_mut() { - self.fetch_logs_from_receiver(recv, &stored_logs_metric) - .await - } else { - None - }; - - let logs_found_with_cursor = if let Some(cursor) = opts.cursor.as_mut() { - self.fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) - .await - } else { - None - }; + let mut futures: FuturesUnordered>>>> = + FuturesUnordered::new(); + if let Some(recv) = opts.tx_id_recv.as_mut() { + let fut = Box::pin(self.fetch_logs_from_receiver(recv, &stored_logs_metric)); + futures.push(fut as _); + } + if let Some(cursor) = opts.cursor.as_mut() { + let fut = Box::pin(self.fetch_logs_with_cursor( + cursor, + &stored_logs_metric, + &indexed_height_metric, + )); + futures.push(fut as _); + } + // `FuturesUnordered::next` will return the first future that resolves, regardless of the order + // in which they were pushed to `FuturesUnordered`. 
// If we didn't find any logs, sleep for a while - if logs_found_from_receiver.unwrap_or_default() - + logs_found_with_cursor.unwrap_or_default() - == 0 - { + let logs_found = futures.next().await.flatten(); + if logs_found.unwrap_or_default() == 0 { sleep(SLEEP_DURATION).await; } } @@ -156,47 +159,51 @@ where stored_logs_metric: &GenericCounter, indexed_height_metric: &GenericGauge, ) -> Option { - indexed_height_metric.set(cursor.latest_queried_block() as i64); - let (action, eta) = match cursor.next_action().await { - Ok((action, eta)) => (action, eta), - Err(err) => { - warn!(?err, "Error getting next action"); - return None; - } - }; - match action { - // Use `loop` but always break - this allows for returning a value - // from the loop (the sleep duration) - #[allow(clippy::never_loop)] - CursorAction::Query(range) => { - debug!(?range, "Looking for for events in index range"); - - let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { - Ok(logs) => logs, - Err(err) => { - warn!(?err, ?range, "Error fetching logs in range"); - return None; - } - }; - - let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; - let logs_found = logs.len() as u64; - info!( - ?range, - num_logs = logs_found, - estimated_time_to_sync = fmt_sync_time(eta), - "Found log(s) in index range" - ); - - // Update cursor - if let Err(err) = cursor.update(logs, range).await { - warn!(?err, "Error updating cursor"); - return None; - }; - return Some(logs_found); - } - CursorAction::Sleep => return None, - }; + loop { + indexed_height_metric.set(cursor.latest_queried_block() as i64); + let (action, eta) = match cursor.next_action().await { + Ok((action, eta)) => (action, eta), + Err(err) => { + warn!(?err, "Error getting next action"); + sleep(SLEEP_DURATION).await; + continue; + } + }; + let sleep_duration = match action { + // Use `loop` but always break - this allows for returning a value + // from the loop (the sleep duration) + 
#[allow(clippy::never_loop)] + CursorAction::Query(range) => loop { + debug!(?range, "Looking for for events in index range"); + + let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { + Ok(logs) => logs, + Err(err) => { + warn!(?err, ?range, "Error fetching logs in range"); + break SLEEP_DURATION; + } + }; + + let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let logs_found = logs.len() as u64; + info!( + ?range, + num_logs = logs_found, + estimated_time_to_sync = fmt_sync_time(eta), + "Found log(s) in index range" + ); + + // Update cursor + if let Err(err) = cursor.update(logs, range).await { + warn!(?err, "Error updating cursor"); + break SLEEP_DURATION; + }; + return Some(logs_found); + }, + CursorAction::Sleep(duration) => duration, + }; + sleep(sleep_duration).await; + } } async fn dedupe_and_store_logs( diff --git a/rust/hyperlane-core/src/traits/cursor.rs b/rust/hyperlane-core/src/traits/cursor.rs index 48d05c4402..cfe92b8dc4 100644 --- a/rust/hyperlane-core/src/traits/cursor.rs +++ b/rust/hyperlane-core/src/traits/cursor.rs @@ -36,14 +36,14 @@ pub enum CursorAction { /// Direct the contract_sync task to query a block range (inclusive) Query(RangeInclusive), /// Direct the contract_sync task to sleep for a duration - Sleep, + Sleep(Duration), } impl fmt::Debug for CursorAction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CursorAction::Query(range) => write!(f, "Query({:?})", range), - CursorAction::Sleep => write!(f, "Sleep"), + CursorAction::Sleep(duration) => write!(f, "Sleep({:?})", duration), } } } From 4d986aec9082612d46c9b5c2127a45201c5824c1 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Wed, 22 May 2024 18:50:07 +0100 Subject: [PATCH 03/17] some progress on sharing channels --- rust/agents/relayer/src/msg/op_queue.rs | 8 +- rust/agents/relayer/src/msg/op_submitter.rs | 6 +- rust/agents/relayer/src/relayer.rs | 54 +++++++-- 
rust/agents/relayer/src/server.rs | 4 +- rust/agents/scraper/src/agent.rs | 10 +- rust/agents/validator/src/validator.rs | 8 +- .../src/contracts/interchain_gas.rs | 51 ++++++++- rust/hyperlane-base/src/contract_sync/mod.rs | 103 ++++++++++-------- rust/hyperlane-base/src/settings/base.rs | 7 +- rust/hyperlane-core/src/types/channel.rs | 26 ++++- 10 files changed, 194 insertions(+), 83 deletions(-) diff --git a/rust/agents/relayer/src/msg/op_queue.rs b/rust/agents/relayer/src/msg/op_queue.rs index c71af566b0..b046cadba2 100644 --- a/rust/agents/relayer/src/msg/op_queue.rs +++ b/rust/agents/relayer/src/msg/op_queue.rs @@ -1,10 +1,10 @@ use std::{cmp::Reverse, collections::BinaryHeap, sync::Arc}; use derive_new::new; -use hyperlane_core::MpmcReceiver; +use hyperlane_core::BroadcastReceiver; use prometheus::{IntGauge, IntGaugeVec}; use tokio::sync::Mutex; -use tracing::{info, instrument}; +use tracing::{debug, info, instrument}; use crate::server::MessageRetryRequest; @@ -18,7 +18,7 @@ pub type QueueOperation = Box; pub struct OpQueue { metrics: IntGaugeVec, queue_metrics_label: String, - retry_rx: MpmcReceiver, + retry_rx: BroadcastReceiver, #[new(default)] queue: Arc>>>, } @@ -60,7 +60,7 @@ impl OpQueue { if !popped.is_empty() { debug!( queue_label = %self.queue_metrics_label, - operations = popped, + operations = ?popped, "Popped OpQueue operations" ); } diff --git a/rust/agents/relayer/src/msg/op_submitter.rs b/rust/agents/relayer/src/msg/op_submitter.rs index 4350baeff2..1a4dc983b6 100644 --- a/rust/agents/relayer/src/msg/op_submitter.rs +++ b/rust/agents/relayer/src/msg/op_submitter.rs @@ -13,8 +13,8 @@ use tracing::{info, warn}; use hyperlane_base::CoreMetrics; use hyperlane_core::{ - BatchItem, ChainCommunicationError, ChainResult, HyperlaneDomain, HyperlaneDomainProtocol, - HyperlaneMessage, MpmcReceiver, TxOutcome, + BatchItem, BroadcastReceiver, ChainCommunicationError, ChainResult, HyperlaneDomain, + HyperlaneDomainProtocol, HyperlaneMessage, TxOutcome, }; 
use crate::msg::pending_message::CONFIRM_DELAY; @@ -77,7 +77,7 @@ pub struct SerialSubmitter { /// Receiver for new messages to submit. rx: mpsc::UnboundedReceiver, /// Receiver for retry requests. - retry_rx: MpmcReceiver, + retry_rx: BroadcastReceiver, /// Metrics for serial submitter. metrics: SerialSubmitterMetrics, /// Max batch size for submitting messages diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index 8e4cee24ae..d2b39f8f3f 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -13,10 +13,11 @@ use hyperlane_base::{ metrics::{AgentMetrics, MetricsUpdater}, settings::ChainConf, BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore, + SyncOptions, }; use hyperlane_core::{ - HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, MerkleTreeInsertion, MpmcChannel, - MpmcReceiver, U256, + BroadcastReceiver, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, + MerkleTreeInsertion, MpmcChannel, H256, U256, }; use tokio::{ sync::{ @@ -132,7 +133,7 @@ impl BaseAgent for Relayer { // each of these `contract_syncs` will return a receiver of txid alongside // `contract_syncs` will also take a hashmap of domain -> recvs as an argument - let message_syncs = settings + let message_syncs: HashMap<_, Arc>> = settings .contract_syncs::( settings.origin_chains.iter(), &core_metrics, @@ -160,6 +161,7 @@ impl BaseAgent for Relayer { .map(|(k, v)| (k, v as _)) .collect(); + // set the receivers for each domain and implement the `fetch_logs_by_tx_hash` for igp and merkle let merkle_tree_hook_syncs = settings .contract_syncs::( settings.origin_chains.iter(), @@ -303,6 +305,12 @@ impl BaseAgent for Relayer { .instrument(info_span!("Relayer server")); tasks.push(server_task); + let txid_receivers = self + .message_syncs + .iter() + .map(|(k, v)| (k.clone(), v.get_new_receive_tx_channel())) + .collect::>(); + // send channels by destination chain let mut 
send_channels = HashMap::with_capacity(self.destination_chains.len()); for (dest_domain, dest_conf) in &self.destination_chains { @@ -337,8 +345,14 @@ impl BaseAgent for Relayer { for origin in &self.origin_chains { tasks.push(self.run_message_sync(origin).await); - tasks.push(self.run_interchain_gas_payment_sync(origin).await); - tasks.push(self.run_merkle_tree_hook_syncs(origin).await); + tasks.push( + self.run_interchain_gas_payment_sync(origin, txid_receivers) + .await, + ); + tasks.push( + self.run_merkle_tree_hook_syncs(origin, txid_receivers) + .await, + ); } // each message process attempts to send messages from a chain @@ -364,7 +378,7 @@ impl Relayer { tokio::spawn(async move { contract_sync .clone() - .sync("dispatched_messages", cursor) + .sync("dispatched_messages", cursor.into()) .await }) .instrument(info_span!("MessageSync")) @@ -373,6 +387,7 @@ impl Relayer { async fn run_interchain_gas_payment_sync( &self, origin: &HyperlaneDomain, + mut rxs: HashMap>, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index_settings(); let contract_sync = self @@ -381,19 +396,36 @@ impl Relayer { .unwrap() .clone(); let cursor = contract_sync.cursor(index_settings).await; - tokio::spawn(async move { contract_sync.clone().sync("gas_payments", cursor).await }) - .instrument(info_span!("IgpSync")) + tokio::spawn(async move { + contract_sync + .clone() + .sync( + "gas_payments", + SyncOptions::new(Some(cursor), rxs.remove(origin)), + ) + .await + }) + .instrument(info_span!("IgpSync")) } async fn run_merkle_tree_hook_syncs( &self, origin: &HyperlaneDomain, + mut rxs: HashMap>, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone(); let cursor = contract_sync.cursor(index_settings).await; - tokio::spawn(async move { contract_sync.clone().sync("merkle_tree_hook", cursor).await }) - 
.instrument(info_span!("MerkleTreeHookSync")) + tokio::spawn(async move { + contract_sync + .clone() + .sync( + "merkle_tree_hook", + SyncOptions::new(Some(cursor), rxs.remove(origin)), + ) + .await + }) + .instrument(info_span!("MerkleTreeHookSync")) } fn run_message_processor( @@ -457,7 +489,7 @@ impl Relayer { &self, destination: &HyperlaneDomain, receiver: UnboundedReceiver, - retry_receiver_channel: MpmcReceiver, + retry_receiver_channel: BroadcastReceiver, batch_size: u32, ) -> Instrumented> { let serial_submitter = SerialSubmitter::new( diff --git a/rust/agents/relayer/src/server.rs b/rust/agents/relayer/src/server.rs index 9f6936a222..88cad86326 100644 --- a/rust/agents/relayer/src/server.rs +++ b/rust/agents/relayer/src/server.rs @@ -109,10 +109,10 @@ mod tests { use super::*; use axum::http::StatusCode; use ethers::utils::hex::ToHex; - use hyperlane_core::{MpmcChannel, MpmcReceiver}; + use hyperlane_core::{BroadcastReceiver, MpmcChannel}; use std::net::SocketAddr; - fn setup_test_server() -> (SocketAddr, MpmcReceiver) { + fn setup_test_server() -> (SocketAddr, BroadcastReceiver) { let mpmc_channel = MpmcChannel::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); let message_retry_api = MessageRetryApi::new(mpmc_channel.sender()); let (path, retry_router) = message_retry_api.get_route(); diff --git a/rust/agents/scraper/src/agent.rs b/rust/agents/scraper/src/agent.rs index cc113cacfd..f69a938094 100644 --- a/rust/agents/scraper/src/agent.rs +++ b/rust/agents/scraper/src/agent.rs @@ -7,7 +7,9 @@ use hyperlane_base::{ metrics::AgentMetrics, settings::IndexSettings, BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, }; -use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment}; +use hyperlane_core::{ + BroadcastReceiver, Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H256, +}; use tokio::task::JoinHandle; use tracing::{info_span, instrument::Instrumented, trace, 
Instrument}; @@ -194,7 +196,7 @@ impl Scraper { .await .unwrap(); let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync("message_dispatch", cursor).await }).instrument( + tokio::spawn(async move { sync.sync("message_dispatch", cursor.into()).await }).instrument( info_span!("ChainContractSync", chain=%domain.name(), event="message_dispatch"), ) } @@ -221,7 +223,7 @@ impl Scraper { let label = "message_delivery"; let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync(label, cursor).await }) + tokio::spawn(async move { sync.sync(label, cursor.into()).await }) .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) } @@ -247,7 +249,7 @@ impl Scraper { let label = "gas_payment"; let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync(label, cursor).await }) + tokio::spawn(async move { sync.sync(label, cursor.into()).await }) .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) } } diff --git a/rust/agents/validator/src/validator.rs b/rust/agents/validator/src/validator.rs index 8d28980098..5985b97bf5 100644 --- a/rust/agents/validator/src/validator.rs +++ b/rust/agents/validator/src/validator.rs @@ -1,4 +1,4 @@ -use std::{num::NonZeroU64, sync::Arc, time::Duration}; +use std::{collections::HashMap, num::NonZeroU64, sync::Arc, time::Duration}; use crate::server as validator_server; use async_trait::async_trait; @@ -102,6 +102,7 @@ impl BaseAgent for Validator { &metrics, &contract_sync_metrics, msg_db.clone().into(), + &mut Default::default(), ) .await?; @@ -209,7 +210,10 @@ impl Validator { let contract_sync = self.merkle_tree_hook_sync.clone(); let cursor = contract_sync.cursor(index_settings).await; tokio::spawn(async move { - contract_sync.clone().sync("merkle_tree_hook", cursor).await; + contract_sync + .clone() + .sync("merkle_tree_hook", cursor.into()) + .await; }) 
.instrument(info_span!("MerkleTreeHookSyncer")) } diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index ec2f572de3..898760645d 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -6,7 +6,10 @@ use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; +use ethers::abi::RawLog; use ethers::prelude::Middleware; +use ethers_contract::{ContractError, EthLogDecode, LogMeta as EthersLogMeta}; +use ethers_core::types::H256 as EthersH256; use hyperlane_core::{ ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, @@ -15,7 +18,8 @@ use hyperlane_core::{ use tracing::instrument; use crate::interfaces::i_interchain_gas_paymaster::{ - IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, IINTERCHAINGASPAYMASTER_ABI, + GasPaymentFilter, IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, + IINTERCHAINGASPAYMASTER_ABI, }; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider}; @@ -124,6 +128,51 @@ where .as_u32() .saturating_sub(self.reorg_period)) } + + async fn fetch_logs_by_tx_hash( + &self, + tx_hash: H256, + ) -> ChainResult, LogMeta)>> { + let ethers_tx_hash: EthersH256 = tx_hash.into(); + let receipt = self + .provider + .get_transaction_receipt(ethers_tx_hash) + .await + .map_err(|err| ContractError::::MiddlewareError(err))?; + let Some(receipt) = receipt else { + return Ok(vec![]); + }; + + let logs: Vec<_> = receipt + .logs + .into_iter() + .filter_map(|log| { + let raw_log = RawLog { + topics: log.topics.clone(), + data: log.data.to_vec(), + }; + let log_meta: EthersLogMeta = (&log).into(); + let gas_payment_filter = GasPaymentFilter::decode_log(&raw_log).ok(); + gas_payment_filter.map(|log| { + ( + 
Indexed::new(InterchainGasPayment { + message_id: H256::from(log.message_id), + destination: log.destination_domain, + payment: log.payment.into(), + gas_amount: log.gas_amount.into(), + }), + log_meta.into(), + ) + }) + }) + .collect(); + println!( + "~~~ found igp logs with tx id {:?}: {:?}", + tx_hash, + logs.len() + ); + Ok(logs) + } } #[async_trait] diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index c2ce67772a..35baab0035 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -13,10 +13,10 @@ use hyperlane_core::{ HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, SequenceAwareIndexer, H256, }; -use hyperlane_core::{Indexed, LogMeta}; +use hyperlane_core::{BroadcastReceiver, Indexed, LogMeta}; pub use metrics::ContractSyncMetrics; use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; -use tokio::sync::broadcast::{Receiver as BroadcastReceiver, Sender as BroadcastSender}; +use tokio::sync::broadcast::Sender as BroadcastSender; use tokio::time::sleep; use tracing::{debug, info, warn}; @@ -30,43 +30,36 @@ use cursors::ForwardBackwardSequenceAwareSyncCursor; const SLEEP_DURATION: Duration = Duration::from_secs(5); -#[derive(Debug)] -pub enum BroadcastChannel { - Transmit(BroadcastSender), - Receive(BroadcastReceiver), -} - -impl BroadcastChannel { - pub fn transmit(&self, tx_id: H256) { - if let BroadcastChannel::Transmit(sender) = self { - if let Err(err) = sender.send(tx_id) { - warn!(?err, "Error broadcasting txid"); - } - } - } - - pub async fn receive(&mut self) -> Option { - if let BroadcastChannel::Receive(recv) = self { - recv.recv().await.ok() - } else { - None - } - } -} +// H256 * 1M = 32MB per origin chain worst case +// With one such channel per origin chain. +const TX_ID_CHANNEL_CAPACITY: usize = 1_000_000; /// Entity that drives the syncing of an agent's db with on-chain data. 
/// Extracts chain-specific data (emitted checkpoints, messages, etc) from an /// `indexer` and fills the agent's db with this data. -#[derive(Debug, new)] +#[derive(Debug)] pub struct ContractSync, I: Indexer> { domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics, - tx_id_channel: BroadcastChannel, + broadcast_sender: BroadcastSender, _phantom: PhantomData, } +impl, I: Indexer> ContractSync { + pub fn new(domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics) -> Self { + Self { + domain, + db, + indexer, + metrics, + broadcast_sender: BroadcastSender::new(TX_ID_CHANNEL_CAPACITY), + _phantom: PhantomData, + } + } +} + impl ContractSync where T: Debug + Send + Sync + Clone + Eq + Hash + 'static, @@ -78,11 +71,9 @@ where &self.domain } - async fn get_receive_tx_channel(&self) -> Option> { - match &self.tx_id_channel { - BroadcastChannel::Transmit(tx) => Some(tx.subscribe()), - _ => None, - } + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + let tx = &self.broadcast_sender; + BroadcastReceiver::new(tx.clone(), tx.subscribe()) } /// Sync logs and write them to the LogStore @@ -106,7 +97,7 @@ where // is Some(...). // any sleeps should occur in this loop. We don't want to block in either the recv arm or in the // cursor.next_action() arm. 
- let mut futures: FuturesUnordered>>>> = + let mut futures: FuturesUnordered> + Send>>> = FuturesUnordered::new(); if let Some(recv) = opts.tx_id_recv.as_mut() { let fut = Box::pin(self.fetch_logs_from_receiver(recv, &stored_logs_metric)); @@ -136,13 +127,16 @@ where recv: &mut BroadcastReceiver, stored_logs_metric: &GenericCounter, ) -> Option { + println!("~~~ fetch_logs_from_receiver"); if let Ok(tx_id) = recv.recv().await { + println!("~~~ tx_id: {:?}", tx_id); // query receipts for tx_id let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { Ok(logs) => logs, Err(err) => { warn!(?err, ?tx_id, "Error fetching logs for tx id"); - vec![] + sleep(SLEEP_DURATION).await; + return None; } }; let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; @@ -251,26 +245,33 @@ pub trait ContractSyncer: Send + Sync { fn domain(&self) -> &HyperlaneDomain; /// If this syncer is also a broadcaster, return the channel to receive txids - async fn get_receive_tx_channel(&self) -> Option>; + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver; /// Set the channel to receive txids - async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) {} + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver); - async fn receive_tx_to_index(&self) -> Option { - None - } + // async fn receive_tx_to_index(&self) -> Option { + // None + // } } -struct SyncOptions -// where -// T: Debug + Send + Sync + Clone + Eq + Hash + 'static, -{ +#[derive(new)] +pub struct SyncOptions { // Keep as optional fields for now to run them simultaneously. // Might want to refactor into an enum later. 
cursor: Option>>, tx_id_recv: Option>, } +impl From>> for SyncOptions { + fn from(cursor: Box>) -> Self { + Self { + cursor: Some(cursor), + tx_id_recv: None, + } + } +} + #[async_trait] impl ContractSyncer for WatermarkContractSync where @@ -304,8 +305,12 @@ where ContractSync::domain(self) } - async fn get_receive_tx_channel(&self) -> Option> { - ContractSync::get_receive_tx_channel(self).await + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + ContractSync::get_new_receive_tx_channel(self) + } + + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { + ContractSync::set_receive_tx_channel(self, channel).await } } @@ -343,7 +348,11 @@ where ContractSync::domain(self) } - async fn get_receive_tx_channel(&self) -> Option> { - ContractSync::get_receive_tx_channel(self).await + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + ContractSync::get_new_receive_tx_channel(self) + } + + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { + ContractSync::set_receive_tx_channel(self, channel).await } } diff --git a/rust/hyperlane-base/src/settings/base.rs b/rust/hyperlane-base/src/settings/base.rs index eafb8c5667..4cf7c458dd 100644 --- a/rust/hyperlane-base/src/settings/base.rs +++ b/rust/hyperlane-base/src/settings/base.rs @@ -1,12 +1,13 @@ -use std::{collections::HashMap, fmt::Debug, hash::Hash, sync::Arc}; +use std::{borrow::BorrowMut, collections::HashMap, fmt::Debug, hash::Hash, sync::Arc}; use eyre::{eyre, Context, Result}; use futures_util::future::try_join_all; use hyperlane_core::{ - HyperlaneChain, HyperlaneDomain, HyperlaneLogStore, HyperlaneProvider, + BroadcastReceiver, HyperlaneChain, HyperlaneDomain, HyperlaneLogStore, HyperlaneProvider, HyperlaneSequenceAwareIndexerStoreReader, HyperlaneWatermarkedLogStore, InterchainGasPaymaster, Mailbox, MerkleTreeHook, MultisigIsm, SequenceAwareIndexer, ValidatorAnnounce, H256, }; +use tokio::sync::broadcast::Sender as BroadcastSender; use crate::{ 
cursors::{CursorType, Indexable}, @@ -172,7 +173,6 @@ impl Settings { db.clone() as SequenceAwareLogStore<_>, indexer, sync_metrics.clone(), - None, ))) } @@ -197,7 +197,6 @@ impl Settings { db.clone() as WatermarkLogStore<_>, indexer, sync_metrics.clone(), - None, ))) } diff --git a/rust/hyperlane-core/src/types/channel.rs b/rust/hyperlane-core/src/types/channel.rs index 2a0bbb8974..59f81c62c8 100644 --- a/rust/hyperlane-core/src/types/channel.rs +++ b/rust/hyperlane-core/src/types/channel.rs @@ -1,10 +1,12 @@ +use std::ops::{Deref, DerefMut}; + use derive_new::new; use tokio::sync::broadcast::{Receiver, Sender}; /// Multi-producer, multi-consumer channel pub struct MpmcChannel { sender: Sender, - receiver: MpmcReceiver, + receiver: BroadcastReceiver, } impl MpmcChannel { @@ -17,7 +19,7 @@ impl MpmcChannel { let (sender, receiver) = tokio::sync::broadcast::channel(capacity); Self { sender: sender.clone(), - receiver: MpmcReceiver::new(sender, receiver), + receiver: BroadcastReceiver::new(sender, receiver), } } @@ -27,20 +29,20 @@ impl MpmcChannel { } /// Returns a clone of the receiver end of the channel. - pub fn receiver(&self) -> MpmcReceiver { + pub fn receiver(&self) -> BroadcastReceiver { self.receiver.clone() } } /// Clonable receiving end of a multi-producer, multi-consumer channel #[derive(Debug, new)] -pub struct MpmcReceiver { +pub struct BroadcastReceiver { sender: Sender, /// The receiving end of the channel. 
pub receiver: Receiver, } -impl Clone for MpmcReceiver { +impl Clone for BroadcastReceiver { fn clone(&self) -> Self { Self { sender: self.sender.clone(), @@ -48,3 +50,17 @@ impl Clone for MpmcReceiver { } } } + +impl Deref for BroadcastReceiver { + type Target = Receiver; + + fn deref(&self) -> &Self::Target { + &self.receiver + } +} + +impl DerefMut for BroadcastReceiver { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.receiver + } +} From 56b04c5ae638371621ab6e407161c9aa8ae9b608 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Thu, 23 May 2024 01:14:48 +0100 Subject: [PATCH 04/17] progress on passing txidx between tasks --- rust/agents/relayer/src/relayer.rs | 16 ++++++----- rust/agents/validator/src/validator.rs | 1 - .../src/contracts/interchain_gas.rs | 4 +-- .../src/contracts/mailbox.rs | 3 +- rust/hyperlane-base/src/contract_sync/mod.rs | 28 +++++++++++-------- rust/hyperlane-core/src/traits/indexer.rs | 4 +-- 6 files changed, 32 insertions(+), 24 deletions(-) diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index d2b39f8f3f..38d03ee9de 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -17,7 +17,7 @@ use hyperlane_base::{ }; use hyperlane_core::{ BroadcastReceiver, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, - MerkleTreeInsertion, MpmcChannel, H256, U256, + MerkleTreeInsertion, MpmcChannel, H256, H512, U256, }; use tokio::{ sync::{ @@ -346,11 +346,11 @@ impl BaseAgent for Relayer { for origin in &self.origin_chains { tasks.push(self.run_message_sync(origin).await); tasks.push( - self.run_interchain_gas_payment_sync(origin, txid_receivers) + self.run_interchain_gas_payment_sync(origin, txid_receivers.clone()) .await, ); tasks.push( - self.run_merkle_tree_hook_syncs(origin, txid_receivers) + self.run_merkle_tree_hook_syncs(origin, txid_receivers.clone()) .await, ); } @@ -387,7 +387,7 @@ impl Relayer { async 
fn run_interchain_gas_payment_sync( &self, origin: &HyperlaneDomain, - mut rxs: HashMap>, + mut rxs: HashMap>, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index_settings(); let contract_sync = self @@ -396,12 +396,13 @@ impl Relayer { .unwrap() .clone(); let cursor = contract_sync.cursor(index_settings).await; + let origin_chain = origin.clone(); tokio::spawn(async move { contract_sync .clone() .sync( "gas_payments", - SyncOptions::new(Some(cursor), rxs.remove(origin)), + SyncOptions::new(Some(cursor), rxs.remove(&origin_chain)), ) .await }) @@ -411,17 +412,18 @@ impl Relayer { async fn run_merkle_tree_hook_syncs( &self, origin: &HyperlaneDomain, - mut rxs: HashMap>, + mut rxs: HashMap>, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone(); let cursor = contract_sync.cursor(index_settings).await; + let origin_chain = origin.clone(); tokio::spawn(async move { contract_sync .clone() .sync( "merkle_tree_hook", - SyncOptions::new(Some(cursor), rxs.remove(origin)), + SyncOptions::new(Some(cursor), rxs.remove(&origin_chain)), ) .await }) diff --git a/rust/agents/validator/src/validator.rs b/rust/agents/validator/src/validator.rs index 5985b97bf5..ab35c0ef88 100644 --- a/rust/agents/validator/src/validator.rs +++ b/rust/agents/validator/src/validator.rs @@ -102,7 +102,6 @@ impl BaseAgent for Validator { &metrics, &contract_sync_metrics, msg_db.clone().into(), - &mut Default::default(), ) .await?; diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 898760645d..93d1a2da22 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -13,7 +13,7 @@ use ethers_core::types::H256 as EthersH256; use hyperlane_core::{ 
ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, - InterchainGasPaymaster, InterchainGasPayment, LogMeta, SequenceAwareIndexer, H160, H256, + InterchainGasPaymaster, InterchainGasPayment, LogMeta, SequenceAwareIndexer, H160, H256, H512, }; use tracing::instrument; @@ -131,7 +131,7 @@ where async fn fetch_logs_by_tx_hash( &self, - tx_hash: H256, + tx_hash: H512, ) -> ChainResult, LogMeta)>> { let ethers_tx_hash: EthersH256 = tx_hash.into(); let receipt = self diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs index 05753f07da..683ef69c2f 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs @@ -12,6 +12,7 @@ use ethers::prelude::Middleware; use ethers_contract::{builders::ContractCall, ContractError, EthEvent, LogMeta as EthersLogMeta}; use ethers_core::types::H256 as EthersH256; use futures_util::future::join_all; +use hyperlane_core::H512; use tracing::instrument; use hyperlane_core::{ @@ -162,7 +163,7 @@ where async fn fetch_logs_by_tx_hash( &self, - tx_hash: H256, + tx_hash: H512, ) -> ChainResult, LogMeta)>> { let ethers_tx_hash: EthersH256 = tx_hash.into(); let receipt = self diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 35baab0035..16407e913d 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -13,7 +13,7 @@ use hyperlane_core::{ HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, SequenceAwareIndexer, H256, }; -use hyperlane_core::{BroadcastReceiver, Indexed, LogMeta}; +use hyperlane_core::{BroadcastReceiver, Indexed, LogMeta, H512}; pub use metrics::ContractSyncMetrics; use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; use 
tokio::sync::broadcast::Sender as BroadcastSender; @@ -43,7 +43,7 @@ pub struct ContractSync, I: Indexer> { db: D, indexer: I, metrics: ContractSyncMetrics, - broadcast_sender: BroadcastSender, + broadcast_sender: BroadcastSender, _phantom: PhantomData, } @@ -71,7 +71,7 @@ where &self.domain } - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { let tx = &self.broadcast_sender; BroadcastReceiver::new(tx.clone(), tx.subscribe()) } @@ -124,7 +124,7 @@ where async fn fetch_logs_from_receiver( &self, - recv: &mut BroadcastReceiver, + recv: &mut BroadcastReceiver, stored_logs_metric: &GenericCounter, ) -> Option { println!("~~~ fetch_logs_from_receiver"); @@ -187,6 +187,12 @@ where "Found log(s) in index range" ); + logs.iter().for_each(|(_, meta)| { + if let Err(err) = self.broadcast_sender.send(meta.transaction_id) { + warn!(?err, "Error sending txid to receiver"); + } + }); + // Update cursor if let Err(err) = cursor.update(logs, range).await { warn!(?err, "Error updating cursor"); @@ -245,10 +251,10 @@ pub trait ContractSyncer: Send + Sync { fn domain(&self) -> &HyperlaneDomain; /// If this syncer is also a broadcaster, return the channel to receive txids - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver; + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver; /// Set the channel to receive txids - async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver); + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver); // async fn receive_tx_to_index(&self) -> Option { // None @@ -260,7 +266,7 @@ pub struct SyncOptions { // Keep as optional fields for now to run them simultaneously. // Might want to refactor into an enum later. 
cursor: Option>>, - tx_id_recv: Option>, + tx_id_recv: Option>, } impl From>> for SyncOptions { @@ -305,11 +311,11 @@ where ContractSync::domain(self) } - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { ContractSync::get_new_receive_tx_channel(self) } - async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { ContractSync::set_receive_tx_channel(self, channel).await } } @@ -348,11 +354,11 @@ where ContractSync::domain(self) } - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { ContractSync::get_new_receive_tx_channel(self) } - async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { + async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { ContractSync::set_receive_tx_channel(self, channel).await } } diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index 3acac87ea6..6eee33aee1 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ b/rust/hyperlane-core/src/traits/indexer.rs @@ -11,7 +11,7 @@ use async_trait::async_trait; use auto_impl::auto_impl; use serde::Deserialize; -use crate::{ChainResult, Indexed, LogMeta, H256}; +use crate::{ChainResult, Indexed, LogMeta, H256, H512}; /// Indexing mode. #[derive(Copy, Debug, Default, Deserialize, Clone)] @@ -40,7 +40,7 @@ pub trait Indexer: Send + Sync + Debug { /// Fetch list of logs emitted in a transaction with the given hash. 
async fn fetch_logs_by_tx_hash( &self, - _tx_hash: H256, + _tx_hash: H512, ) -> ChainResult, LogMeta)>> { Err(eyre::eyre!("fetch_logs_by_tx_hash not implemented").into()) } From 5ea0be18e370c7bd359568e10c506c1b7333b2fa Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Thu, 23 May 2024 18:48:38 +0100 Subject: [PATCH 05/17] wip --- rust/Cargo.lock | 1 + rust/agents/relayer/src/msg/processor.rs | 5 + .../src/contracts/interchain_gas.rs | 1 + rust/hyperlane-base/Cargo.toml | 1 + .../src/contract_sync/cursors/rate_limited.rs | 10 + .../cursors/sequence_aware/mod.rs | 1 + rust/hyperlane-base/src/contract_sync/mod.rs | 198 ++++++++++++------ rust/hyperlane-core/src/traits/cursor.rs | 4 +- rust/hyperlane-core/src/traits/indexer.rs | 3 +- rust/utils/run-locally/src/invariants.rs | 15 +- rust/utils/run-locally/src/main.rs | 65 +++--- 11 files changed, 193 insertions(+), 111 deletions(-) diff --git a/rust/Cargo.lock b/rust/Cargo.lock index d8b69f4ea4..536be3daf5 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -4135,6 +4135,7 @@ dependencies = [ "itertools 0.12.0", "maplit", "mockall", + "num-traits", "paste", "prometheus", "reqwest", diff --git a/rust/agents/relayer/src/msg/processor.rs b/rust/agents/relayer/src/msg/processor.rs index 3aae0d308c..5073892a29 100644 --- a/rust/agents/relayer/src/msg/processor.rs +++ b/rust/agents/relayer/src/msg/processor.rs @@ -117,6 +117,11 @@ impl ProcessorExt for MessageProcessor { impl MessageProcessor { fn try_get_unprocessed_message(&mut self) -> Result> { loop { + println!( + "~~~ trying to get unprocessed message for domain and nonce {:?} {:?}", + self.domain(), + self.message_nonce + ); // First, see if we can find the message so we can update the gauge. if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? 
{ // Update the latest nonce gauges diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 93d1a2da22..231d919088 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -139,6 +139,7 @@ where .get_transaction_receipt(ethers_tx_hash) .await .map_err(|err| ContractError::::MiddlewareError(err))?; + println!("~~~ igp receipt: {:?}", receipt); let Some(receipt) = receipt else { return Ok(vec![]); }; diff --git a/rust/hyperlane-base/Cargo.toml b/rust/hyperlane-base/Cargo.toml index 97d84b6221..0ed74431fe 100644 --- a/rust/hyperlane-base/Cargo.toml +++ b/rust/hyperlane-base/Cargo.toml @@ -27,6 +27,7 @@ futures-util.workspace = true itertools.workspace = true maplit.workspace = true mockall.worksapce = true +num-traits.workspace = true paste.workspace = true prometheus.workspace = true rocksdb.workspace = true diff --git a/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs b/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs index 86383dd740..242028acb4 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs @@ -216,6 +216,16 @@ where } } +impl Debug for RateLimitedContractSyncCursor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RateLimitedContractSyncCursor") + .field("tip", &self.tip) + .field("last_tip_update", &self.last_tip_update) + .field("sync_state", &self.sync_state) + .finish() + } +} + #[cfg(test)] pub(crate) mod test { use super::*; diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs index d3abb4384c..9303438b00 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs +++ 
b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs @@ -62,6 +62,7 @@ pub enum SyncDirection { /// A cursor that prefers to sync forward, but will sync backward if there is nothing to /// sync forward. +#[derive(Debug)] pub(crate) struct ForwardBackwardSequenceAwareSyncCursor { forward: ForwardSequenceAwareSyncCursor, backward: BackwardSequenceAwareSyncCursor, diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 16407e913d..eb788a8890 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -1,4 +1,3 @@ -use std::pin::Pin; use std::{ collections::HashSet, fmt::Debug, hash::Hash, marker::PhantomData, sync::Arc, time::Duration, }; @@ -6,15 +5,15 @@ use std::{ use axum::async_trait; use cursors::*; use derive_new::new; -use futures::stream::FuturesUnordered; -use futures::{Future, StreamExt}; +use fuels::programs::logs; use hyperlane_core::{ utils::fmt_sync_time, ContractSyncCursor, CursorAction, HyperlaneDomain, HyperlaneLogStore, HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, - SequenceAwareIndexer, H256, + SequenceAwareIndexer, }; use hyperlane_core::{BroadcastReceiver, Indexed, LogMeta, H512}; pub use metrics::ContractSyncMetrics; +use num_traits::Zero; use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; use tokio::sync::broadcast::Sender as BroadcastSender; use tokio::time::sleep; @@ -34,6 +33,11 @@ const SLEEP_DURATION: Duration = Duration::from_secs(5); // With one such channel per origin chain. const TX_ID_CHANNEL_CAPACITY: usize = 1_000_000; +enum LogsOrSleepDuration { + Logs(u64), + Sleep(Duration), +} + /// Entity that drives the syncing of an agent's db with on-chain data. /// Extracts chain-specific data (emitted checkpoints, messages, etc) from an /// `indexer` and fills the agent's db with this data. 
@@ -93,67 +97,28 @@ where // in here, we check to see whether the recv end of the channel received any txid to query receipts for // the recv end is defined as an Option - // what's below is to be turned into an async function that is only called if `cursor` - // is Some(...). - // any sleeps should occur in this loop. We don't want to block in either the recv arm or in the - // cursor.next_action() arm. - let mut futures: FuturesUnordered> + Send>>> = - FuturesUnordered::new(); + let mut logs_found = 0; + // // let mut sleep_duration = SLEEP_DURATION; if let Some(recv) = opts.tx_id_recv.as_mut() { - let fut = Box::pin(self.fetch_logs_from_receiver(recv, &stored_logs_metric)); - futures.push(fut as _); - } - if let Some(cursor) = opts.cursor.as_mut() { - let fut = Box::pin(self.fetch_logs_with_cursor( - cursor, - &stored_logs_metric, - &indexed_height_metric, - )); - futures.push(fut as _); - } - - // `FuturesUnordered::next` will return the first future that resolves, regardless of the order - // in which they were pushed to `FuturesUnordered`. 
- // If we didn't find any logs, sleep for a while - let logs_found = futures.next().await.flatten(); - if logs_found.unwrap_or_default() == 0 { - sleep(SLEEP_DURATION).await; + // logs_found += self + // .fetch_logs_from_receiver(recv, &stored_logs_metric) + // .await; } - } - } - - async fn fetch_logs_from_receiver( - &self, - recv: &mut BroadcastReceiver, - stored_logs_metric: &GenericCounter, - ) -> Option { - println!("~~~ fetch_logs_from_receiver"); - if let Ok(tx_id) = recv.recv().await { - println!("~~~ tx_id: {:?}", tx_id); - // query receipts for tx_id - let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { - Ok(logs) => logs, - Err(err) => { - warn!(?err, ?tx_id, "Error fetching logs for tx id"); - sleep(SLEEP_DURATION).await; - return None; - } - }; - let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; - let logs_found = logs.len() as u64; - info!(num_logs = logs_found, ?tx_id, "Found log(s) for tx id"); - return Some(logs_found); - } - None - } - - async fn fetch_logs_with_cursor( - &self, - cursor: &mut Box>, - stored_logs_metric: &GenericCounter, - indexed_height_metric: &GenericGauge, - ) -> Option { - loop { + // if let Some(cursor) = opts.cursor.as_mut() { + // match self + // .fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) + // .await + // { + // LogsOrSleepDuration::Logs(found) => logs_found += found, + // LogsOrSleepDuration::Sleep(duration) => sleep_duration = duration, + // } + // } + + // if logs_found.is_zero() { + // sleep(sleep_duration).await; + // } + info!("~~~ looping"); + let cursor = opts.cursor.as_mut().unwrap(); indexed_height_metric.set(cursor.latest_queried_block() as i64); let (action, eta) = match cursor.next_action().await { Ok((action, eta)) => (action, eta), @@ -173,32 +138,33 @@ where let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { Ok(logs) => logs, Err(err) => { - warn!(?err, ?range, "Error fetching logs in range"); + warn!(?err, "Error 
fetching logs"); break SLEEP_DURATION; } }; - let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; let logs_found = logs.len() as u64; info!( ?range, num_logs = logs_found, estimated_time_to_sync = fmt_sync_time(eta), + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + cursor = ?cursor, "Found log(s) in index range" ); - + logs.iter().for_each(|(_, meta)| { if let Err(err) = self.broadcast_sender.send(meta.transaction_id) { warn!(?err, "Error sending txid to receiver"); } }); - + // Report amount of deliveries stored into db // Update cursor if let Err(err) = cursor.update(logs, range).await { warn!(?err, "Error updating cursor"); break SLEEP_DURATION; }; - return Some(logs_found); + break Default::default(); }, CursorAction::Sleep(duration) => duration, }; @@ -206,6 +172,89 @@ where } } + async fn fetch_logs_from_receiver( + &self, + recv: &mut BroadcastReceiver, + stored_logs_metric: &GenericCounter, + ) -> u64 { + println!("~~~ fetch_logs_from_receiver"); + let mut logs_found = 0; + while let Ok(tx_id) = recv.recv().await { + println!("~~~ tx_id: {:?}", tx_id); + // query receipts for tx_id + // let logs = vec![]; + // let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { + // Ok(logs) => logs, + // Err(err) => { + // warn!(?err, ?tx_id, "Error fetching logs for tx id"); + // continue; + // } + // }; + // let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + // let num_logs = logs.len() as u64; + info!(num_logs = logs_found, ?tx_id, "Found log(s) for tx id"); + // logs_found += num_logs; + } + logs_found + } + + async fn fetch_logs_with_cursor( + &self, + cursor: &mut Box>, + stored_logs_metric: &GenericCounter, + indexed_height_metric: &GenericGauge, + ) -> LogsOrSleepDuration { + indexed_height_metric.set(cursor.latest_queried_block() as i64); + let (action, eta) = match cursor.next_action().await { + Ok((action, eta)) => (action, eta), + Err(err) => { + warn!(?err, "Error getting next 
action"); + return LogsOrSleepDuration::Sleep(SLEEP_DURATION); + } + }; + match action { + // Use `loop` but always break - this allows for returning a value + // from the loop (the sleep duration) + #[allow(clippy::never_loop)] + CursorAction::Query(range) => loop { + debug!(?range, "Looking for for events in index range"); + + let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { + Ok(logs) => logs, + Err(err) => { + warn!(?err, ?range, "Error fetching logs in range"); + return LogsOrSleepDuration::Sleep(SLEEP_DURATION); + } + }; + + let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let logs_found = logs.len() as u64; + info!( + ?range, + num_logs = logs_found, + estimated_time_to_sync = fmt_sync_time(eta), + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + cursor = ?cursor, + "Found log(s) in index range" + ); + + logs.iter().for_each(|(_, meta)| { + if let Err(err) = self.broadcast_sender.send(meta.transaction_id) { + warn!(?err, "Error sending txid to receiver"); + } + }); + + // Update cursor + if let Err(err) = cursor.update(logs, range).await { + warn!(?err, "Error updating cursor"); + return LogsOrSleepDuration::Sleep(SLEEP_DURATION); + }; + return LogsOrSleepDuration::Logs(logs_found); + }, + CursorAction::Sleep(duration) => return LogsOrSleepDuration::Sleep(duration), + }; + } + async fn dedupe_and_store_logs( &self, logs: Vec<(Indexed, LogMeta)>, @@ -216,7 +265,18 @@ where // Store deliveries let stored = match self.db.store_logs(&logs).await { - Ok(stored) => stored, + Ok(stored) => { + if stored > 0 { + println!( + "~~~ stored logs in db. 
domain: {:?}, Len: {:?}, sequenes: {:?}, logs: {:?}", + self.domain, + stored, + logs.iter().map(|(log, _)| log.sequence).collect::>(), + logs + ); + } + stored + } Err(err) => { warn!(?err, "Error storing logs in db"); Default::default() diff --git a/rust/hyperlane-core/src/traits/cursor.rs b/rust/hyperlane-core/src/traits/cursor.rs index cfe92b8dc4..fe44fe044b 100644 --- a/rust/hyperlane-core/src/traits/cursor.rs +++ b/rust/hyperlane-core/src/traits/cursor.rs @@ -1,4 +1,4 @@ -use std::{fmt, ops::RangeInclusive, time::Duration}; +use std::{fmt::{self, Debug}, ops::RangeInclusive, time::Duration}; use async_trait::async_trait; use auto_impl::auto_impl; @@ -9,7 +9,7 @@ use crate::{Indexed, LogMeta}; /// A cursor governs event indexing for a contract. #[async_trait] #[auto_impl(Box)] -pub trait ContractSyncCursor: Send + Sync + 'static { +pub trait ContractSyncCursor: Debug + Send + Sync + 'static { /// The next block range that should be queried. /// This method should be tolerant to being called multiple times in a row /// without any updates in between. 
diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index 6eee33aee1..f5c11ffa2a 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ b/rust/hyperlane-core/src/traits/indexer.rs @@ -42,7 +42,8 @@ pub trait Indexer: Send + Sync + Debug { &self, _tx_hash: H512, ) -> ChainResult, LogMeta)>> { - Err(eyre::eyre!("fetch_logs_by_tx_hash not implemented").into()) + // Err(eyre::eyre!("fetch_logs_by_tx_hash not implemented").into()) + Ok(vec![]) } } diff --git a/rust/utils/run-locally/src/invariants.rs b/rust/utils/run-locally/src/invariants.rs index 6900210469..a1038e4362 100644 --- a/rust/utils/run-locally/src/invariants.rs +++ b/rust/utils/run-locally/src/invariants.rs @@ -19,11 +19,12 @@ pub const SOL_MESSAGES_EXPECTED: u32 = 20; pub fn termination_invariants_met( config: &Config, starting_relayer_balance: f64, - solana_cli_tools_path: &Path, - solana_config_path: &Path, + // solana_cli_tools_path: &Path, + // solana_config_path: &Path, ) -> eyre::Result { let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2; - let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; + let total_messages_expected = eth_messages_expected; + // let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! 
{})?; assert!(!lengths.is_empty(), "Could not find queue length metric"); @@ -76,10 +77,10 @@ pub fn termination_invariants_met( return Ok(false); } - if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { - log!("Solana termination invariants not met"); - return Ok(false); - } + // if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { + // log!("Solana termination invariants not met"); + // return Ok(false); + // } let dispatched_messages_scraped = fetch_metric( "9093", diff --git a/rust/utils/run-locally/src/main.rs b/rust/utils/run-locally/src/main.rs index a287b2bd1f..e8c74a7c65 100644 --- a/rust/utils/run-locally/src/main.rs +++ b/rust/utils/run-locally/src/main.rs @@ -219,7 +219,8 @@ fn main() -> ExitCode { .arg("defaultSigner.key", RELAYER_KEYS[2]) .arg( "relayChains", - "test1,test2,test3,sealeveltest1,sealeveltest2", + "test1,test2,test3", + // "test1,test2,test3,sealeveltest1,sealeveltest2", ); let base_validator_env = common_agent_env @@ -291,9 +292,9 @@ fn main() -> ExitCode { // Ready to run... 
// - let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); - state.data.push(Box::new(solana_path_tempdir)); - let solana_program_builder = build_solana_programs(solana_path.clone()); + // let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); + // state.data.push(Box::new(solana_path_tempdir)); + // let solana_program_builder = build_solana_programs(solana_path.clone()); // this task takes a long time in the CI so run it in parallel log!("Building rust..."); @@ -304,13 +305,13 @@ fn main() -> ExitCode { .arg("bin", "validator") .arg("bin", "scraper") .arg("bin", "init-db") - .arg("bin", "hyperlane-sealevel-client") + // .arg("bin", "hyperlane-sealevel-client") .filter_logs(|l| !l.contains("workspace-inheritance")) .run(); let start_anvil = start_anvil(config.clone()); - let solana_program_path = solana_program_builder.join(); + // let solana_program_path = solana_program_builder.join(); log!("Running postgres db..."); let postgres = Program::new("docker") @@ -325,15 +326,15 @@ fn main() -> ExitCode { build_rust.join(); - let solana_ledger_dir = tempdir().unwrap(); - let start_solana_validator = start_solana_test_validator( - solana_path.clone(), - solana_program_path, - solana_ledger_dir.as_ref().to_path_buf(), - ); + // let solana_ledger_dir = tempdir().unwrap(); + // let start_solana_validator = start_solana_test_validator( + // solana_path.clone(), + // solana_program_path, + // solana_ledger_dir.as_ref().to_path_buf(), + // ); - let (solana_config_path, solana_validator) = start_solana_validator.join(); - state.push_agent(solana_validator); + // let (solana_config_path, solana_validator) = start_solana_validator.join(); + // state.push_agent(solana_validator); state.push_agent(start_anvil.join()); // spawn 1st validator before any messages have been sent to test empty mailbox @@ -379,9 +380,9 @@ fn main() -> ExitCode { kathy_env_double_insertion.clone().run().join(); // Send some sealevel messages before spinning up 
the agents, to test the backward indexing cursor - for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { - initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); - } + // for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { + // initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); + // } // spawn the rest of the validators for (i, validator_env) in validator_envs.into_iter().enumerate().skip(1) { @@ -392,9 +393,9 @@ fn main() -> ExitCode { state.push_agent(relayer_env.spawn("RLY")); // Send some sealevel messages after spinning up the relayer, to test the forward indexing cursor - for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { - initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); - } + // for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { + // initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); + // } log!("Setup complete! Agents running in background..."); log!("Ctrl+C to end execution..."); @@ -416,8 +417,8 @@ fn main() -> ExitCode { if termination_invariants_met( &config, starting_relayer_balance, - &solana_path, - &solana_config_path, + // &solana_path, + // &solana_config_path, ) .unwrap_or(false) { @@ -434,16 +435,16 @@ fn main() -> ExitCode { // verify long-running tasks are still running for (name, child) in state.agents.iter_mut() { if let Some(status) = child.try_wait().unwrap() { - if !status.success() { - log!( - "Child process {} exited unexpectedly, with code {}. Shutting down", - name, - status.code().unwrap() - ); - failure_occurred = true; - SHUTDOWN.store(true, Ordering::Relaxed); - break; - } + // if !status.success() { + // log!( + // "Child process {} exited unexpectedly, with code {}. 
Shutting down", + // name, + // status.code().unwrap() + // ); + // failure_occurred = true; + // SHUTDOWN.store(true, Ordering::Relaxed); + // break; + // } } } From cc4a394ee0fdf97c4e622181dae27fc88b247478 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Thu, 23 May 2024 19:51:34 +0100 Subject: [PATCH 06/17] fix: don't await on empty channel --- .../src/contracts/merkle_tree_hook.rs | 50 ++++++++++++++++++- rust/hyperlane-base/src/contract_sync/mod.rs | 48 ++++++++++-------- rust/hyperlane-core/src/traits/cursor.rs | 6 ++- 3 files changed, 81 insertions(+), 23 deletions(-) diff --git a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs index 534c9a9a07..3390deb375 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs @@ -4,17 +4,22 @@ use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; +use ethers::abi::RawLog; use ethers::prelude::Middleware; +use ethers_contract::{ContractError, EthLogDecode, LogMeta as EthersLogMeta}; +use ethers_core::types::H256 as EthersH256; use hyperlane_core::accumulator::incremental::IncrementalMerkle; use tracing::instrument; use hyperlane_core::{ ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, LogMeta, - MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H256, + MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H256, H512, }; -use crate::interfaces::merkle_tree_hook::{MerkleTreeHook as MerkleTreeHookContract, Tree}; +use crate::interfaces::merkle_tree_hook::{ + InsertedIntoTreeFilter, MerkleTreeHook as MerkleTreeHookContract, Tree, +}; use crate::tx::call_with_lag; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider}; @@ -142,6 +147,47 @@ where 
.as_u32() .saturating_sub(self.reorg_period)) } + + async fn fetch_logs_by_tx_hash( + &self, + tx_hash: H512, + ) -> ChainResult, LogMeta)>> { + let ethers_tx_hash: EthersH256 = tx_hash.into(); + let receipt = self + .provider + .get_transaction_receipt(ethers_tx_hash) + .await + .map_err(|err| ContractError::::MiddlewareError(err))?; + println!("~~~ merkle hook receipt: {:?}", receipt); + let Some(receipt) = receipt else { + return Ok(vec![]); + }; + + let logs: Vec<_> = receipt + .logs + .into_iter() + .filter_map(|log| { + let raw_log = RawLog { + topics: log.topics.clone(), + data: log.data.to_vec(), + }; + let log_meta: EthersLogMeta = (&log).into(); + let gas_payment_filter = InsertedIntoTreeFilter::decode_log(&raw_log).ok(); + gas_payment_filter.map(|log| { + ( + MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), + log_meta.into(), + ) + }) + }) + .collect(); + println!( + "~~~ found merkle hook logs with tx id {:?}: {:?}", + tx_hash, + logs.len() + ); + Ok(logs) + } } #[async_trait] diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index eb788a8890..1005255125 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -100,9 +100,9 @@ where let mut logs_found = 0; // // let mut sleep_duration = SLEEP_DURATION; if let Some(recv) = opts.tx_id_recv.as_mut() { - // logs_found += self - // .fetch_logs_from_receiver(recv, &stored_logs_metric) - // .await; + logs_found += self + .fetch_logs_from_receiver(recv, &stored_logs_metric) + .await; } // if let Some(cursor) = opts.cursor.as_mut() { // match self @@ -118,7 +118,7 @@ where // sleep(sleep_duration).await; // } info!("~~~ looping"); - let cursor = opts.cursor.as_mut().unwrap(); + let cursor = opts.cursor.as_mut().unwrap(); indexed_height_metric.set(cursor.latest_queried_block() as i64); let (action, eta) = match cursor.next_action().await { Ok((action, eta)) => (action, eta), @@ 
-152,7 +152,7 @@ where cursor = ?cursor, "Found log(s) in index range" ); - + logs.iter().for_each(|(_, meta)| { if let Err(err) = self.broadcast_sender.send(meta.transaction_id) { warn!(?err, "Error sending txid to receiver"); @@ -179,21 +179,29 @@ where ) -> u64 { println!("~~~ fetch_logs_from_receiver"); let mut logs_found = 0; - while let Ok(tx_id) = recv.recv().await { - println!("~~~ tx_id: {:?}", tx_id); - // query receipts for tx_id - // let logs = vec![]; - // let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { - // Ok(logs) => logs, - // Err(err) => { - // warn!(?err, ?tx_id, "Error fetching logs for tx id"); - // continue; - // } - // }; - // let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; - // let num_logs = logs.len() as u64; - info!(num_logs = logs_found, ?tx_id, "Found log(s) for tx id"); - // logs_found += num_logs; + loop { + match recv.try_recv() { + Ok(tx_id) => { + println!("~~~ tx_id: {:?}", tx_id); + // query receipts for tx_id + // let logs = vec![]; + let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { + Ok(logs) => logs, + Err(err) => { + warn!(?err, ?tx_id, "Error fetching logs for tx id"); + continue; + } + }; + let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let num_logs = logs.len() as u64; + info!(num_logs = logs_found, ?tx_id, "Found log(s) for tx id"); + // logs_found += num_logs; + } + Err(err) => { + warn!(?err, "Error receiving txid from channel"); + break; + } + } } logs_found } diff --git a/rust/hyperlane-core/src/traits/cursor.rs b/rust/hyperlane-core/src/traits/cursor.rs index fe44fe044b..b835b94df2 100644 --- a/rust/hyperlane-core/src/traits/cursor.rs +++ b/rust/hyperlane-core/src/traits/cursor.rs @@ -1,4 +1,8 @@ -use std::{fmt::{self, Debug}, ops::RangeInclusive, time::Duration}; +use std::{ + fmt::{self, Debug}, + ops::RangeInclusive, + time::Duration, +}; use async_trait::async_trait; use auto_impl::auto_impl; From 
292e180e76e75fae03d71d557af7d56d8533b8f3 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Fri, 24 May 2024 13:47:32 +0100 Subject: [PATCH 07/17] clean up --- rust/agents/relayer/src/relayer.rs | 10 +- rust/agents/scraper/src/agent.rs | 4 +- rust/agents/validator/src/validator.rs | 2 +- .../src/contract_sync/cursors/mod.rs | 12 ++ rust/hyperlane-base/src/contract_sync/mod.rs | 190 ++++++------------ rust/hyperlane-base/src/settings/base.rs | 9 +- rust/hyperlane-core/src/traits/indexer.rs | 2 +- rust/utils/run-locally/src/invariants.rs | 4 +- 8 files changed, 84 insertions(+), 149 deletions(-) diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index 38d03ee9de..d9d46c1cc6 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -17,7 +17,7 @@ use hyperlane_base::{ }; use hyperlane_core::{ BroadcastReceiver, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, - MerkleTreeInsertion, MpmcChannel, H256, H512, U256, + MerkleTreeInsertion, MpmcChannel, H512, U256, }; use tokio::{ sync::{ @@ -308,7 +308,13 @@ impl BaseAgent for Relayer { let txid_receivers = self .message_syncs .iter() - .map(|(k, v)| (k.clone(), v.get_new_receive_tx_channel())) + .filter_map(|(k, v)| { + let maybe_rx = v.get_new_receive_tx_channel(); + if maybe_rx.is_none() { + warn!("No txid receiver for chain {}", k); + } + maybe_rx.map(|rx| (k.clone(), rx)) + }) .collect::>(); // send channels by destination chain diff --git a/rust/agents/scraper/src/agent.rs b/rust/agents/scraper/src/agent.rs index f69a938094..9893a90ce7 100644 --- a/rust/agents/scraper/src/agent.rs +++ b/rust/agents/scraper/src/agent.rs @@ -7,9 +7,7 @@ use hyperlane_base::{ metrics::AgentMetrics, settings::IndexSettings, BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, }; -use hyperlane_core::{ - BroadcastReceiver, Delivery, HyperlaneDomain, 
HyperlaneMessage, InterchainGasPayment, H256, -}; +use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment}; use tokio::task::JoinHandle; use tracing::{info_span, instrument::Instrumented, trace, Instrument}; diff --git a/rust/agents/validator/src/validator.rs b/rust/agents/validator/src/validator.rs index ab35c0ef88..c92fb893cb 100644 --- a/rust/agents/validator/src/validator.rs +++ b/rust/agents/validator/src/validator.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, num::NonZeroU64, sync::Arc, time::Duration}; +use std::{num::NonZeroU64, sync::Arc, time::Duration}; use crate::server as validator_server; use async_trait::async_trait; diff --git a/rust/hyperlane-base/src/contract_sync/cursors/mod.rs b/rust/hyperlane-base/src/contract_sync/cursors/mod.rs index c7d7274d68..dc2783e87e 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/mod.rs @@ -13,8 +13,15 @@ pub enum CursorType { RateLimited, } +// H256 * 1M = 32MB per origin chain worst case +// With one such channel per origin chain. 
+const TX_ID_CHANNEL_CAPACITY: Option = Some(1_000_000); + pub trait Indexable { fn indexing_cursor(domain: HyperlaneDomainProtocol) -> CursorType; + fn broadcast_channel_size() -> Option { + None + } } impl Indexable for HyperlaneMessage { @@ -26,6 +33,11 @@ impl Indexable for HyperlaneMessage { HyperlaneDomainProtocol::Cosmos => CursorType::SequenceAware, } } + + // Only broadcast txids from the message indexing task + fn broadcast_channel_size() -> Option { + TX_ID_CHANNEL_CAPACITY + } } impl Indexable for InterchainGasPayment { diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 1005255125..d225d73013 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -5,7 +5,6 @@ use std::{ use axum::async_trait; use cursors::*; use derive_new::new; -use fuels::programs::logs; use hyperlane_core::{ utils::fmt_sync_time, ContractSyncCursor, CursorAction, HyperlaneDomain, HyperlaneLogStore, HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, @@ -13,11 +12,11 @@ use hyperlane_core::{ }; use hyperlane_core::{BroadcastReceiver, Indexed, LogMeta, H512}; pub use metrics::ContractSyncMetrics; -use num_traits::Zero; use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; +use tokio::sync::broadcast::error::TryRecvError; use tokio::sync::broadcast::Sender as BroadcastSender; use tokio::time::sleep; -use tracing::{debug, info, warn}; +use tracing::{debug, info, instrument, trace, warn}; use crate::settings::IndexSettings; @@ -29,36 +28,28 @@ use cursors::ForwardBackwardSequenceAwareSyncCursor; const SLEEP_DURATION: Duration = Duration::from_secs(5); -// H256 * 1M = 32MB per origin chain worst case -// With one such channel per origin chain. -const TX_ID_CHANNEL_CAPACITY: usize = 1_000_000; - -enum LogsOrSleepDuration { - Logs(u64), - Sleep(Duration), -} - /// Entity that drives the syncing of an agent's db with on-chain data. 
/// Extracts chain-specific data (emitted checkpoints, messages, etc) from an /// `indexer` and fills the agent's db with this data. #[derive(Debug)] -pub struct ContractSync, I: Indexer> { +pub struct ContractSync, I: Indexer> { domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics, - broadcast_sender: BroadcastSender, + broadcast_sender: Option>, _phantom: PhantomData, } -impl, I: Indexer> ContractSync { +impl, I: Indexer> ContractSync { + /// Create a new ContractSync pub fn new(domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics) -> Self { Self { domain, db, indexer, metrics, - broadcast_sender: BroadcastSender::new(TX_ID_CHANNEL_CAPACITY), + broadcast_sender: T::broadcast_channel_size().map(BroadcastSender::new), _phantom: PhantomData, } } @@ -66,7 +57,7 @@ impl, I: Indexer> ContractSync { impl ContractSync where - T: Debug + Send + Sync + Clone + Eq + Hash + 'static, + T: Indexable + Debug + Send + Sync + Clone + Eq + Hash + 'static, D: HyperlaneLogStore, I: Indexer + 'static, { @@ -75,13 +66,15 @@ where &self.domain } - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { - let tx = &self.broadcast_sender; - BroadcastReceiver::new(tx.clone(), tx.subscribe()) + fn get_new_receive_tx_channel(&self) -> Option> { + // Create a new channel if it doesn't exist + self.broadcast_sender + .as_ref() + .map(|tx| BroadcastReceiver::new(tx.clone(), tx.subscribe())) } /// Sync logs and write them to the LogStore - #[tracing::instrument(name = "ContractSync", fields(domain=self.domain().name()), skip(self, opts))] + #[instrument(name = "ContractSync", fields(domain=self.domain().name()), skip(self, opts))] pub async fn sync(&self, label: &'static str, mut opts: SyncOptions) { let chain_name = self.domain.as_ref(); let indexed_height_metric = self @@ -94,95 +87,26 @@ where .with_label_values(&[label, chain_name]); loop { - // in here, we check to see whether the recv end of the channel received any txid to query receipts for - 
// the recv end is defined as an Option - - let mut logs_found = 0; - // // let mut sleep_duration = SLEEP_DURATION; if let Some(recv) = opts.tx_id_recv.as_mut() { - logs_found += self - .fetch_logs_from_receiver(recv, &stored_logs_metric) + self.fetch_logs_from_receiver(recv, &stored_logs_metric) + .await; + } + if let Some(cursor) = opts.cursor.as_mut() { + self.fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) .await; } - // if let Some(cursor) = opts.cursor.as_mut() { - // match self - // .fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) - // .await - // { - // LogsOrSleepDuration::Logs(found) => logs_found += found, - // LogsOrSleepDuration::Sleep(duration) => sleep_duration = duration, - // } - // } - - // if logs_found.is_zero() { - // sleep(sleep_duration).await; - // } - info!("~~~ looping"); - let cursor = opts.cursor.as_mut().unwrap(); - indexed_height_metric.set(cursor.latest_queried_block() as i64); - let (action, eta) = match cursor.next_action().await { - Ok((action, eta)) => (action, eta), - Err(err) => { - warn!(?err, "Error getting next action"); - sleep(SLEEP_DURATION).await; - continue; - } - }; - let sleep_duration = match action { - // Use `loop` but always break - this allows for returning a value - // from the loop (the sleep duration) - #[allow(clippy::never_loop)] - CursorAction::Query(range) => loop { - debug!(?range, "Looking for for events in index range"); - - let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { - Ok(logs) => logs, - Err(err) => { - warn!(?err, "Error fetching logs"); - break SLEEP_DURATION; - } - }; - let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; - let logs_found = logs.len() as u64; - info!( - ?range, - num_logs = logs_found, - estimated_time_to_sync = fmt_sync_time(eta), - sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), - cursor = ?cursor, - "Found log(s) in index range" - ); - - 
logs.iter().for_each(|(_, meta)| { - if let Err(err) = self.broadcast_sender.send(meta.transaction_id) { - warn!(?err, "Error sending txid to receiver"); - } - }); - // Report amount of deliveries stored into db - // Update cursor - if let Err(err) = cursor.update(logs, range).await { - warn!(?err, "Error updating cursor"); - break SLEEP_DURATION; - }; - break Default::default(); - }, - CursorAction::Sleep(duration) => duration, - }; - sleep(sleep_duration).await; } } + #[instrument(fields(domain=self.domain().name()), skip(self, recv, stored_logs_metric))] async fn fetch_logs_from_receiver( &self, recv: &mut BroadcastReceiver, stored_logs_metric: &GenericCounter, - ) -> u64 { - println!("~~~ fetch_logs_from_receiver"); - let mut logs_found = 0; + ) { loop { match recv.try_recv() { Ok(tx_id) => { - println!("~~~ tx_id: {:?}", tx_id); // query receipts for tx_id // let logs = vec![]; let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { @@ -194,16 +118,15 @@ where }; let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; let num_logs = logs.len() as u64; - info!(num_logs = logs_found, ?tx_id, "Found log(s) for tx id"); - // logs_found += num_logs; + info!(num_logs, ?tx_id, "Found log(s) for tx id"); } + Err(TryRecvError::Empty) => trace!("No txid received"), Err(err) => { warn!(?err, "Error receiving txid from channel"); break; } } } - logs_found } async fn fetch_logs_with_cursor( @@ -211,16 +134,17 @@ where cursor: &mut Box>, stored_logs_metric: &GenericCounter, indexed_height_metric: &GenericGauge, - ) -> LogsOrSleepDuration { + ) { indexed_height_metric.set(cursor.latest_queried_block() as i64); let (action, eta) = match cursor.next_action().await { Ok((action, eta)) => (action, eta), Err(err) => { warn!(?err, "Error getting next action"); - return LogsOrSleepDuration::Sleep(SLEEP_DURATION); + sleep(SLEEP_DURATION).await; + return; } }; - match action { + let sleep_duration = match action { // Use `loop` but always break - this 
allows for returning a value // from the loop (the sleep duration) #[allow(clippy::never_loop)] @@ -231,7 +155,7 @@ where Ok(logs) => logs, Err(err) => { warn!(?err, ?range, "Error fetching logs in range"); - return LogsOrSleepDuration::Sleep(SLEEP_DURATION); + break SLEEP_DURATION; } }; @@ -246,21 +170,24 @@ where "Found log(s) in index range" ); - logs.iter().for_each(|(_, meta)| { - if let Err(err) = self.broadcast_sender.send(meta.transaction_id) { - warn!(?err, "Error sending txid to receiver"); - } - }); + if let Some(tx) = self.broadcast_sender.as_ref() { + logs.iter().for_each(|(_, meta)| { + if let Err(err) = tx.send(meta.transaction_id) { + warn!(?err, "Error sending txid to receiver"); + } + }); + } // Update cursor if let Err(err) = cursor.update(logs, range).await { warn!(?err, "Error updating cursor"); - return LogsOrSleepDuration::Sleep(SLEEP_DURATION); + break SLEEP_DURATION; }; - return LogsOrSleepDuration::Logs(logs_found); + break Default::default(); }, - CursorAction::Sleep(duration) => return LogsOrSleepDuration::Sleep(duration), + CursorAction::Sleep(duration) => duration, }; + sleep(sleep_duration).await } async fn dedupe_and_store_logs( @@ -273,23 +200,20 @@ where // Store deliveries let stored = match self.db.store_logs(&logs).await { - Ok(stored) => { - if stored > 0 { - println!( - "~~~ stored logs in db. 
domain: {:?}, Len: {:?}, sequenes: {:?}, logs: {:?}", - self.domain, - stored, - logs.iter().map(|(log, _)| log.sequence).collect::>(), - logs - ); - } - stored - } + Ok(stored) => stored, Err(err) => { warn!(?err, "Error storing logs in db"); Default::default() } }; + if stored > 0 { + debug!( + domain = self.domain.as_ref(), + count = stored, + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + "Stored logs in db", + ); + } // Report amount of deliveries stored into db stored_logs_metric.inc_by(stored as u64); logs @@ -319,20 +243,18 @@ pub trait ContractSyncer: Send + Sync { fn domain(&self) -> &HyperlaneDomain; /// If this syncer is also a broadcaster, return the channel to receive txids - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver; + fn get_new_receive_tx_channel(&self) -> Option>; /// Set the channel to receive txids async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver); - - // async fn receive_tx_to_index(&self) -> Option { - // None - // } } #[derive(new)] +/// Options for syncing events pub struct SyncOptions { // Keep as optional fields for now to run them simultaneously. - // Might want to refactor into an enum later. 
+ // Might want to refactor into an enum later, where we either index with a cursor or rely on receiving + // txids from a channel to other indexing tasks cursor: Option>>, tx_id_recv: Option>, } @@ -349,7 +271,7 @@ impl From>> for SyncOptions { #[async_trait] impl ContractSyncer for WatermarkContractSync where - T: Debug + Send + Sync + Clone + Eq + Hash + 'static, + T: Indexable + Debug + Send + Sync + Clone + Eq + Hash + 'static, { /// Returns a new cursor to be used for syncing events from the indexer based on time async fn cursor(&self, index_settings: IndexSettings) -> Box> { @@ -379,7 +301,7 @@ where ContractSync::domain(self) } - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + fn get_new_receive_tx_channel(&self) -> Option> { ContractSync::get_new_receive_tx_channel(self) } @@ -398,7 +320,7 @@ pub type SequencedDataContractSync = #[async_trait] impl ContractSyncer for SequencedDataContractSync where - T: Send + Sync + Debug + Clone + Eq + Hash + 'static, + T: Indexable + Send + Sync + Debug + Clone + Eq + Hash + 'static, { /// Returns a new cursor to be used for syncing dispatched messages from the indexer async fn cursor(&self, index_settings: IndexSettings) -> Box> { @@ -422,7 +344,7 @@ where ContractSync::domain(self) } - fn get_new_receive_tx_channel(&self) -> BroadcastReceiver { + fn get_new_receive_tx_channel(&self) -> Option> { ContractSync::get_new_receive_tx_channel(self) } diff --git a/rust/hyperlane-base/src/settings/base.rs b/rust/hyperlane-base/src/settings/base.rs index 4cf7c458dd..6757a545ed 100644 --- a/rust/hyperlane-base/src/settings/base.rs +++ b/rust/hyperlane-base/src/settings/base.rs @@ -1,13 +1,12 @@ -use std::{borrow::BorrowMut, collections::HashMap, fmt::Debug, hash::Hash, sync::Arc}; +use std::{collections::HashMap, fmt::Debug, hash::Hash, sync::Arc}; use eyre::{eyre, Context, Result}; use futures_util::future::try_join_all; use hyperlane_core::{ - BroadcastReceiver, HyperlaneChain, HyperlaneDomain, HyperlaneLogStore, 
HyperlaneProvider, + HyperlaneChain, HyperlaneDomain, HyperlaneLogStore, HyperlaneProvider, HyperlaneSequenceAwareIndexerStoreReader, HyperlaneWatermarkedLogStore, InterchainGasPaymaster, Mailbox, MerkleTreeHook, MultisigIsm, SequenceAwareIndexer, ValidatorAnnounce, H256, }; -use tokio::sync::broadcast::Sender as BroadcastSender; use crate::{ cursors::{CursorType, Indexable}, @@ -161,7 +160,7 @@ impl Settings { db: Arc, ) -> eyre::Result>> where - T: Debug, + T: Indexable + Debug, SequenceIndexer: TryFromWithMetrics, D: HyperlaneLogStore + HyperlaneSequenceAwareIndexerStoreReader + 'static, { @@ -185,7 +184,7 @@ impl Settings { db: Arc, ) -> eyre::Result>> where - T: Debug, + T: Indexable + Debug, SequenceIndexer: TryFromWithMetrics, D: HyperlaneLogStore + HyperlaneWatermarkedLogStore + 'static, { diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index f5c11ffa2a..ada4b1a9a0 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ b/rust/hyperlane-core/src/traits/indexer.rs @@ -11,7 +11,7 @@ use async_trait::async_trait; use auto_impl::auto_impl; use serde::Deserialize; -use crate::{ChainResult, Indexed, LogMeta, H256, H512}; +use crate::{ChainResult, Indexed, LogMeta, H512}; /// Indexing mode. #[derive(Copy, Debug, Default, Deserialize, Clone)] diff --git a/rust/utils/run-locally/src/invariants.rs b/rust/utils/run-locally/src/invariants.rs index a1038e4362..33ec7d4eeb 100644 --- a/rust/utils/run-locally/src/invariants.rs +++ b/rust/utils/run-locally/src/invariants.rs @@ -1,5 +1,3 @@ -// use std::path::Path; - use std::path::Path; use crate::config::Config; @@ -27,7 +25,7 @@ pub fn termination_invariants_met( // let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! 
{})?; - assert!(!lengths.is_empty(), "Could not find queue length metric"); + // assert!(!lengths.is_empty(), "Could not find queue length metric"); if lengths.iter().sum::() != ZERO_MERKLE_INSERTION_KATHY_MESSAGES { log!("Relayer queues not empty. Lengths: {:?}", lengths); return Ok(false); From a7db56aabb983be203cd92ee80115be077f80513 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Fri, 24 May 2024 14:00:34 +0100 Subject: [PATCH 08/17] more clean up, re-enable sealevel e2e --- rust/Cargo.lock | 1 - rust/agents/relayer/src/msg/processor.rs | 5 -- rust/agents/relayer/src/relayer.rs | 3 - .../src/contracts/interchain_gas.rs | 6 -- .../src/contracts/merkle_tree_hook.rs | 6 -- rust/hyperlane-base/Cargo.toml | 1 - rust/hyperlane-base/src/contract_sync/mod.rs | 10 ++- rust/hyperlane-core/src/traits/indexer.rs | 1 - rust/utils/run-locally/src/invariants.rs | 17 ++--- rust/utils/run-locally/src/main.rs | 75 ++++++++----------- 10 files changed, 47 insertions(+), 78 deletions(-) diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 536be3daf5..d8b69f4ea4 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -4135,7 +4135,6 @@ dependencies = [ "itertools 0.12.0", "maplit", "mockall", - "num-traits", "paste", "prometheus", "reqwest", diff --git a/rust/agents/relayer/src/msg/processor.rs b/rust/agents/relayer/src/msg/processor.rs index 5073892a29..3aae0d308c 100644 --- a/rust/agents/relayer/src/msg/processor.rs +++ b/rust/agents/relayer/src/msg/processor.rs @@ -117,11 +117,6 @@ impl ProcessorExt for MessageProcessor { impl MessageProcessor { fn try_get_unprocessed_message(&mut self) -> Result> { loop { - println!( - "~~~ trying to get unprocessed message for domain and nonce {:?} {:?}", - self.domain(), - self.message_nonce - ); // First, see if we can find the message so we can update the gauge. if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? 
{ // Update the latest nonce gauges diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index d9d46c1cc6..5fb9cbf773 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -131,8 +131,6 @@ impl BaseAgent for Relayer { let contract_sync_metrics = Arc::new(ContractSyncMetrics::new(&core_metrics)); - // each of these `contract_syncs` will return a receiver of txid alongside - // `contract_syncs` will also take a hashmap of domain -> recvs as an argument let message_syncs: HashMap<_, Arc>> = settings .contract_syncs::( settings.origin_chains.iter(), @@ -161,7 +159,6 @@ impl BaseAgent for Relayer { .map(|(k, v)| (k, v as _)) .collect(); - // set the receivers for each domain and implement the `fetch_logs_by_tx_hash` for igp and merkle let merkle_tree_hook_syncs = settings .contract_syncs::( settings.origin_chains.iter(), diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 231d919088..37fd1b96b8 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -139,7 +139,6 @@ where .get_transaction_receipt(ethers_tx_hash) .await .map_err(|err| ContractError::::MiddlewareError(err))?; - println!("~~~ igp receipt: {:?}", receipt); let Some(receipt) = receipt else { return Ok(vec![]); }; @@ -167,11 +166,6 @@ where }) }) .collect(); - println!( - "~~~ found igp logs with tx id {:?}: {:?}", - tx_hash, - logs.len() - ); Ok(logs) } } diff --git a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs index 3390deb375..6254be9e64 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs @@ -158,7 +158,6 @@ where .get_transaction_receipt(ethers_tx_hash) .await 
.map_err(|err| ContractError::::MiddlewareError(err))?; - println!("~~~ merkle hook receipt: {:?}", receipt); let Some(receipt) = receipt else { return Ok(vec![]); }; @@ -181,11 +180,6 @@ where }) }) .collect(); - println!( - "~~~ found merkle hook logs with tx id {:?}: {:?}", - tx_hash, - logs.len() - ); Ok(logs) } } diff --git a/rust/hyperlane-base/Cargo.toml b/rust/hyperlane-base/Cargo.toml index 0ed74431fe..97d84b6221 100644 --- a/rust/hyperlane-base/Cargo.toml +++ b/rust/hyperlane-base/Cargo.toml @@ -27,7 +27,6 @@ futures-util.workspace = true itertools.workspace = true maplit.workspace = true mockall.worksapce = true -num-traits.workspace = true paste.workspace = true prometheus.workspace = true rocksdb.workspace = true diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index d225d73013..731b3246a8 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -107,8 +107,6 @@ where loop { match recv.try_recv() { Ok(tx_id) => { - // query receipts for tx_id - // let logs = vec![]; let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { Ok(logs) => logs, Err(err) => { @@ -118,7 +116,12 @@ where }; let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; let num_logs = logs.len() as u64; - info!(num_logs, ?tx_id, "Found log(s) for tx id"); + info!( + num_logs, + ?tx_id, + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + "Found log(s) for tx id" + ); } Err(TryRecvError::Empty) => trace!("No txid received"), Err(err) => { @@ -129,6 +132,7 @@ where } } + #[instrument(fields(domain=self.domain().name()), skip(self, stored_logs_metric, indexed_height_metric))] async fn fetch_logs_with_cursor( &self, cursor: &mut Box>, diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index ada4b1a9a0..1c05360ff5 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ 
b/rust/hyperlane-core/src/traits/indexer.rs @@ -42,7 +42,6 @@ pub trait Indexer: Send + Sync + Debug { &self, _tx_hash: H512, ) -> ChainResult, LogMeta)>> { - // Err(eyre::eyre!("fetch_logs_by_tx_hash not implemented").into()) Ok(vec![]) } } diff --git a/rust/utils/run-locally/src/invariants.rs b/rust/utils/run-locally/src/invariants.rs index 33ec7d4eeb..15b2584813 100644 --- a/rust/utils/run-locally/src/invariants.rs +++ b/rust/utils/run-locally/src/invariants.rs @@ -17,15 +17,14 @@ pub const SOL_MESSAGES_EXPECTED: u32 = 20; pub fn termination_invariants_met( config: &Config, starting_relayer_balance: f64, - // solana_cli_tools_path: &Path, - // solana_config_path: &Path, + solana_cli_tools_path: &Path, + solana_config_path: &Path, ) -> eyre::Result { let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2; - let total_messages_expected = eth_messages_expected; - // let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; + let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?; - // assert!(!lengths.is_empty(), "Could not find queue length metric"); + assert!(!lengths.is_empty(), "Could not find queue length metric"); if lengths.iter().sum::() != ZERO_MERKLE_INSERTION_KATHY_MESSAGES { log!("Relayer queues not empty. 
Lengths: {:?}", lengths); return Ok(false); @@ -75,10 +74,10 @@ pub fn termination_invariants_met( return Ok(false); } - // if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { - // log!("Solana termination invariants not met"); - // return Ok(false); - // } + if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { + log!("Solana termination invariants not met"); + return Ok(false); + } let dispatched_messages_scraped = fetch_metric( "9093", diff --git a/rust/utils/run-locally/src/main.rs b/rust/utils/run-locally/src/main.rs index e8c74a7c65..0e88e685ec 100644 --- a/rust/utils/run-locally/src/main.rs +++ b/rust/utils/run-locally/src/main.rs @@ -200,15 +200,6 @@ fn main() -> ExitCode { r#"[{ "type": "minimum", "payment": "1", - "matchingList": [ - { - "originDomain": ["13375","13376"], - "destinationDomain": ["13375","13376"] - } - ] - }, - { - "type": "none" }]"#, ) .arg( @@ -219,8 +210,7 @@ fn main() -> ExitCode { .arg("defaultSigner.key", RELAYER_KEYS[2]) .arg( "relayChains", - "test1,test2,test3", - // "test1,test2,test3,sealeveltest1,sealeveltest2", + "test1,test2,test3,sealeveltest1,sealeveltest2", ); let base_validator_env = common_agent_env @@ -292,9 +282,9 @@ fn main() -> ExitCode { // Ready to run... 
// - // let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); - // state.data.push(Box::new(solana_path_tempdir)); - // let solana_program_builder = build_solana_programs(solana_path.clone()); + let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); + state.data.push(Box::new(solana_path_tempdir)); + let solana_program_builder = build_solana_programs(solana_path.clone()); // this task takes a long time in the CI so run it in parallel log!("Building rust..."); @@ -305,13 +295,13 @@ fn main() -> ExitCode { .arg("bin", "validator") .arg("bin", "scraper") .arg("bin", "init-db") - // .arg("bin", "hyperlane-sealevel-client") + .arg("bin", "hyperlane-sealevel-client") .filter_logs(|l| !l.contains("workspace-inheritance")) .run(); let start_anvil = start_anvil(config.clone()); - // let solana_program_path = solana_program_builder.join(); + let solana_program_path = solana_program_builder.join(); log!("Running postgres db..."); let postgres = Program::new("docker") @@ -326,15 +316,15 @@ fn main() -> ExitCode { build_rust.join(); - // let solana_ledger_dir = tempdir().unwrap(); - // let start_solana_validator = start_solana_test_validator( - // solana_path.clone(), - // solana_program_path, - // solana_ledger_dir.as_ref().to_path_buf(), - // ); + let solana_ledger_dir = tempdir().unwrap(); + let start_solana_validator = start_solana_test_validator( + solana_path.clone(), + solana_program_path, + solana_ledger_dir.as_ref().to_path_buf(), + ); - // let (solana_config_path, solana_validator) = start_solana_validator.join(); - // state.push_agent(solana_validator); + let (solana_config_path, solana_validator) = start_solana_validator.join(); + state.push_agent(solana_validator); state.push_agent(start_anvil.join()); // spawn 1st validator before any messages have been sent to test empty mailbox @@ -380,9 +370,9 @@ fn main() -> ExitCode { kathy_env_double_insertion.clone().run().join(); // Send some sealevel messages before spinning up 
the agents, to test the backward indexing cursor - // for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { - // initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); - // } + for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { + initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); + } // spawn the rest of the validators for (i, validator_env) in validator_envs.into_iter().enumerate().skip(1) { @@ -393,9 +383,9 @@ fn main() -> ExitCode { state.push_agent(relayer_env.spawn("RLY")); // Send some sealevel messages after spinning up the relayer, to test the forward indexing cursor - // for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { - // initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); - // } + for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { + initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); + } log!("Setup complete! Agents running in background..."); log!("Ctrl+C to end execution..."); @@ -413,12 +403,11 @@ fn main() -> ExitCode { while !SHUTDOWN.load(Ordering::Relaxed) { if config.ci_mode { // for CI we have to look for the end condition. - // if termination_invariants_met(&config, starting_relayer_balance) if termination_invariants_met( &config, starting_relayer_balance, - // &solana_path, - // &solana_config_path, + &solana_path, + &solana_config_path, ) .unwrap_or(false) { @@ -435,16 +424,16 @@ fn main() -> ExitCode { // verify long-running tasks are still running for (name, child) in state.agents.iter_mut() { if let Some(status) = child.try_wait().unwrap() { - // if !status.success() { - // log!( - // "Child process {} exited unexpectedly, with code {}. Shutting down", - // name, - // status.code().unwrap() - // ); - // failure_occurred = true; - // SHUTDOWN.store(true, Ordering::Relaxed); - // break; - // } + if !status.success() { + log!( + "Child process {} exited unexpectedly, with code {}. 
Shutting down", + name, + status.code().unwrap() + ); + failure_occurred = true; + SHUTDOWN.store(true, Ordering::Relaxed); + break; + } } } From f06ce540af87ca25078d00450135c26fdf17fa53 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Fri, 24 May 2024 14:58:37 +0100 Subject: [PATCH 09/17] fix: don't block on any channel error --- .../cursors/sequence_aware/backward.rs | 12 +++++++++++- .../contract_sync/cursors/sequence_aware/forward.rs | 13 ++++++++++++- rust/hyperlane-base/src/contract_sync/mod.rs | 7 +++++-- 3 files changed, 28 insertions(+), 4 deletions(-) diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs index e217d4bb42..055ed5da5b 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs @@ -14,7 +14,6 @@ use tracing::{debug, warn}; use super::{LastIndexedSnapshot, TargetSnapshot}; /// A sequence-aware cursor that syncs backward until there are no earlier logs to index. -#[derive(Debug)] pub(crate) struct BackwardSequenceAwareSyncCursor { /// The max chunk size to query for logs. /// If in sequence mode, this is the max number of sequences to query. 
@@ -33,6 +32,17 @@ pub(crate) struct BackwardSequenceAwareSyncCursor { index_mode: IndexMode, } +impl Debug for BackwardSequenceAwareSyncCursor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BackwardSequenceAwareSyncCursor") + .field("chunk_size", &self.chunk_size) + .field("last_indexed_snapshot", &self.last_indexed_snapshot) + .field("current_indexing_snapshot", &self.current_indexing_snapshot) + .field("index_mode", &self.index_mode) + .finish() + } +} + impl BackwardSequenceAwareSyncCursor { pub fn new( chunk_size: u32, diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs index 78df74e881..289cab537e 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs @@ -18,7 +18,6 @@ use tracing::{debug, warn}; use super::{LastIndexedSnapshot, TargetSnapshot}; /// A sequence-aware cursor that syncs forwards in perpetuity. -#[derive(Debug)] pub(crate) struct ForwardSequenceAwareSyncCursor { /// The max chunk size to query for logs. /// If in sequence mode, this is the max number of sequences to query. 
@@ -42,6 +41,18 @@ pub(crate) struct ForwardSequenceAwareSyncCursor { index_mode: IndexMode, } +impl Debug for ForwardSequenceAwareSyncCursor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ForwardSequenceAwareSyncCursor") + .field("chunk_size", &self.chunk_size) + .field("last_indexed_snapshot", &self.last_indexed_snapshot) + .field("current_indexing_snapshot", &self.current_indexing_snapshot) + .field("target_snapshot", &self.target_snapshot) + .field("index_mode", &self.index_mode) + .finish() + } +} + impl ForwardSequenceAwareSyncCursor { pub fn new( chunk_size: u32, diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 731b3246a8..9353b4d94f 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -123,7 +123,10 @@ where "Found log(s) for tx id" ); } - Err(TryRecvError::Empty) => trace!("No txid received"), + Err(TryRecvError::Empty) => { + trace!("No txid received"); + break; + } Err(err) => { warn!(?err, "Error receiving txid from channel"); break; @@ -177,7 +180,7 @@ where if let Some(tx) = self.broadcast_sender.as_ref() { logs.iter().for_each(|(_, meta)| { if let Err(err) = tx.send(meta.transaction_id) { - warn!(?err, "Error sending txid to receiver"); + trace!(?err, "Error sending txid to receiver"); } }); } From 933cdb9ffd66052ea84516a568308f17d35ec91b Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Fri, 24 May 2024 15:02:54 +0100 Subject: [PATCH 10/17] fix clippy --- rust/hyperlane-base/src/contract_sync/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 9353b4d94f..429f236150 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -114,7 +114,7 @@ where continue; } }; - let 
logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let logs = self.dedupe_and_store_logs(logs, stored_logs_metric).await; let num_logs = logs.len() as u64; info!( num_logs, @@ -166,7 +166,7 @@ where } }; - let logs = self.dedupe_and_store_logs(logs, &stored_logs_metric).await; + let logs = self.dedupe_and_store_logs(logs, stored_logs_metric).await; let logs_found = logs.len() as u64; info!( ?range, From c06c84ca0965bf6c918806d899ab378b1685fa6a Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Fri, 24 May 2024 16:58:08 +0100 Subject: [PATCH 11/17] chore: rm unused method --- rust/agents/relayer/src/relayer.rs | 8 ++++---- rust/hyperlane-base/src/contract_sync/mod.rs | 11 ----------- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index 757c796d24..b473182a07 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -322,12 +322,12 @@ impl BaseAgent for Relayer { let txid_receivers = self .message_syncs .iter() - .filter_map(|(k, v)| { - let maybe_rx = v.get_new_receive_tx_channel(); + .filter_map(|(domain, sync)| { + let maybe_rx = sync.get_new_receive_tx_channel(); if maybe_rx.is_none() { - warn!("No txid receiver for chain {}", k); + warn!(?domain, "No txid receiver for chain"); } - maybe_rx.map(|rx| (k.clone(), rx)) + maybe_rx.map(|rx| (domain.clone(), rx)) }) .collect::>(); diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 429f236150..178030be68 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -251,9 +251,6 @@ pub trait ContractSyncer: Send + Sync { /// If this syncer is also a broadcaster, return the channel to receive txids fn get_new_receive_tx_channel(&self) -> Option>; - - /// Set the channel to receive txids - async fn set_receive_tx_channel(&mut 
self, channel: BroadcastReceiver); } #[derive(new)] @@ -311,10 +308,6 @@ where fn get_new_receive_tx_channel(&self) -> Option> { ContractSync::get_new_receive_tx_channel(self) } - - async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { - ContractSync::set_receive_tx_channel(self, channel).await - } } /// Log store for sequence aware cursors @@ -354,8 +347,4 @@ where fn get_new_receive_tx_channel(&self) -> Option> { ContractSync::get_new_receive_tx_channel(self) } - - async fn set_receive_tx_channel(&mut self, channel: BroadcastReceiver) { - ContractSync::set_receive_tx_channel(self, channel).await - } } From 6ea558bcd7b518923303d7e94d43812d04b88b4d Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Tue, 4 Jun 2024 17:04:09 +0100 Subject: [PATCH 12/17] remediation wip --- rust/agents/relayer/src/relayer.rs | 22 ++-- .../src/contracts/interchain_gas.rs | 7 +- .../src/contracts/mailbox.rs | 7 +- .../src/contracts/merkle_tree_hook.rs | 107 ++++++++++++++---- rust/hyperlane-base/src/contract_sync/mod.rs | 2 +- 5 files changed, 103 insertions(+), 42 deletions(-) diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index b473182a07..568d66909f 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -319,7 +319,7 @@ impl BaseAgent for Relayer { .instrument(info_span!("Relayer server")); tasks.push(server_task); - let txid_receivers = self + let mut txid_receivers = self .message_syncs .iter() .filter_map(|(domain, sync)| { @@ -369,7 +369,7 @@ impl BaseAgent for Relayer { tasks.push( self.run_interchain_gas_payment_sync( origin, - txid_receivers.clone(), + txid_receivers.get(origin).cloned(), task_monitor.clone(), ) .await, @@ -377,7 +377,7 @@ impl BaseAgent for Relayer { tasks.push( self.run_merkle_tree_hook_syncs( origin, - txid_receivers.clone(), + txid_receivers.remove(origin), task_monitor.clone(), ) .await, @@ -424,7 +424,7 @@ impl 
Relayer { async fn run_interchain_gas_payment_sync( &self, origin: &HyperlaneDomain, - mut rxs: HashMap>, + rx: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index_settings(); @@ -434,14 +434,10 @@ impl Relayer { .unwrap() .clone(); let cursor = contract_sync.cursor(index_settings).await; - let origin_chain = origin.clone(); tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { contract_sync .clone() - .sync( - "gas_payments", - SyncOptions::new(Some(cursor), rxs.remove(&origin_chain)), - ) + .sync("gas_payments", SyncOptions::new(Some(cursor), rx)) .await })) .instrument(info_span!("IgpSync")) @@ -450,20 +446,16 @@ impl Relayer { async fn run_merkle_tree_hook_syncs( &self, origin: &HyperlaneDomain, - mut rxs: HashMap>, + rx: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone(); let cursor = contract_sync.cursor(index_settings).await; - let origin_chain = origin.clone(); tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { contract_sync .clone() - .sync( - "merkle_tree_hook", - SyncOptions::new(Some(cursor), rxs.remove(&origin_chain)), - ) + .sync("merkle_tree_hook", SyncOptions::new(Some(cursor), rx)) .await })) .instrument(info_span!("MerkleTreeHookSync")) diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 37fd1b96b8..89e6d393a8 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -15,7 +15,7 @@ use hyperlane_core::{ HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, InterchainGasPaymaster, InterchainGasPayment, LogMeta, SequenceAwareIndexer, H160, H256, H512, }; -use tracing::instrument; +use 
tracing::{instrument, warn}; use crate::interfaces::i_interchain_gas_paymaster::{ GasPaymentFilter, IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, @@ -140,6 +140,7 @@ where .await .map_err(|err| ContractError::::MiddlewareError(err))?; let Some(receipt) = receipt else { + warn!(%tx_hash, "No receipt found for tx hash"); return Ok(vec![]); }; @@ -147,6 +148,10 @@ where .logs .into_iter() .filter_map(|log| { + // Filter out logs that aren't emitted by this contract + if log.address != self.contract.address() { + return None; + } let raw_log = RawLog { topics: log.topics.clone(), data: log.data.to_vec(), diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs index 683ef69c2f..54316b2c5d 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs @@ -13,7 +13,7 @@ use ethers_contract::{builders::ContractCall, ContractError, EthEvent, LogMeta a use ethers_core::types::H256 as EthersH256; use futures_util::future::join_all; use hyperlane_core::H512; -use tracing::instrument; +use tracing::{instrument, warn}; use hyperlane_core::{ utils::bytes_to_hex, BatchItem, ChainCommunicationError, ChainResult, ContractLocator, @@ -172,6 +172,7 @@ where .await .map_err(|err| ContractError::::MiddlewareError(err))?; let Some(receipt) = receipt else { + warn!(%tx_hash, "No receipt found for tx hash"); return Ok(vec![]); }; @@ -179,6 +180,10 @@ where .logs .into_iter() .filter_map(|log| { + // Filter out logs that aren't emitted by this contract + if log.address != self.contract.address() { + return None; + } let raw_log = RawLog { topics: log.topics.clone(), data: log.data.to_vec(), diff --git a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs index 6254be9e64..c06ef54eaf 100644 --- 
a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs @@ -6,15 +6,15 @@ use std::sync::Arc; use async_trait::async_trait; use ethers::abi::RawLog; use ethers::prelude::Middleware; -use ethers_contract::{ContractError, EthLogDecode, LogMeta as EthersLogMeta}; +use ethers_contract::{ContractError, EthEvent, LogMeta as EthersLogMeta}; use ethers_core::types::H256 as EthersH256; use hyperlane_core::accumulator::incremental::IncrementalMerkle; -use tracing::instrument; +use tracing::{instrument, warn}; use hyperlane_core::{ ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, LogMeta, - MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H256, H512, + MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H160, H256, H512, }; use crate::interfaces::merkle_tree_hook::{ @@ -152,38 +152,97 @@ where &self, tx_hash: H512, ) -> ChainResult, LogMeta)>> { - let ethers_tx_hash: EthersH256 = tx_hash.into(); - let receipt = self - .provider - .get_transaction_receipt(ethers_tx_hash) - .await - .map_err(|err| ContractError::::MiddlewareError(err))?; - let Some(receipt) = receipt else { - return Ok(vec![]); - }; - - let logs: Vec<_> = receipt - .logs + let raw_logs_and_log_meta = fetch_raw_logs_and_log_meta::( + tx_hash, + self.provider.clone(), + self.contract.address(), + )?; + let merkle_insertion_logs = raw_logs_and_log_meta? 
.into_iter() - .filter_map(|log| { - let raw_log = RawLog { - topics: log.topics.clone(), - data: log.data.to_vec(), - }; - let log_meta: EthersLogMeta = (&log).into(); - let gas_payment_filter = InsertedIntoTreeFilter::decode_log(&raw_log).ok(); - gas_payment_filter.map(|log| { + .filter_map(|(log, log_meta)| { + log.map(|log| { ( MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), - log_meta.into(), + log_meta, ) }) }) .collect(); + // let ethers_tx_hash: EthersH256 = tx_hash.into(); + // let receipt = self + // .provider + // .get_transaction_receipt(ethers_tx_hash) + // .await + // .map_err(|err| ContractError::::MiddlewareError(err))?; + // let Some(receipt) = receipt else { + // warn!(%tx_hash, "No receipt found for tx hash"); + // return Ok(vec![]); + // }; + + // let logs: Vec<_> = receipt + // .logs + // .into_iter() + // .filter_map(|log| { + // // Filter out logs that aren't emitted by this contract + // if log.address != self.contract.address() { + // return None; + // } + // let raw_log = RawLog { + // topics: log.topics.clone(), + // data: log.data.to_vec(), + // }; + // let log_meta: EthersLogMeta = (&log).into(); + // let merkle_insertion_filter = InsertedIntoTreeFilter::decode_log(&raw_log).ok(); + // merkle_insertion_filter.map(|log| { + // ( + // MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), + // log_meta.into(), + // ) + // }) + // }) + // .collect(); Ok(logs) } } +pub async fn fetch_raw_logs_and_log_meta( + tx_hash: H512, + provider: Arc, + contract_address: H160, +) -> ChainResult, LogMeta)>> +where + M: Middleware + 'static, +{ + let ethers_tx_hash: EthersH256 = tx_hash.into(); + let receipt = provider + .get_transaction_receipt(ethers_tx_hash) + .await + .map_err(|err| ContractError::::MiddlewareError(err))?; + let Some(receipt) = receipt else { + warn!(%tx_hash, "No receipt found for tx hash"); + return Ok(vec![]); + }; + + let logs: Vec<(Option, LogMeta)> = receipt + .logs + .into_iter() + 
.filter_map(|log| { + // Filter out logs that aren't emitted by this contract + if log.address != contract_address.into() { + return None; + } + let raw_log = RawLog { + topics: log.topics.clone(), + data: log.data.to_vec(), + }; + let log_meta: EthersLogMeta = (&log).into(); + let event_filter = T::decode_log(&raw_log).ok(); + event_filter.map(|log| (Some(log), log_meta.into())) + }) + .collect(); + Ok(logs) +} + #[async_trait] impl SequenceAwareIndexer for EthereumMerkleTreeHookIndexer where diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 178030be68..99e1eaea4e 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -258,7 +258,7 @@ pub trait ContractSyncer: Send + Sync { pub struct SyncOptions { // Keep as optional fields for now to run them simultaneously. // Might want to refactor into an enum later, where we either index with a cursor or rely on receiving - // txids from a channel to other inedxing tasks + // txids from a channel to other indexing tasks cursor: Option>>, tx_id_recv: Option>, } From baaad7aab7895584eb624446f13a8eb50103dc62 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Wed, 5 Jun 2024 00:00:46 +0100 Subject: [PATCH 13/17] refactor: dedupe logic to fetch events with txhash --- .../src/contracts/interchain_gas.rs | 63 ++++------- .../src/contracts/mailbox.rs | 55 ++++------ .../src/contracts/merkle_tree_hook.rs | 102 +++--------------- .../hyperlane-ethereum/src/contracts/mod.rs | 5 +- .../hyperlane-ethereum/src/contracts/utils.rs | 48 +++++++++ 5 files changed, 102 insertions(+), 171 deletions(-) create mode 100644 rust/chains/hyperlane-ethereum/src/contracts/utils.rs diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 89e6d393a8..76345ec8f4 100644 --- 
a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -6,17 +6,15 @@ use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; -use ethers::abi::RawLog; use ethers::prelude::Middleware; -use ethers_contract::{ContractError, EthLogDecode, LogMeta as EthersLogMeta}; -use ethers_core::types::H256 as EthersH256; use hyperlane_core::{ ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, InterchainGasPaymaster, InterchainGasPayment, LogMeta, SequenceAwareIndexer, H160, H256, H512, }; -use tracing::{instrument, warn}; +use tracing::instrument; +use super::utils::fetch_raw_logs_and_log_meta; use crate::interfaces::i_interchain_gas_paymaster::{ GasPaymentFilter, IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, IINTERCHAINGASPAYMASTER_ABI, @@ -133,44 +131,25 @@ where &self, tx_hash: H512, ) -> ChainResult, LogMeta)>> { - let ethers_tx_hash: EthersH256 = tx_hash.into(); - let receipt = self - .provider - .get_transaction_receipt(ethers_tx_hash) - .await - .map_err(|err| ContractError::::MiddlewareError(err))?; - let Some(receipt) = receipt else { - warn!(%tx_hash, "No receipt found for tx hash"); - return Ok(vec![]); - }; - - let logs: Vec<_> = receipt - .logs - .into_iter() - .filter_map(|log| { - // Filter out logs that aren't emitted by this contract - if log.address != self.contract.address() { - return None; - } - let raw_log = RawLog { - topics: log.topics.clone(), - data: log.data.to_vec(), - }; - let log_meta: EthersLogMeta = (&log).into(); - let gas_payment_filter = GasPaymentFilter::decode_log(&raw_log).ok(); - gas_payment_filter.map(|log| { - ( - Indexed::new(InterchainGasPayment { - message_id: H256::from(log.message_id), - destination: log.destination_domain, - payment: log.payment.into(), - gas_amount: log.gas_amount.into(), - }), - 
log_meta.into(), - ) - }) - }) - .collect(); + let logs = fetch_raw_logs_and_log_meta::( + tx_hash, + self.provider.clone(), + self.contract.address(), + ) + .await? + .into_iter() + .map(|(log, log_meta)| { + ( + Indexed::new(InterchainGasPayment { + message_id: H256::from(log.message_id), + destination: log.destination_domain, + payment: log.payment.into(), + gas_amount: log.gas_amount.into(), + }), + log_meta, + ) + }) + .collect(); Ok(logs) } } diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs index 54316b2c5d..efdb80c789 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs @@ -7,13 +7,12 @@ use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; -use ethers::abi::{AbiEncode, Detokenize, RawLog}; +use ethers::abi::{AbiEncode, Detokenize}; use ethers::prelude::Middleware; -use ethers_contract::{builders::ContractCall, ContractError, EthEvent, LogMeta as EthersLogMeta}; -use ethers_core::types::H256 as EthersH256; +use ethers_contract::builders::ContractCall; use futures_util::future::join_all; use hyperlane_core::H512; -use tracing::{instrument, warn}; +use tracing::instrument; use hyperlane_core::{ utils::bytes_to_hex, BatchItem, ChainCommunicationError, ChainResult, ContractLocator, @@ -32,6 +31,7 @@ use crate::tx::{call_with_lag, fill_tx_gas_params, report_tx}; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider, TransactionOverrides}; use super::multicall::{self, build_multicall}; +use super::utils::fetch_raw_logs_and_log_meta; impl std::fmt::Display for EthereumMailboxInternal where @@ -165,39 +165,20 @@ where &self, tx_hash: H512, ) -> ChainResult, LogMeta)>> { - let ethers_tx_hash: EthersH256 = tx_hash.into(); - let receipt = self - .provider - .get_transaction_receipt(ethers_tx_hash) - .await - .map_err(|err| ContractError::::MiddlewareError(err))?; - let 
Some(receipt) = receipt else { - warn!(%tx_hash, "No receipt found for tx hash"); - return Ok(vec![]); - }; - - let logs: Vec<_> = receipt - .logs - .into_iter() - .filter_map(|log| { - // Filter out logs that aren't emitted by this contract - if log.address != self.contract.address() { - return None; - } - let raw_log = RawLog { - topics: log.topics.clone(), - data: log.data.to_vec(), - }; - let log_meta: EthersLogMeta = (&log).into(); - let dispatch_filter = DispatchFilter::decode_log(&raw_log).ok(); - dispatch_filter.map(|event| { - ( - HyperlaneMessage::from(event.message.to_vec()).into(), - log_meta.into(), - ) - }) - }) - .collect(); + let logs = fetch_raw_logs_and_log_meta::( + tx_hash, + self.provider.clone(), + self.contract.address(), + ) + .await? + .into_iter() + .map(|(log, log_meta)| { + ( + HyperlaneMessage::from(log.message.to_vec()).into(), + log_meta, + ) + }) + .collect(); Ok(logs) } } diff --git a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs index c06ef54eaf..5836838ef1 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs @@ -4,17 +4,14 @@ use std::ops::RangeInclusive; use std::sync::Arc; use async_trait::async_trait; -use ethers::abi::RawLog; use ethers::prelude::Middleware; -use ethers_contract::{ContractError, EthEvent, LogMeta as EthersLogMeta}; -use ethers_core::types::H256 as EthersH256; use hyperlane_core::accumulator::incremental::IncrementalMerkle; -use tracing::{instrument, warn}; +use tracing::instrument; use hyperlane_core::{ ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, LogMeta, - MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H160, H256, H512, + MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H256, H512, }; use 
crate::interfaces::merkle_tree_hook::{ @@ -23,6 +20,8 @@ use crate::interfaces::merkle_tree_hook::{ use crate::tx::call_with_lag; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider}; +use super::utils::fetch_raw_logs_and_log_meta; + // We don't need the reverse of this impl, so it's ok to disable the clippy lint #[allow(clippy::from_over_into)] impl Into for Tree { @@ -152,95 +151,22 @@ where &self, tx_hash: H512, ) -> ChainResult, LogMeta)>> { - let raw_logs_and_log_meta = fetch_raw_logs_and_log_meta::( + let logs = fetch_raw_logs_and_log_meta::( tx_hash, self.provider.clone(), self.contract.address(), - )?; - let merkle_insertion_logs = raw_logs_and_log_meta? - .into_iter() - .filter_map(|(log, log_meta)| { - log.map(|log| { - ( - MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), - log_meta, - ) - }) - }) - .collect(); - // let ethers_tx_hash: EthersH256 = tx_hash.into(); - // let receipt = self - // .provider - // .get_transaction_receipt(ethers_tx_hash) - // .await - // .map_err(|err| ContractError::::MiddlewareError(err))?; - // let Some(receipt) = receipt else { - // warn!(%tx_hash, "No receipt found for tx hash"); - // return Ok(vec![]); - // }; - - // let logs: Vec<_> = receipt - // .logs - // .into_iter() - // .filter_map(|log| { - // // Filter out logs that aren't emitted by this contract - // if log.address != self.contract.address() { - // return None; - // } - // let raw_log = RawLog { - // topics: log.topics.clone(), - // data: log.data.to_vec(), - // }; - // let log_meta: EthersLogMeta = (&log).into(); - // let merkle_insertion_filter = InsertedIntoTreeFilter::decode_log(&raw_log).ok(); - // merkle_insertion_filter.map(|log| { - // ( - // MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), - // log_meta.into(), - // ) - // }) - // }) - // .collect(); - Ok(logs) - } -} - -pub async fn fetch_raw_logs_and_log_meta( - tx_hash: H512, - provider: Arc, - contract_address: H160, -) -> ChainResult, 
LogMeta)>> -where - M: Middleware + 'static, -{ - let ethers_tx_hash: EthersH256 = tx_hash.into(); - let receipt = provider - .get_transaction_receipt(ethers_tx_hash) - .await - .map_err(|err| ContractError::::MiddlewareError(err))?; - let Some(receipt) = receipt else { - warn!(%tx_hash, "No receipt found for tx hash"); - return Ok(vec![]); - }; - - let logs: Vec<(Option, LogMeta)> = receipt - .logs + ) + .await? .into_iter() - .filter_map(|log| { - // Filter out logs that aren't emitted by this contract - if log.address != contract_address.into() { - return None; - } - let raw_log = RawLog { - topics: log.topics.clone(), - data: log.data.to_vec(), - }; - let log_meta: EthersLogMeta = (&log).into(); - let event_filter = T::decode_log(&raw_log).ok(); - event_filter.map(|log| (Some(log), log_meta.into())) + .map(|(log, log_meta)| { + ( + MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), + log_meta, + ) }) .collect(); - Ok(logs) + Ok(logs) + } } #[async_trait] diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mod.rs b/rust/chains/hyperlane-ethereum/src/contracts/mod.rs index 32ad5b953d..1a39fae07a 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mod.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mod.rs @@ -1,11 +1,8 @@ pub use {interchain_gas::*, mailbox::*, merkle_tree_hook::*, validator_announce::*}; mod interchain_gas; - mod mailbox; - mod merkle_tree_hook; - mod multicall; - +mod utils; mod validator_announce; diff --git a/rust/chains/hyperlane-ethereum/src/contracts/utils.rs b/rust/chains/hyperlane-ethereum/src/contracts/utils.rs new file mode 100644 index 0000000000..bdf3e52f93 --- /dev/null +++ b/rust/chains/hyperlane-ethereum/src/contracts/utils.rs @@ -0,0 +1,48 @@ +use std::sync::Arc; + +use ethers::{ + abi::RawLog, + providers::Middleware, + types::{H160 as EthersH160, H256 as EthersH256}, +}; +use ethers_contract::{ContractError, EthEvent, LogMeta as EthersLogMeta}; +use hyperlane_core::{ChainResult, LogMeta, 
H512}; +use tracing::warn; + +pub async fn fetch_raw_logs_and_log_meta( + tx_hash: H512, + provider: Arc, + contract_address: EthersH160, +) -> ChainResult> +where + M: Middleware + 'static, +{ + let ethers_tx_hash: EthersH256 = tx_hash.into(); + let receipt = provider + .get_transaction_receipt(ethers_tx_hash) + .await + .map_err(|err| ContractError::::MiddlewareError(err))?; + let Some(receipt) = receipt else { + warn!(%tx_hash, "No receipt found for tx hash"); + return Ok(vec![]); + }; + + let logs: Vec<(T, LogMeta)> = receipt + .logs + .into_iter() + .filter_map(|log| { + // Filter out logs that aren't emitted by this contract + if log.address != contract_address { + return None; + } + let raw_log = RawLog { + topics: log.topics.clone(), + data: log.data.to_vec(), + }; + let log_meta: EthersLogMeta = (&log).into(); + let event_filter = T::decode_log(&raw_log).ok(); + event_filter.map(|log| (log, log_meta.into())) + }) + .collect(); + Ok(logs) +} From 529e6ccd5bd4c7b52c91e5bce89c046024a67844 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Wed, 5 Jun 2024 12:46:38 +0100 Subject: [PATCH 14/17] wip: no idle channel receivers --- rust/agents/relayer/src/msg/op_queue.rs | 33 ++++++------ rust/agents/relayer/src/msg/op_submitter.rs | 7 +-- rust/agents/relayer/src/relayer.rs | 50 ++++++++----------- rust/agents/relayer/src/server.rs | 14 +++--- .../src/contract_sync/cursors/mod.rs | 3 ++ rust/hyperlane-base/src/contract_sync/mod.rs | 30 ++++++----- rust/hyperlane-core/src/types/channel.rs | 37 +------------- 7 files changed, 68 insertions(+), 106 deletions(-) diff --git a/rust/agents/relayer/src/msg/op_queue.rs b/rust/agents/relayer/src/msg/op_queue.rs index b046cadba2..fd543a5382 100644 --- a/rust/agents/relayer/src/msg/op_queue.rs +++ b/rust/agents/relayer/src/msg/op_queue.rs @@ -1,9 +1,8 @@ use std::{cmp::Reverse, collections::BinaryHeap, sync::Arc}; use derive_new::new; -use hyperlane_core::BroadcastReceiver; use 
prometheus::{IntGauge, IntGaugeVec}; -use tokio::sync::Mutex; +use tokio::sync::{broadcast::Receiver, Mutex}; use tracing::{debug, info, instrument}; use crate::server::MessageRetryRequest; @@ -18,7 +17,7 @@ pub type QueueOperation = Box; pub struct OpQueue { metrics: IntGaugeVec, queue_metrics_label: String, - retry_rx: BroadcastReceiver, + retry_rx: Arc>, #[new(default)] queue: Arc>>>, } @@ -73,7 +72,7 @@ impl OpQueue { // The other consideration is whether to put the channel receiver in the OpQueue or in a dedicated task // that also holds an Arc to the Mutex. For simplicity, we'll put it in the OpQueue for now. let mut message_retry_requests = vec![]; - while let Ok(message_id) = self.retry_rx.receiver.try_recv() { + while let Ok(message_id) = self.retry_rx.try_recv() { message_retry_requests.push(message_id); } if message_retry_requests.is_empty() { @@ -112,13 +111,13 @@ mod test { use super::*; use crate::msg::pending_operation::PendingOperationResult; use hyperlane_core::{ - HyperlaneDomain, HyperlaneMessage, KnownHyperlaneDomain, MpmcChannel, TryBatchAs, - TxOutcome, H256, + HyperlaneDomain, HyperlaneMessage, KnownHyperlaneDomain, TryBatchAs, TxOutcome, H256, }; use std::{ collections::VecDeque, time::{Duration, Instant}, }; + use tokio::sync; #[derive(Debug, Clone)] struct MockPendingOperation { @@ -221,13 +220,17 @@ mod test { #[tokio::test] async fn test_multiple_op_queues_message_id() { let (metrics, queue_metrics_label) = dummy_metrics_and_label(); - let mpmc_channel = MpmcChannel::new(100); + let broadcaster = sync::broadcast::Sender::new(100); let mut op_queue_1 = OpQueue::new( metrics.clone(), queue_metrics_label.clone(), - mpmc_channel.receiver(), + Arc::new(broadcaster.subscribe()), + ); + let mut op_queue_2 = OpQueue::new( + metrics, + queue_metrics_label, + Arc::new(broadcaster.subscribe()), ); - let mut op_queue_2 = OpQueue::new(metrics, queue_metrics_label, mpmc_channel.receiver()); // Add some operations to the queue with increasing 
`next_attempt_after` values let destination_domain: HyperlaneDomain = KnownHyperlaneDomain::Injective.into(); @@ -253,11 +256,10 @@ mod test { } // Retry by message ids - let mpmc_tx = mpmc_channel.sender(); - mpmc_tx + broadcaster .send(MessageRetryRequest::MessageId(op_ids[1])) .unwrap(); - mpmc_tx + broadcaster .send(MessageRetryRequest::MessageId(op_ids[2])) .unwrap(); @@ -287,11 +289,11 @@ mod test { #[tokio::test] async fn test_destination_domain() { let (metrics, queue_metrics_label) = dummy_metrics_and_label(); - let mpmc_channel = MpmcChannel::new(100); + let broadcaster = sync::broadcast::Sender::new(100); let mut op_queue = OpQueue::new( metrics.clone(), queue_metrics_label.clone(), - mpmc_channel.receiver(), + Arc::new(broadcaster.subscribe()), ); // Add some operations to the queue with increasing `next_attempt_after` values @@ -313,8 +315,7 @@ mod test { } // Retry by domain - let mpmc_tx = mpmc_channel.sender(); - mpmc_tx + broadcaster .send(MessageRetryRequest::DestinationDomain( destination_domain_2.id(), )) diff --git a/rust/agents/relayer/src/msg/op_submitter.rs b/rust/agents/relayer/src/msg/op_submitter.rs index b83dd2a507..7070b070ff 100644 --- a/rust/agents/relayer/src/msg/op_submitter.rs +++ b/rust/agents/relayer/src/msg/op_submitter.rs @@ -1,3 +1,4 @@ +use std::sync::Arc; use std::time::Duration; use derive_new::new; @@ -108,17 +109,17 @@ impl SerialSubmitter { let prepare_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "prepare_queue".to_string(), - retry_rx.clone(), + Arc::new(retry_rx.clone()), ); let submit_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "submit_queue".to_string(), - retry_rx.clone(), + Arc::new(retry_rx.clone()), ); let confirm_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "confirm_queue".to_string(), - retry_rx, + Arc::new(retry_rx), ); let tasks = [ diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index 568d66909f..b33f3a3cf8 100644 --- 
a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -16,11 +16,11 @@ use hyperlane_base::{ SyncOptions, }; use hyperlane_core::{ - BroadcastReceiver, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, - MerkleTreeInsertion, MpmcChannel, H512, U256, + HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, MerkleTreeInsertion, H512, U256, }; use tokio::{ sync::{ + broadcast::{Receiver, Sender}, mpsc::{self, UnboundedReceiver, UnboundedSender}, RwLock, }, @@ -306,8 +306,8 @@ impl BaseAgent for Relayer { } // run server - let mpmc_channel = MpmcChannel::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let custom_routes = relayer_server::routes(mpmc_channel.sender()); + let broadcast_tx = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); + let custom_routes = relayer_server::routes(broadcast_tx); let server = self .core @@ -319,18 +319,6 @@ impl BaseAgent for Relayer { .instrument(info_span!("Relayer server")); tasks.push(server_task); - let mut txid_receivers = self - .message_syncs - .iter() - .filter_map(|(domain, sync)| { - let maybe_rx = sync.get_new_receive_tx_channel(); - if maybe_rx.is_none() { - warn!(?domain, "No txid receiver for chain"); - } - maybe_rx.map(|rx| (domain.clone(), rx)) - }) - .collect::>(); - // send channels by destination chain let mut send_channels = HashMap::with_capacity(self.destination_chains.len()); for (dest_domain, dest_conf) in &self.destination_chains { @@ -341,7 +329,7 @@ impl BaseAgent for Relayer { self.run_destination_submitter( dest_domain, receive_channel, - mpmc_channel.receiver(), + broadcast_tx.subscribe(), // Default to submitting one message at a time if there is no batch config self.core.settings.chains[dest_domain.name()] .connection @@ -365,22 +353,23 @@ impl BaseAgent for Relayer { } for origin in &self.origin_chains { + let maybe_broadcaster = self + .message_syncs + .get(origin) + .map(|sync| sync.get_broadcaster()) + .flatten(); tasks.push(self.run_message_sync(origin, 
task_monitor.clone()).await); tasks.push( self.run_interchain_gas_payment_sync( origin, - txid_receivers.get(origin).cloned(), + maybe_broadcaster.clone(), task_monitor.clone(), ) .await, ); tasks.push( - self.run_merkle_tree_hook_syncs( - origin, - txid_receivers.remove(origin), - task_monitor.clone(), - ) - .await, + self.run_merkle_tree_hook_syncs(origin, maybe_broadcaster, task_monitor.clone()) + .await, ); } @@ -424,7 +413,7 @@ impl Relayer { async fn run_interchain_gas_payment_sync( &self, origin: &HyperlaneDomain, - rx: Option>, + broadcast_tx: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index_settings(); @@ -437,7 +426,7 @@ impl Relayer { tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { contract_sync .clone() - .sync("gas_payments", SyncOptions::new(Some(cursor), rx)) + .sync("gas_payments", SyncOptions::new(Some(cursor), broadcast_tx)) .await })) .instrument(info_span!("IgpSync")) @@ -446,7 +435,7 @@ impl Relayer { async fn run_merkle_tree_hook_syncs( &self, origin: &HyperlaneDomain, - rx: Option>, + broadcast_tx: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); @@ -455,7 +444,10 @@ impl Relayer { tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { contract_sync .clone() - .sync("merkle_tree_hook", SyncOptions::new(Some(cursor), rx)) + .sync( + "merkle_tree_hook", + SyncOptions::new(Some(cursor), broadcast_tx), + ) .await })) .instrument(info_span!("MerkleTreeHookSync")) @@ -527,7 +519,7 @@ impl Relayer { &self, destination: &HyperlaneDomain, receiver: UnboundedReceiver, - retry_receiver_channel: BroadcastReceiver, + retry_receiver_channel: Receiver, batch_size: u32, task_monitor: TaskMonitor, ) -> Instrumented> { diff --git a/rust/agents/relayer/src/server.rs b/rust/agents/relayer/src/server.rs index 88cad86326..4e95bb9b72 100644 --- 
a/rust/agents/relayer/src/server.rs +++ b/rust/agents/relayer/src/server.rs @@ -109,12 +109,12 @@ mod tests { use super::*; use axum::http::StatusCode; use ethers::utils::hex::ToHex; - use hyperlane_core::{BroadcastReceiver, MpmcChannel}; use std::net::SocketAddr; + use tokio::sync::broadcast::{Receiver, Sender}; - fn setup_test_server() -> (SocketAddr, BroadcastReceiver) { - let mpmc_channel = MpmcChannel::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let message_retry_api = MessageRetryApi::new(mpmc_channel.sender()); + fn setup_test_server() -> (SocketAddr, Receiver) { + let broadcast_tx = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); + let message_retry_api = MessageRetryApi::new(broadcast_tx); let (path, retry_router) = message_retry_api.get_route(); let app = Router::new().nest(path, retry_router); @@ -124,7 +124,7 @@ mod tests { let addr = server.local_addr(); tokio::spawn(server); - (addr, mpmc_channel.receiver()) + (addr, broadcast_tx.subscribe()) } #[tokio::test] @@ -148,7 +148,7 @@ mod tests { assert_eq!(response.status(), StatusCode::OK); assert_eq!( - rx.receiver.try_recv().unwrap(), + rx.try_recv().unwrap(), MessageRetryRequest::MessageId(message_id) ); } @@ -172,7 +172,7 @@ mod tests { assert_eq!(response.status(), StatusCode::OK); assert_eq!( - rx.receiver.try_recv().unwrap(), + rx.try_recv().unwrap(), MessageRetryRequest::DestinationDomain(destination_domain) ); } diff --git a/rust/hyperlane-base/src/contract_sync/cursors/mod.rs b/rust/hyperlane-base/src/contract_sync/cursors/mod.rs index dc2783e87e..016454d04e 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/mod.rs @@ -18,7 +18,10 @@ pub enum CursorType { const TX_ID_CHANNEL_CAPACITY: Option = Some(1_000_000); pub trait Indexable { + /// Returns the configured cursor type of this type for the given domain, (e.g. 
`SequenceAware` or `RateLimited`) fn indexing_cursor(domain: HyperlaneDomainProtocol) -> CursorType; + /// Indexing tasks may have channels open between them to share information that improves reliability (such as the txid where a message event was indexed). + /// By default this method is None, and it should return a channel capacity if this indexing task is to broadcast anything to other tasks. fn broadcast_channel_size() -> Option { None } diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 99e1eaea4e..2c1dbb073a 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -10,11 +10,11 @@ use hyperlane_core::{ HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, SequenceAwareIndexer, }; -use hyperlane_core::{BroadcastReceiver, Indexed, LogMeta, H512}; +use hyperlane_core::{Indexed, LogMeta, H512}; pub use metrics::ContractSyncMetrics; use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; use tokio::sync::broadcast::error::TryRecvError; -use tokio::sync::broadcast::Sender as BroadcastSender; +use tokio::sync::broadcast::{Receiver as BroadcastReceiver, Sender as BroadcastSender}; use tokio::time::sleep; use tracing::{debug, info, instrument, trace, warn}; @@ -66,11 +66,8 @@ where &self.domain } - fn get_new_receive_tx_channel(&self) -> Option> { - // Create a new channel if it doesn't exist - self.broadcast_sender - .as_ref() - .map(|tx| BroadcastReceiver::new(tx.clone(), tx.subscribe())) + fn get_broadcaster(&self) -> Option> { + self.broadcast_sender.clone() } /// Sync logs and write them to the LogStore @@ -87,8 +84,9 @@ where .with_label_values(&[label, chain_name]); loop { - if let Some(recv) = opts.tx_id_recv.as_mut() { - self.fetch_logs_from_receiver(recv, &stored_logs_metric) + if let Some(broadcaster) = opts.tx_id_broadcaster.as_mut() { + let mut rx = broadcaster.subscribe(); + 
self.fetch_logs_from_receiver(&mut rx, &stored_logs_metric) .await; } if let Some(cursor) = opts.cursor.as_mut() { @@ -250,7 +248,7 @@ pub trait ContractSyncer: Send + Sync { fn domain(&self) -> &HyperlaneDomain; /// If this syncer is also a broadcaster, return the channel to receive txids - fn get_new_receive_tx_channel(&self) -> Option>; + fn get_broadcaster(&self) -> Option>; } #[derive(new)] @@ -260,14 +258,14 @@ pub struct SyncOptions { // Might want to refactor into an enum later, where we either index with a cursor or rely on receiving // txids from a channel to other indexing tasks cursor: Option>>, - tx_id_recv: Option>, + tx_id_broadcaster: Option>, } impl From>> for SyncOptions { fn from(cursor: Box>) -> Self { Self { cursor: Some(cursor), - tx_id_recv: None, + tx_id_broadcaster: None, } } } @@ -305,8 +303,8 @@ where ContractSync::domain(self) } - fn get_new_receive_tx_channel(&self) -> Option> { - ContractSync::get_new_receive_tx_channel(self) + fn get_broadcaster(&self) -> Option> { + ContractSync::get_broadcaster(self) } } @@ -344,7 +342,7 @@ where ContractSync::domain(self) } - fn get_new_receive_tx_channel(&self) -> Option> { - ContractSync::get_new_receive_tx_channel(self) + fn get_broadcaster(&self) -> Option> { + ContractSync::get_broadcaster(self) } } diff --git a/rust/hyperlane-core/src/types/channel.rs b/rust/hyperlane-core/src/types/channel.rs index 59f81c62c8..7f892a5458 100644 --- a/rust/hyperlane-core/src/types/channel.rs +++ b/rust/hyperlane-core/src/types/channel.rs @@ -6,7 +6,6 @@ use tokio::sync::broadcast::{Receiver, Sender}; /// Multi-producer, multi-consumer channel pub struct MpmcChannel { sender: Sender, - receiver: BroadcastReceiver, } impl MpmcChannel { @@ -19,7 +18,6 @@ impl MpmcChannel { let (sender, receiver) = tokio::sync::broadcast::channel(capacity); Self { sender: sender.clone(), - receiver: BroadcastReceiver::new(sender, receiver), } } @@ -29,38 +27,7 @@ impl MpmcChannel { } /// Returns a clone of the receiver end of the 
channel. - pub fn receiver(&self) -> BroadcastReceiver { - self.receiver.clone() - } -} - -/// Clonable receiving end of a multi-producer, multi-consumer channel -#[derive(Debug, new)] -pub struct BroadcastReceiver { - sender: Sender, - /// The receiving end of the channel. - pub receiver: Receiver, -} - -impl Clone for BroadcastReceiver { - fn clone(&self) -> Self { - Self { - sender: self.sender.clone(), - receiver: self.sender.subscribe(), - } - } -} - -impl Deref for BroadcastReceiver { - type Target = Receiver; - - fn deref(&self) -> &Self::Target { - &self.receiver - } -} - -impl DerefMut for BroadcastReceiver { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.receiver + pub fn receiver(&self) -> Receiver { + self.sender.subscribe() } } From 936b9a71996e0c6b20c7892d6bd6a0c46d20127b Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Wed, 5 Jun 2024 14:29:38 +0100 Subject: [PATCH 15/17] fix: idle receiver channels --- rust/agents/relayer/src/msg/op_queue.rs | 10 +++---- rust/agents/relayer/src/msg/op_submitter.rs | 16 +++++----- rust/agents/relayer/src/relayer.rs | 11 ++++--- rust/agents/relayer/src/server.rs | 2 +- rust/hyperlane-core/src/types/channel.rs | 33 --------------------- rust/hyperlane-core/src/types/mod.rs | 4 --- 6 files changed, 20 insertions(+), 56 deletions(-) delete mode 100644 rust/hyperlane-core/src/types/channel.rs diff --git a/rust/agents/relayer/src/msg/op_queue.rs b/rust/agents/relayer/src/msg/op_queue.rs index fd543a5382..6881d06769 100644 --- a/rust/agents/relayer/src/msg/op_queue.rs +++ b/rust/agents/relayer/src/msg/op_queue.rs @@ -17,7 +17,7 @@ pub type QueueOperation = Box; pub struct OpQueue { metrics: IntGaugeVec, queue_metrics_label: String, - retry_rx: Arc>, + retry_rx: Arc>>, #[new(default)] queue: Arc>>>, } @@ -72,7 +72,7 @@ impl OpQueue { // The other consideration is whether to put the channel receiver in the OpQueue or in a dedicated task // that also holds an Arc to 
the Mutex. For simplicity, we'll put it in the OpQueue for now. let mut message_retry_requests = vec![]; - while let Ok(message_id) = self.retry_rx.try_recv() { + while let Ok(message_id) = self.retry_rx.lock().await.try_recv() { message_retry_requests.push(message_id); } if message_retry_requests.is_empty() { @@ -224,12 +224,12 @@ mod test { let mut op_queue_1 = OpQueue::new( metrics.clone(), queue_metrics_label.clone(), - Arc::new(broadcaster.subscribe()), + Arc::new(Mutex::new(broadcaster.subscribe())), ); let mut op_queue_2 = OpQueue::new( metrics, queue_metrics_label, - Arc::new(broadcaster.subscribe()), + Arc::new(Mutex::new(broadcaster.subscribe())), ); // Add some operations to the queue with increasing `next_attempt_after` values @@ -293,7 +293,7 @@ mod test { let mut op_queue = OpQueue::new( metrics.clone(), queue_metrics_label.clone(), - Arc::new(broadcaster.subscribe()), + Arc::new(Mutex::new(broadcaster.subscribe())), ); // Add some operations to the queue with increasing `next_attempt_after` values diff --git a/rust/agents/relayer/src/msg/op_submitter.rs b/rust/agents/relayer/src/msg/op_submitter.rs index 7070b070ff..66d1a57d38 100644 --- a/rust/agents/relayer/src/msg/op_submitter.rs +++ b/rust/agents/relayer/src/msg/op_submitter.rs @@ -5,7 +5,9 @@ use derive_new::new; use futures::future::join_all; use futures_util::future::try_join_all; use prometheus::{IntCounter, IntGaugeVec}; +use tokio::sync::broadcast::Sender; use tokio::sync::mpsc; +use tokio::sync::Mutex; use tokio::task::JoinHandle; use tokio::time::sleep; use tokio_metrics::TaskMonitor; @@ -14,8 +16,8 @@ use tracing::{info, warn}; use hyperlane_base::CoreMetrics; use hyperlane_core::{ - BatchItem, BroadcastReceiver, ChainCommunicationError, ChainResult, HyperlaneDomain, - HyperlaneDomainProtocol, HyperlaneMessage, TxOutcome, + BatchItem, ChainCommunicationError, ChainResult, HyperlaneDomain, HyperlaneDomainProtocol, + HyperlaneMessage, TxOutcome, }; use 
crate::msg::pending_message::CONFIRM_DELAY; @@ -78,7 +80,7 @@ pub struct SerialSubmitter { /// Receiver for new messages to submit. rx: mpsc::UnboundedReceiver, /// Receiver for retry requests. - retry_rx: BroadcastReceiver, + retry_tx: Sender, /// Metrics for serial submitter. metrics: SerialSubmitterMetrics, /// Max batch size for submitting messages @@ -102,24 +104,24 @@ impl SerialSubmitter { domain, metrics, rx: rx_prepare, - retry_rx, + retry_tx, max_batch_size, task_monitor, } = self; let prepare_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "prepare_queue".to_string(), - Arc::new(retry_rx.clone()), + Arc::new(Mutex::new(retry_tx.subscribe())), ); let submit_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "submit_queue".to_string(), - Arc::new(retry_rx.clone()), + Arc::new(Mutex::new(retry_tx.subscribe())), ); let confirm_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "confirm_queue".to_string(), - Arc::new(retry_rx), + Arc::new(Mutex::new(retry_tx.subscribe())), ); let tasks = [ diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index b33f3a3cf8..de177daf7c 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -20,7 +20,7 @@ use hyperlane_core::{ }; use tokio::{ sync::{ - broadcast::{Receiver, Sender}, + broadcast::Sender, mpsc::{self, UnboundedReceiver, UnboundedSender}, RwLock, }, @@ -307,7 +307,7 @@ impl BaseAgent for Relayer { // run server let broadcast_tx = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let custom_routes = relayer_server::routes(broadcast_tx); + let custom_routes = relayer_server::routes(broadcast_tx.clone()); let server = self .core @@ -329,7 +329,7 @@ impl BaseAgent for Relayer { self.run_destination_submitter( dest_domain, receive_channel, - broadcast_tx.subscribe(), + broadcast_tx.clone(), // Default to submitting one message at a time if there is no batch config self.core.settings.chains[dest_domain.name()] 
.connection @@ -356,8 +356,7 @@ impl BaseAgent for Relayer { let maybe_broadcaster = self .message_syncs .get(origin) - .map(|sync| sync.get_broadcaster()) - .flatten(); + .and_then(|sync| sync.get_broadcaster()); tasks.push(self.run_message_sync(origin, task_monitor.clone()).await); tasks.push( self.run_interchain_gas_payment_sync( @@ -519,7 +518,7 @@ impl Relayer { &self, destination: &HyperlaneDomain, receiver: UnboundedReceiver, - retry_receiver_channel: Receiver, + retry_receiver_channel: Sender, batch_size: u32, task_monitor: TaskMonitor, ) -> Instrumented> { diff --git a/rust/agents/relayer/src/server.rs b/rust/agents/relayer/src/server.rs index 4e95bb9b72..364181df60 100644 --- a/rust/agents/relayer/src/server.rs +++ b/rust/agents/relayer/src/server.rs @@ -114,7 +114,7 @@ mod tests { fn setup_test_server() -> (SocketAddr, Receiver) { let broadcast_tx = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let message_retry_api = MessageRetryApi::new(broadcast_tx); + let message_retry_api = MessageRetryApi::new(broadcast_tx.clone()); let (path, retry_router) = message_retry_api.get_route(); let app = Router::new().nest(path, retry_router); diff --git a/rust/hyperlane-core/src/types/channel.rs b/rust/hyperlane-core/src/types/channel.rs deleted file mode 100644 index 7f892a5458..0000000000 --- a/rust/hyperlane-core/src/types/channel.rs +++ /dev/null @@ -1,33 +0,0 @@ -use std::ops::{Deref, DerefMut}; - -use derive_new::new; -use tokio::sync::broadcast::{Receiver, Sender}; - -/// Multi-producer, multi-consumer channel -pub struct MpmcChannel { - sender: Sender, -} - -impl MpmcChannel { - /// Creates a new `MpmcChannel` with the specified capacity. - /// - /// # Arguments - /// - /// * `capacity` - The maximum number of messages that can be buffered in the channel. 
- pub fn new(capacity: usize) -> Self { - let (sender, receiver) = tokio::sync::broadcast::channel(capacity); - Self { - sender: sender.clone(), - } - } - - /// Returns a clone of the sender end of the channel. - pub fn sender(&self) -> Sender { - self.sender.clone() - } - - /// Returns a clone of the receiver end of the channel. - pub fn receiver(&self) -> Receiver { - self.sender.subscribe() - } -} diff --git a/rust/hyperlane-core/src/types/mod.rs b/rust/hyperlane-core/src/types/mod.rs index 59f20630bf..c8b2ad3464 100644 --- a/rust/hyperlane-core/src/types/mod.rs +++ b/rust/hyperlane-core/src/types/mod.rs @@ -8,8 +8,6 @@ pub use self::primitive_types::*; pub use ::primitive_types as ethers_core_types; pub use announcement::*; pub use chain_data::*; -#[cfg(feature = "async")] -pub use channel::*; pub use checkpoint::*; pub use indexing::*; pub use log_metadata::*; @@ -21,8 +19,6 @@ use crate::{Decode, Encode, HyperlaneProtocolError}; mod announcement; mod chain_data; -#[cfg(feature = "async")] -mod channel; mod checkpoint; mod indexing; mod log_metadata; From 4dca21edd3761fea1675a9556117030c5abeeb64 Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Wed, 5 Jun 2024 14:57:53 +0100 Subject: [PATCH 16/17] feat(scraper): enable hook indexing --- rust/agents/scraper/src/agent.rs | 43 +++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/rust/agents/scraper/src/agent.rs b/rust/agents/scraper/src/agent.rs index 121cd571e5..535dd2c322 100644 --- a/rust/agents/scraper/src/agent.rs +++ b/rust/agents/scraper/src/agent.rs @@ -5,10 +5,10 @@ use derive_more::AsRef; use futures::future::try_join_all; use hyperlane_base::{ metrics::AgentMetrics, settings::IndexSettings, BaseAgent, ChainMetrics, ContractSyncMetrics, - ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, + ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, SyncOptions, }; -use hyperlane_core::{Delivery, 
HyperlaneDomain, HyperlaneMessage, InterchainGasPayment}; -use tokio::task::JoinHandle; +use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H512}; +use tokio::{sync::broadcast::Sender, task::JoinHandle}; use tracing::{info_span, instrument::Instrumented, trace, Instrument}; use crate::{chain_scraper::HyperlaneSqlDb, db::ScraperDb, settings::ScraperSettings}; @@ -135,16 +135,16 @@ impl Scraper { let domain = scraper.domain.clone(); let mut tasks = Vec::with_capacity(2); - tasks.push( - self.build_message_indexer( + let (message_indexer, maybe_broadcaster) = self + .build_message_indexer( domain.clone(), self.core_metrics.clone(), self.contract_sync_metrics.clone(), db.clone(), index_settings.clone(), ) - .await, - ); + .await; + tasks.push(message_indexer); tasks.push( self.build_delivery_indexer( domain.clone(), @@ -152,6 +152,7 @@ self.contract_sync_metrics.clone(), db.clone(), index_settings.clone(), + maybe_broadcaster.clone(), ) .await, ); @@ -162,6 +163,7 @@ self.contract_sync_metrics.clone(), db, index_settings.clone(), + maybe_broadcaster, ) .await, ); @@ -182,7 +184,7 @@ contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, - ) -> Instrumented> { + ) -> (Instrumented>, Option>) { let sync = self .as_ref() .settings @@ -195,9 +197,12 @@ .await .unwrap(); let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync("message_dispatch", cursor.into()).await }).instrument( - info_span!("ChainContractSync", chain=%domain.name(), event="message_dispatch"), - ) + let maybe_broadcaster = sync.get_broadcaster(); + let task = tokio::spawn(async move { sync.sync("message_dispatch", cursor.into()).await }) + .instrument( + info_span!("ChainContractSync", chain=%domain.name(), event="message_dispatch"), + ); + (task, maybe_broadcaster) } async fn build_delivery_indexer( @@ -207,6 +212,7 @@ 
contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, + broadcast_tx: Option>, ) -> Instrumented> { let sync = self .as_ref() @@ -222,8 +228,11 @@ impl Scraper { let label = "message_delivery"; let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync(label, cursor.into()).await }) - .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) + tokio::spawn(async move { + sync.sync(label, SyncOptions::new(Some(cursor), broadcast_tx)) + .await + }) + .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) } async fn build_interchain_gas_payment_indexer( @@ -233,6 +242,7 @@ impl Scraper { contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, + broadcast_tx: Option>, ) -> Instrumented> { let sync = self .as_ref() @@ -248,7 +258,10 @@ impl Scraper { let label = "gas_payment"; let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync(label, cursor.into()).await }) - .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) + tokio::spawn(async move { + sync.sync(label, SyncOptions::new(Some(cursor), broadcast_tx)) + .await + }) + .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) } } From 69dd7e11206cb38ebdffbf26c6bf7f7d6cdde19e Mon Sep 17 00:00:00 2001 From: Daniel Savu <23065004+daniel-savu@users.noreply.github.com> Date: Fri, 7 Jun 2024 12:41:01 +0100 Subject: [PATCH 17/17] remediations --- rust/agents/relayer/src/relayer.rs | 29 ++++++++++++-------- rust/agents/scraper/src/agent.rs | 17 +++++++----- rust/hyperlane-base/src/contract_sync/mod.rs | 10 +++---- 3 files changed, 32 insertions(+), 24 deletions(-) diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index de177daf7c..085e43ee6c 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -20,7 +20,7 @@ use hyperlane_core::{ }; use 
tokio::{ sync::{ - broadcast::Sender, + broadcast::{Receiver, Sender}, mpsc::{self, UnboundedReceiver, UnboundedSender}, RwLock, }, @@ -306,8 +306,8 @@ impl BaseAgent for Relayer { } // run server - let broadcast_tx = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let custom_routes = relayer_server::routes(broadcast_tx.clone()); + let sender = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); + let custom_routes = relayer_server::routes(sender.clone()); let server = self .core @@ -329,7 +329,7 @@ impl BaseAgent for Relayer { self.run_destination_submitter( dest_domain, receive_channel, - broadcast_tx.clone(), + sender.clone(), // Default to submitting one message at a time if there is no batch config self.core.settings.chains[dest_domain.name()] .connection @@ -361,14 +361,18 @@ impl BaseAgent for Relayer { tasks.push( self.run_interchain_gas_payment_sync( origin, - maybe_broadcaster.clone(), + maybe_broadcaster.clone().map(|b| b.subscribe()), task_monitor.clone(), ) .await, ); tasks.push( - self.run_merkle_tree_hook_syncs(origin, maybe_broadcaster, task_monitor.clone()) - .await, + self.run_merkle_tree_hook_syncs( + origin, + maybe_broadcaster.map(|b| b.subscribe()), + task_monitor.clone(), + ) + .await, ); } @@ -412,7 +416,7 @@ impl Relayer { async fn run_interchain_gas_payment_sync( &self, origin: &HyperlaneDomain, - broadcast_tx: Option>, + tx_id_receiver: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index_settings(); @@ -425,7 +429,10 @@ impl Relayer { tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { contract_sync .clone() - .sync("gas_payments", SyncOptions::new(Some(cursor), broadcast_tx)) + .sync( + "gas_payments", + SyncOptions::new(Some(cursor), tx_id_receiver), + ) .await })) .instrument(info_span!("IgpSync")) @@ -434,7 +441,7 @@ impl Relayer { async fn run_merkle_tree_hook_syncs( &self, origin: &HyperlaneDomain, - broadcast_tx: Option>, + tx_id_receiver: Option>, 
task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); @@ -445,7 +452,7 @@ impl Relayer { .clone() .sync( "merkle_tree_hook", - SyncOptions::new(Some(cursor), broadcast_tx), + SyncOptions::new(Some(cursor), tx_id_receiver), ) .await })) diff --git a/rust/agents/scraper/src/agent.rs b/rust/agents/scraper/src/agent.rs index 535dd2c322..f33f005560 100644 --- a/rust/agents/scraper/src/agent.rs +++ b/rust/agents/scraper/src/agent.rs @@ -8,7 +8,10 @@ use hyperlane_base::{ ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, SyncOptions, }; use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H512}; -use tokio::{sync::broadcast::Sender, task::JoinHandle}; +use tokio::{ + sync::broadcast::{Receiver, Sender}, + task::JoinHandle, +}; use tracing::{info_span, instrument::Instrumented, trace, Instrument}; use crate::{chain_scraper::HyperlaneSqlDb, db::ScraperDb, settings::ScraperSettings}; @@ -152,7 +155,7 @@ impl Scraper { self.contract_sync_metrics.clone(), db.clone(), index_settings.clone(), - maybe_broadcaster.clone(), + maybe_broadcaster.clone().map(|b| b.subscribe()), ) .await, ); @@ -163,7 +166,7 @@ impl Scraper { self.contract_sync_metrics.clone(), db, index_settings.clone(), - maybe_broadcaster, + maybe_broadcaster.map(|b| b.subscribe()), ) .await, ); @@ -212,7 +215,7 @@ impl Scraper { contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, - broadcast_tx: Option>, + tx_id_receiver: Option>, ) -> Instrumented> { let sync = self .as_ref() @@ -229,7 +232,7 @@ impl Scraper { let label = "message_delivery"; let cursor = sync.cursor(index_settings.clone()).await; tokio::spawn(async move { - sync.sync(label, SyncOptions::new(Some(cursor), broadcast_tx)) + sync.sync(label, SyncOptions::new(Some(cursor), tx_id_receiver)) .await }) .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) @@ -242,7 +245,7 @@ impl 
Scraper { contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, - broadcast_tx: Option>, + tx_id_receiver: Option>, ) -> Instrumented> { let sync = self .as_ref() @@ -259,7 +262,7 @@ impl Scraper { let label = "gas_payment"; let cursor = sync.cursor(index_settings.clone()).await; tokio::spawn(async move { - sync.sync(label, SyncOptions::new(Some(cursor), broadcast_tx)) + sync.sync(label, SyncOptions::new(Some(cursor), tx_id_receiver)) .await }) .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 862b965587..9c8ba75d6a 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -84,10 +84,8 @@ where .with_label_values(&[label, chain_name]); loop { - if let Some(broadcaster) = opts.tx_id_broadcaster.as_mut() { - let mut rx = broadcaster.subscribe(); - self.fetch_logs_from_receiver(&mut rx, &stored_logs_metric) - .await; + if let Some(rx) = opts.tx_id_receiver.as_mut() { + self.fetch_logs_from_receiver(rx, &stored_logs_metric).await; } if let Some(cursor) = opts.cursor.as_mut() { self.fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) @@ -258,14 +256,14 @@ pub struct SyncOptions { // Might want to refactor into an enum later, where we either index with a cursor or rely on receiving // txids from a channel to other indexing tasks cursor: Option>>, - tx_id_broadcaster: Option>, + tx_id_receiver: Option>, } impl From>> for SyncOptions { fn from(cursor: Box>) -> Self { Self { cursor: Some(cursor), - tx_id_broadcaster: None, + tx_id_receiver: None, } } }