Retry block_pointer_from_number on deploy (#4812)
Add a retry so that a single failure while making the call does not prevent the subgraph from being deployed.
Add a warning for failed attempts.
mangas committed Aug 22, 2023
1 parent d8735c7 commit 0d49f14
Showing 3 changed files with 28 additions and 13 deletions.
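For orientation, the retry pattern this commit introduces can be sketched standalone with the tokio_retry crate (the same crate the change pulls in). The fallible_call function, the attempt counter, and the concrete numbers below are illustrative stand-ins, not part of the diff:

use std::sync::atomic::{AtomicU32, Ordering};
use std::time::Duration;
use tokio_retry::strategy::{jitter, ExponentialBackoff};
use tokio_retry::Retry;

// Stand-in for a call like chain.block_pointer_from_number: fails once, then succeeds.
async fn fallible_call(attempts: &AtomicU32) -> Result<u64, String> {
    if attempts.fetch_add(1, Ordering::SeqCst) == 0 {
        Err("transient RPC error".to_string())
    } else {
        Ok(123)
    }
}

#[tokio::main]
async fn main() {
    // Exponential backoff with jitter, capped at 30s per delay, at most 2 retries.
    let strategy = ExponentialBackoff::from_millis(2)
        .max_delay(Duration::from_secs(30))
        .map(jitter)
        .take(2);

    let attempts = AtomicU32::new(0);
    let result = Retry::spawn(strategy, || {
        // The committed code logs warn!(&logger, ...) for every failed attempt here.
        fallible_call(&attempts)
    })
    .await;

    println!("{:?}", result); // Ok(123): the single failure was absorbed by the retry
}

Retry::spawn re-invokes the closure according to the backoff iterator until it yields Ok or the strategy runs out of delays, which is exactly the behavior the registrar change relies on.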
core/src/subgraph/registrar.rs (24 changes: 15 additions & 9 deletions)
@@ -13,6 +13,9 @@ use graph::prelude::{
     CreateSubgraphResult, SubgraphAssignmentProvider as SubgraphAssignmentProviderTrait,
     SubgraphRegistrar as SubgraphRegistrarTrait, *,
 };
+use graph::tokio_retry::Retry;
+use graph::util::futures::retry_strategy;
+use graph::util::futures::RETRY_DEFAULT_LIMIT;
 
 pub struct SubgraphRegistrar<P, S, SM> {
     logger: Logger,
@@ -518,15 +521,18 @@ async fn resolve_start_block(
         .expect("cannot identify minimum start block because there are no data sources")
     {
         0 => Ok(None),
-        min_start_block => chain
-            .block_pointer_from_number(logger, min_start_block - 1)
-            .await
-            .map(Some)
-            .map_err(move |_| {
-                SubgraphRegistrarError::ManifestValidationError(vec![
-                    SubgraphManifestValidationError::BlockNotFound(min_start_block.to_string()),
-                ])
-            }),
+        min_start_block => Retry::spawn(retry_strategy(Some(2), RETRY_DEFAULT_LIMIT), move || {
+            chain
+                .block_pointer_from_number(&logger, min_start_block - 1)
+                .inspect_err(move |e| warn!(&logger, "Failed to get block number: {}", e))
+        })
+        .await
+        .map(Some)
+        .map_err(move |_| {
+            SubgraphRegistrarError::ManifestValidationError(vec![
+                SubgraphManifestValidationError::BlockNotFound(min_start_block.to_string()),
+            ])
+        }),
     }
 }

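The per-attempt warning in the hunk above hinges on inspecting each attempt's error before the retry wrapper consumes it. A minimal sketch of just that piece, assuming futures' TryFutureExt::inspect_err adapter and using eprintln! in place of the slog warn! macro; lookup_block is a made-up stand-in for chain.block_pointer_from_number:

use futures::TryFutureExt;

// Stand-in for the block lookup; always fails here for illustration.
async fn lookup_block() -> Result<u64, String> {
    Err("block not available yet".to_string())
}

#[tokio::main]
async fn main() {
    let result = lookup_block()
        // Runs for every failed attempt; the commit logs
        // warn!(&logger, "Failed to get block number: {}", e) at this point.
        .inspect_err(|e| eprintln!("Failed to get block number: {}", e))
        .await;
    assert!(result.is_err());
}
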
graph/src/lib.rs (1 change: 1 addition & 0 deletions)
@@ -53,6 +53,7 @@ pub use semver;
 pub use slog;
 pub use stable_hash_legacy;
 pub use tokio;
+pub use tokio_retry;
 pub use tokio_stream;
 pub use url;

graph/src/util/futures.rs (16 changes: 12 additions & 4 deletions)
@@ -9,6 +9,8 @@ use thiserror::Error;
 use tokio_retry::strategy::{jitter, ExponentialBackoff};
 use tokio_retry::Retry;
 
+pub const RETRY_DEFAULT_LIMIT: Duration = Duration::from_secs(30);
+
 /// Generic helper function for retrying async operations with built-in logging.
 ///
 /// To use this helper, do the following:
@@ -271,7 +273,7 @@
 
     let mut attempt_count = 0;
 
-    Retry::spawn(retry_strategy(limit_opt), move || {
+    Retry::spawn(retry_strategy(limit_opt, RETRY_DEFAULT_LIMIT), move || {
        let operation_name = operation_name.clone();
        let logger = logger.clone();
        let condition = condition.clone();
@@ -347,11 +349,17 @@
     })
 }
 
-fn retry_strategy(limit_opt: Option<usize>) -> Box<dyn Iterator<Item = Duration> + Send> {
+pub fn retry_strategy(
+    limit_opt: Option<usize>,
+    max_delay: Duration,
+) -> Box<dyn Iterator<Item = Duration> + Send> {
     // Exponential backoff, but with a maximum
-    let max_delay_ms = 30_000;
     let backoff = ExponentialBackoff::from_millis(2)
-        .max_delay(Duration::from_millis(max_delay_ms))
+        .max_delay(Duration::from_millis(
+            // This should be fine, if the value is too high it will crash during
+            // testing.
+            max_delay.as_millis().try_into().unwrap(),
+        ))
         .map(jitter);
 
     // Apply limit (maximum retry count)

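In the widened retry_strategy signature above, the caller-supplied Duration is converted to a u64 millisecond count and back before being handed to ExponentialBackoff::max_delay. A small sketch of why the try_into().unwrap() in the new code is safe for any sensible cap (Duration::as_millis returns u128 while Duration::from_millis takes u64):

use std::time::Duration;

fn main() {
    let max_delay = Duration::from_secs(30);
    // as_millis yields u128; from_millis wants u64, so the conversion can only
    // fail for absurdly large durations, far beyond any sensible retry cap.
    let max_delay_ms: u64 = max_delay.as_millis().try_into().unwrap();
    assert_eq!(max_delay_ms, 30_000);
    let _capped = Duration::from_millis(max_delay_ms);
}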