From 82d4007d2be51bcca0817970fd65efac34b87c46 Mon Sep 17 00:00:00 2001
From: shufps
Date: Tue, 14 Nov 2023 17:00:42 +0100
Subject: [PATCH 1/8] changed cache to 24h

---
 Cargo.lock                                   |  1 +
 Cargo.toml                                   |  3 +-
 src/bin/inx-chronicle/api/explorer/routes.rs | 59 +++++++++++++++++---
 3 files changed, 55 insertions(+), 8 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index b035bf4ed..cfe4d7569 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -506,6 +506,7 @@ dependencies = [
  "iota-crypto",
  "iota-sdk",
  "mongodb",
+ "once_cell",
  "packable",
  "pin-project",
  "prefix-hex",

diff --git a/Cargo.toml b/Cargo.toml
index 0c19cbd55..5a027d074 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -51,6 +51,7 @@ tracing-subscriber = { version = "0.3", default-features = false, features = [ "
 uint = { version = "0.9", default-features = false }
 url = { version = "2.3", default-features = false }
 uuid = { version = "1.3", default-features = false, features = [ "v4" ] }
+once_cell = "1.17.1"

 # Optional
 chrono = { version = "0.4", default-features = false, features = [ "std" ], optional = true }
@@ -112,7 +113,7 @@ api = [
 influx = [
     "dep:influxdb",
 ]
-inx = [
+inx = [
     "dep:inx",
     "dep:tonic",
 ]

diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs
index 2cb3283b8..ea60ab5f1 100644
--- a/src/bin/inx-chronicle/api/explorer/routes.rs
+++ b/src/bin/inx-chronicle/api/explorer/routes.rs
@@ -40,6 +40,10 @@ use crate::api::{
     ApiResult,
 };

+use std::time::{Instant, Duration};
+use tokio::sync::{RwLock};
+use once_cell::sync::Lazy;
+
 pub fn routes() -> Router {
     Router::new()
         .route("/balance/:address", get(balance))
@@ -319,11 +323,34 @@ async fn blocks_by_milestone_id(
     .await
 }

+struct RichestCacheData {
+    last_updated: Instant,
+    data: RichestAddressesResponse,
+}
+
+struct TokenCacheData {
+    last_updated: Instant,
+    data: TokenDistributionResponse,
+}
+
+static RICHEST_ADDRESSES_CACHE: Lazy<RwLock<Option<RichestCacheData>>> = Lazy::new(|| RwLock::new(None));
+static TOKEN_DISTRIBUTION_CACHE: Lazy<RwLock<Option<TokenCacheData>>> = Lazy::new(|| RwLock::new(None));
+
 async fn richest_addresses_ledger_analytics(
     database: Extension<MongoDb>,
     RichestAddressesQuery { top, ledger_index }: RichestAddressesQuery,
 ) -> ApiResult<RichestAddressesResponse> {
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
+    let cache = RICHEST_ADDRESSES_CACHE.read().await;
+
+    if let Some(cached_data) = &*cache {
+        if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
+            return Ok(cached_data.data.clone());
+        }
+    }
+
+    drop(cache); // release the read lock
+
     let res = database
         .collection::<OutputCollection>()
         .get_richest_addresses(ledger_index, top)
         .await?;
@@ -338,19 +365,22 @@ async fn richest_addresses_ledger_analytics(
         .bech32_hrp
         .parse()?;

-    Ok(RichestAddressesResponse {
+    let response = RichestAddressesResponse {
         top: res
             .top
             .into_iter()
             .map(|stat| AddressStatDto {
-                address: iota_sdk::types::block::address::Address::from(stat.address)
-                    .to_bech32(hrp)
-                    .to_string(),
+                address: iota_sdk::types::block::address::Address::from(stat.address).to_bech32(hrp.clone()).to_string(),
                 balance: stat.balance,
             })
             .collect(),
         ledger_index,
-    })
+    };
+
+    // Store the response in the cache
+    *RICHEST_ADDRESSES_CACHE.write().await = Some(RichestCacheData { last_updated: Instant::now(), data: response.clone() });
+
+    Ok(response)
 }

 async fn token_distribution_ledger_analytics(
     database: Extension<MongoDb>,
     LedgerIndex { ledger_index }: LedgerIndex,
 ) -> ApiResult<TokenDistributionResponse> {
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
+    let cache = TOKEN_DISTRIBUTION_CACHE.read().await;
+
+    if let Some(cached_data) = &*cache {
+        if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
+            return Ok(cached_data.data.clone());
+        }
+    }
+
+    drop(cache); // release the read lock
+
     let res = database
         .collection::<OutputCollection>()
         .get_token_distribution(ledger_index)
         .await?;

-    Ok(TokenDistributionResponse {
+    let response = TokenDistributionResponse {
         distribution: res.distribution.into_iter().map(Into::into).collect(),
         ledger_index,
-    })
+    };
+
+    // Store the response in the cache
+    *TOKEN_DISTRIBUTION_CACHE.write().await = Some(TokenCacheData { last_updated: Instant::now(), data: response.clone() });
+
+    Ok(response)
 }

 /// This is just a helper fn to either unwrap an optional ledger index param or fetch the latest
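For reference, the pattern this first patch introduces — read the cache under a read lock, fall back to the expensive query on a miss or stale entry, then take a write lock to store the fresh result — can be reduced to the following self-contained sketch. It is not part of the patch series: it uses a synchronous std RwLock and a String payload instead of the real tokio lock and response types, and CacheEntry, expensive_query and get_with_cache are illustrative names only (the once_cell crate is assumed as a dependency). Like this first patch, the sketch still lets several callers race to refresh at the same time; patch 5 below closes that gap by taking the write lock up front.

use std::sync::RwLock;
use std::time::{Duration, Instant};

use once_cell::sync::Lazy;

// Illustrative stand-in for the cached API response.
#[derive(Clone)]
struct CacheEntry {
    last_updated: Instant,
    data: String,
}

// Process-wide cache slot, empty until the first request fills it.
static CACHE: Lazy<RwLock<Option<CacheEntry>>> = Lazy::new(|| RwLock::new(None));

// Stand-in for the expensive database aggregation.
fn expensive_query() -> String {
    "richest addresses".to_string()
}

fn get_with_cache(ttl: Duration) -> String {
    // Fast path: serve the cached value while it is still fresh.
    if let Some(entry) = &*CACHE.read().unwrap() {
        if entry.last_updated.elapsed() < ttl {
            return entry.data.clone();
        }
    }
    // Slow path: recompute and store the result for later callers.
    let data = expensive_query();
    *CACHE.write().unwrap() = Some(CacheEntry {
        last_updated: Instant::now(),
        data: data.clone(),
    });
    data
}

fn main() {
    let first = get_with_cache(Duration::from_secs(86_400)); // cache miss: runs the query
    let second = get_with_cache(Duration::from_secs(86_400)); // cache hit: served from memory
    assert_eq!(first, second);
}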
From 885ddae262fbe006fbbb75b90b0e831d8392d30b Mon Sep 17 00:00:00 2001
From: shufps
Date: Fri, 6 Oct 2023 08:02:41 +0200
Subject: [PATCH 2/8] formatted source

---
 src/bin/inx-chronicle/api/explorer/routes.rs | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs
index ea60ab5f1..5fd3cbad5 100644
--- a/src/bin/inx-chronicle/api/explorer/routes.rs
+++ b/src/bin/inx-chronicle/api/explorer/routes.rs
@@ -40,9 +40,9 @@ use crate::api::{
     ApiResult,
 };

-use std::time::{Instant, Duration};
-use tokio::sync::{RwLock};
+use once_cell::sync::Lazy;
+use std::time::{Duration, Instant};
+use tokio::sync::RwLock;

 pub fn routes() -> Router {
     Router::new()
@@ -378,7 +378,10 @@ async fn richest_addresses_ledger_analytics(
     };

     // Store the response in the cache
-    *RICHEST_ADDRESSES_CACHE.write().await = Some(RichestCacheData { last_updated: Instant::now(), data: response.clone() });
+    *RICHEST_ADDRESSES_CACHE.write().await = Some(RichestCacheData {
+        last_updated: Instant::now(),
+        data: response.clone(),
+    });

     Ok(response)
 }
@@ -409,7 +412,10 @@ async fn token_distribution_ledger_analytics(
     };

     // Store the response in the cache
-    *TOKEN_DISTRIBUTION_CACHE.write().await = Some(TokenCacheData { last_updated: Instant::now(), data: response.clone() });
+    *TOKEN_DISTRIBUTION_CACHE.write().await = Some(TokenCacheData {
+        last_updated: Instant::now(),
+        data: response.clone(),
+    });

     Ok(response)
 }

From 3bb48f2964f22a11827aa782a32eaa15b1062de7 Mon Sep 17 00:00:00 2001
From: shufps
Date: Tue, 14 Nov 2023 17:28:26 +0100
Subject: [PATCH 3/8] rebased, clippied, formatted, compiles again

---
 src/bin/inx-chronicle/api/explorer/routes.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs
index 5fd3cbad5..4c90f09e8 100644
--- a/src/bin/inx-chronicle/api/explorer/routes.rs
+++ b/src/bin/inx-chronicle/api/explorer/routes.rs
@@ -19,7 +19,7 @@ use chronicle::{
     },
 };
 use futures::{StreamExt, TryStreamExt};
-use iota_sdk::types::block::address::ToBech32Ext;
+use iota_sdk::types::block::address::{Hrp, ToBech32Ext};

 use super::{
     extractors::{
@@ -356,7 +356,7 @@ async fn richest_addresses_ledger_analytics(
         .get_richest_addresses(ledger_index, top)
         .await?;

-    let hrp = database
+    let hrp: Hrp = database
         .collection::<ProtocolUpdateCollection>()
         .get_protocol_parameters_for_ledger_index(ledger_index)
         .await?
@@ -370,7 +370,9 @@ async fn richest_addresses_ledger_analytics(
             .top
             .into_iter()
             .map(|stat| AddressStatDto {
-                address: iota_sdk::types::block::address::Address::from(stat.address).to_bech32(hrp.clone()).to_string(),
+                address: iota_sdk::types::block::address::Address::from(stat.address)
+                    .to_bech32(hrp)
+                    .to_string(),
                 balance: stat.balance,
             })
             .collect(),
         ledger_index,

From f62bb5506b94e191962501f28e5584d59272d2ae Mon Sep 17 00:00:00 2001
From: shufps
Date: Tue, 21 Nov 2023 08:03:35 +0100
Subject: [PATCH 4/8] timed cache refresh to midnight

---
 src/bin/inx-chronicle/api/explorer/routes.rs | 26 ++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs
index 4c90f09e8..f4eaecadc 100644
--- a/src/bin/inx-chronicle/api/explorer/routes.rs
+++ b/src/bin/inx-chronicle/api/explorer/routes.rs
@@ -2,6 +2,9 @@
 // SPDX-License-Identifier: Apache-2.0

 use std::str::FromStr;
+use std::time::SystemTime;
+
+use tracing::info;

 use axum::{extract::Path, routing::get, Extension};
 use chronicle::{
@@ -333,6 +336,13 @@ struct TokenCacheData {
     data: TokenDistributionResponse,
 }

+fn calculate_seconds_until_midnight() -> u64 {
+    let now = SystemTime::now();
+    let since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).expect("Time went backwards");
+    let seconds_today = since_epoch.as_secs() % 86400;
+    86400 - seconds_today
+}
+
 static RICHEST_ADDRESSES_CACHE: Lazy<RwLock<Option<RichestCacheData>>> = Lazy::new(|| RwLock::new(None));
 static TOKEN_DISTRIBUTION_CACHE: Lazy<RwLock<Option<TokenCacheData>>> = Lazy::new(|| RwLock::new(None));

@@ -342,6 +352,7 @@ async fn richest_addresses_ledger_analytics(
 ) -> ApiResult<RichestAddressesResponse> {
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
     let cache = RICHEST_ADDRESSES_CACHE.read().await;
+    let seconds_until_midnight = calculate_seconds_until_midnight();

     if let Some(cached_data) = &*cache {
         if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
@@ -351,6 +362,9 @@ async fn richest_addresses_ledger_analytics(

     drop(cache); // release the read lock

+    info!("refreshing richest-addresses cache ...");
+    let refresh_start = SystemTime::now();
+
     let res = database
         .collection::<OutputCollection>()
         .get_richest_addresses(ledger_index, top)
@@ -385,6 +399,10 @@ async fn richest_addresses_ledger_analytics(
         data: response.clone(),
     });

+    let refresh_elapsed = refresh_start.elapsed().unwrap();
+    info!("refreshing richest-addresses cache done. Took {:?}", refresh_elapsed);
+    info!("next refresh in {} seconds", seconds_until_midnight);
+
     Ok(response)
 }

@@ -395,6 +413,7 @@ async fn token_distribution_ledger_analytics(
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
     let cache = TOKEN_DISTRIBUTION_CACHE.read().await;
+    let seconds_until_midnight = calculate_seconds_until_midnight();

     if let Some(cached_data) = &*cache {
         if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
             return Ok(cached_data.data.clone());
@@ -403,6 +422,9 @@ async fn token_distribution_ledger_analytics(

     drop(cache); // release the read lock

+    info!("refreshing token-distribution cache ...");
+    let refresh_start = SystemTime::now();
+
     let res = database
         .collection::<OutputCollection>()
         .get_token_distribution(ledger_index)
@@ -419,6 +441,10 @@ async fn token_distribution_ledger_analytics(
         data: response.clone(),
     });

+    let refresh_elapsed = refresh_start.elapsed().unwrap();
+    info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed);
Took {:?}", refresh_elapsed); + info!("next refresh in {} seconds", seconds_until_midnight); + Ok(response) } From 50391e9f36e35a732807735a5eee6f5b424a8281 Mon Sep 17 00:00:00 2001 From: shufps Date: Tue, 21 Nov 2023 09:13:14 +0100 Subject: [PATCH 5/8] used the cache as mutex to prevent concurrent refreshs --- src/bin/inx-chronicle/api/explorer/routes.rs | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs index f4eaecadc..6c8224ee3 100644 --- a/src/bin/inx-chronicle/api/explorer/routes.rs +++ b/src/bin/inx-chronicle/api/explorer/routes.rs @@ -351,7 +351,7 @@ async fn richest_addresses_ledger_analytics( RichestAddressesQuery { top, ledger_index }: RichestAddressesQuery, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; - let cache = RICHEST_ADDRESSES_CACHE.read().await; + let cache = RICHEST_ADDRESSES_CACHE.write().await; let seconds_until_midnight = calculate_seconds_until_midnight(); if let Some(cached_data) = &*cache { @@ -360,8 +360,6 @@ async fn richest_addresses_ledger_analytics( } } - drop(cache); // release the read lock - info!("refreshing richest-addresses cache ..."); let refresh_start = SystemTime::now(); @@ -411,7 +409,7 @@ async fn token_distribution_ledger_analytics( LedgerIndex { ledger_index }: LedgerIndex, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; - let cache = TOKEN_DISTRIBUTION_CACHE.read().await; + let cache = TOKEN_DISTRIBUTION_CACHE.write().await; let seconds_until_midnight = calculate_seconds_until_midnight(); if let Some(cached_data) = &*cache { @@ -420,8 +418,6 @@ async fn token_distribution_ledger_analytics( } } - drop(cache); // release the read lock - info!("refreshing token-distribution cache ..."); let refresh_start = SystemTime::now(); From 8248b36199e245ee567d125e32e5438c0113796b Mon Sep 17 00:00:00 2001 From: shufps Date: Tue, 21 Nov 2023 09:33:05 +0100 Subject: [PATCH 6/8] fixed writing to cache --- src/bin/inx-chronicle/api/explorer/routes.rs | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs index 6c8224ee3..043e9737c 100644 --- a/src/bin/inx-chronicle/api/explorer/routes.rs +++ b/src/bin/inx-chronicle/api/explorer/routes.rs @@ -351,7 +351,7 @@ async fn richest_addresses_ledger_analytics( RichestAddressesQuery { top, ledger_index }: RichestAddressesQuery, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; - let cache = RICHEST_ADDRESSES_CACHE.write().await; + let mut cache = RICHEST_ADDRESSES_CACHE.write().await; let seconds_until_midnight = calculate_seconds_until_midnight(); if let Some(cached_data) = &*cache { @@ -392,10 +392,7 @@ async fn richest_addresses_ledger_analytics( }; // Store the response in the cache - *RICHEST_ADDRESSES_CACHE.write().await = Some(RichestCacheData { - last_updated: Instant::now(), - data: response.clone(), - }); + *cache = Some(RichestCacheData { last_updated: Instant::now(), data: response.clone() }); let refresh_elapsed = refresh_start.elapsed().unwrap(); info!("refreshing richest-addresses cache done. 
Took {:?}", refresh_elapsed); @@ -409,7 +406,7 @@ async fn token_distribution_ledger_analytics( LedgerIndex { ledger_index }: LedgerIndex, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; - let cache = TOKEN_DISTRIBUTION_CACHE.write().await; + let mut cache = TOKEN_DISTRIBUTION_CACHE.write().await; let seconds_until_midnight = calculate_seconds_until_midnight(); if let Some(cached_data) = &*cache { @@ -432,10 +429,7 @@ async fn token_distribution_ledger_analytics( }; // Store the response in the cache - *TOKEN_DISTRIBUTION_CACHE.write().await = Some(TokenCacheData { - last_updated: Instant::now(), - data: response.clone(), - }); + *cache = Some(TokenCacheData { last_updated: Instant::now(), data: response.clone() }); let refresh_elapsed = refresh_start.elapsed().unwrap(); info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed); From ee13b46dc8099df44d44de150ee4321cf25badda Mon Sep 17 00:00:00 2001 From: shufps Date: Mon, 8 Jul 2024 13:01:24 +0200 Subject: [PATCH 7/8] added optional cached parameter to rich list and token distribution --- .../inx-chronicle/api/explorer/extractors.rs | 9 +- src/bin/inx-chronicle/api/explorer/routes.rs | 87 ++++++++++++------- 2 files changed, 60 insertions(+), 36 deletions(-) diff --git a/src/bin/inx-chronicle/api/explorer/extractors.rs b/src/bin/inx-chronicle/api/explorer/extractors.rs index f992c136f..570102a66 100644 --- a/src/bin/inx-chronicle/api/explorer/extractors.rs +++ b/src/bin/inx-chronicle/api/explorer/extractors.rs @@ -265,6 +265,7 @@ const DEFAULT_TOP_RICHLIST: usize = 100; pub struct RichestAddressesQuery { pub top: usize, pub ledger_index: Option, + pub cached: Option, } impl Default for RichestAddressesQuery { @@ -272,6 +273,7 @@ impl Default for RichestAddressesQuery { Self { top: DEFAULT_TOP_RICHLIST, ledger_index: None, + cached: None, } } } @@ -292,16 +294,17 @@ impl FromRequest for RichestAddressesQuery { #[derive(Copy, Clone, Deserialize, Default)] #[serde(default, deny_unknown_fields, rename_all = "camelCase")] -pub struct LedgerIndex { +pub struct TokenDistributionQuery { pub ledger_index: Option, + pub cached: Option, } #[async_trait] -impl FromRequest for LedgerIndex { +impl FromRequest for TokenDistributionQuery { type Rejection = ApiError; async fn from_request(req: &mut axum::extract::RequestParts) -> Result { - let Query(query) = Query::::from_request(req) + let Query(query) = Query::::from_request(req) .await .map_err(RequestError::from)?; Ok(query) diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs index 043e9737c..3568131d8 100644 --- a/src/bin/inx-chronicle/api/explorer/routes.rs +++ b/src/bin/inx-chronicle/api/explorer/routes.rs @@ -26,7 +26,7 @@ use iota_sdk::types::block::address::{Hrp, ToBech32Ext}; use super::{ extractors::{ - BlocksByMilestoneCursor, BlocksByMilestoneIdPagination, BlocksByMilestoneIndexPagination, LedgerIndex, + BlocksByMilestoneCursor, BlocksByMilestoneIdPagination, BlocksByMilestoneIndexPagination, TokenDistributionQuery, LedgerUpdatesByAddressCursor, LedgerUpdatesByAddressPagination, LedgerUpdatesByMilestoneCursor, LedgerUpdatesByMilestonePagination, MilestonesCursor, MilestonesPagination, RichestAddressesQuery, }, @@ -44,7 +44,6 @@ use crate::api::{ }; use once_cell::sync::Lazy; -use std::time::{Duration, Instant}; use tokio::sync::RwLock; pub fn routes() -> Router { @@ -327,42 +326,57 @@ async fn blocks_by_milestone_id( } struct RichestCacheData { - last_updated: Instant, + 
+    last_updated: u64,
     data: RichestAddressesResponse,
 }

 struct TokenCacheData {
-    last_updated: Instant,
+    last_updated: u64,
     data: TokenDistributionResponse,
 }

-fn calculate_seconds_until_midnight() -> u64 {
+fn get_seconds_until_midnight() -> u64 {
     let now = SystemTime::now();
     let since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).expect("Time went backwards");
-    let seconds_today = since_epoch.as_secs() % 86400;
-    86400 - seconds_today
+    86400 - (since_epoch.as_secs() % 86400)
+}
+
+fn get_days_since_epoch() -> u64 {
+    let now = SystemTime::now();
+    let secs_since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).expect("Time went backwards").as_secs();
+    secs_since_epoch / 86400
 }

 static RICHEST_ADDRESSES_CACHE: Lazy<RwLock<Option<RichestCacheData>>> = Lazy::new(|| RwLock::new(None));
 static TOKEN_DISTRIBUTION_CACHE: Lazy<RwLock<Option<TokenCacheData>>> = Lazy::new(|| RwLock::new(None));

+fn get_cache_bool(cache: Option<bool>) -> bool {
+    // default case is use the cache
+    match cache {
+        Some(b) => b,
+        None => true,
+    }
+}
+
 async fn richest_addresses_ledger_analytics(
     database: Extension<MongoDb>,
-    RichestAddressesQuery { top, ledger_index }: RichestAddressesQuery,
+    RichestAddressesQuery { top, ledger_index , cached}: RichestAddressesQuery,
 ) -> ApiResult<RichestAddressesResponse> {
     let ledger_index = resolve_ledger_index(&database, ledger_index).await?;
     let mut cache = RICHEST_ADDRESSES_CACHE.write().await;
-    let seconds_until_midnight = calculate_seconds_until_midnight();
-
-    if let Some(cached_data) = &*cache {
-        if cached_data.last_updated.elapsed() < Duration::from_secs(86400) {
-            return Ok(cached_data.data.clone());
+    let cached = get_cache_bool(cached);
+    let days_since_epoch = get_days_since_epoch();
+
+    if cached {
+        if let Some(cached_data) = &*cache {
+            if cached_data.last_updated == days_since_epoch {
+                return Ok(cached_data.data.clone());
+            }
         }
+        info!("refreshing richest-addresses cache ...");
     }

-    info!("refreshing richest-addresses cache ...");
     let refresh_start = SystemTime::now();
-
     let res = database
         .collection::<OutputCollection>()
         .get_richest_addresses(ledger_index, top)
         .await?;
@@ -391,33 +405,38 @@ async fn richest_addresses_ledger_analytics(
         ledger_index,
     };

-    // Store the response in the cache
-    *cache = Some(RichestCacheData { last_updated: Instant::now(), data: response.clone() });
+    if cached {
+        // Store the response in the cache
+        *cache = Some(RichestCacheData { last_updated: days_since_epoch, data: response.clone() });

-    let refresh_elapsed = refresh_start.elapsed().unwrap();
-    info!("refreshing richest-addresses cache done. Took {:?}", refresh_elapsed);
-    info!("next refresh in {} seconds", seconds_until_midnight);
+        let refresh_elapsed = refresh_start.elapsed().unwrap();
+        info!("refreshing richest-addresses cache done. Took {:?}", refresh_elapsed);
Took {:?}", refresh_elapsed); + info!("next refresh in {} seconds", get_seconds_until_midnight()); + } Ok(response) } async fn token_distribution_ledger_analytics( database: Extension, - LedgerIndex { ledger_index }: LedgerIndex, + TokenDistributionQuery { ledger_index, cached}: TokenDistributionQuery, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; let mut cache = TOKEN_DISTRIBUTION_CACHE.write().await; - - let seconds_until_midnight = calculate_seconds_until_midnight(); - if let Some(cached_data) = &*cache { - if cached_data.last_updated.elapsed() < Duration::from_secs(86400) { - return Ok(cached_data.data.clone()); + let cached = get_cache_bool(cached); + let days_since_epoch = get_days_since_epoch(); + + if cached { + if let Some(cached_data) = &*cache { + if cached_data.last_updated == days_since_epoch { + return Ok(cached_data.data.clone()); + } } + + info!("refreshing token-distribution cache ..."); } - info!("refreshing token-distribution cache ..."); let refresh_start = SystemTime::now(); - let res = database .collection::() .get_token_distribution(ledger_index) @@ -428,12 +447,14 @@ async fn token_distribution_ledger_analytics( ledger_index, }; - // Store the response in the cache - *cache = Some(TokenCacheData { last_updated: Instant::now(), data: response.clone() }); + if cached { + // Store the response in the cache + *cache = Some(TokenCacheData { last_updated: days_since_epoch, data: response.clone() }); - let refresh_elapsed = refresh_start.elapsed().unwrap(); - info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed); - info!("next refresh in {} seconds", seconds_until_midnight); + let refresh_elapsed = refresh_start.elapsed().unwrap(); + info!("refreshing token-distribution cache done. 
Took {:?}", refresh_elapsed); + info!("next refresh in {} seconds", get_seconds_until_midnight()); + } Ok(response) } From 5c101f993e42374775365015d0fa17604239bfeb Mon Sep 17 00:00:00 2001 From: shufps Date: Mon, 8 Jul 2024 13:03:08 +0200 Subject: [PATCH 8/8] cargo fmt --- src/analytics/influx.rs | 10 +++----- src/bin/inx-chronicle/api/explorer/routes.rs | 26 +++++++++++++++----- tests/node_configuration.rs | 12 ++++----- 3 files changed, 29 insertions(+), 19 deletions(-) diff --git a/src/analytics/influx.rs b/src/analytics/influx.rs index 165a79b04..e6b1d5a54 100644 --- a/src/analytics/influx.rs +++ b/src/analytics/influx.rs @@ -61,12 +61,10 @@ where M: Measurement, { fn prepare_query(&self) -> Vec { - vec![ - influxdb::Timestamp::from(self.at.milestone_timestamp) - .into_query(M::NAME) - .add_field("milestone_index", self.at.milestone_index) - .add_fields(&self.inner), - ] + vec![influxdb::Timestamp::from(self.at.milestone_timestamp) + .into_query(M::NAME) + .add_field("milestone_index", self.at.milestone_index) + .add_fields(&self.inner)] } } diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs index 3568131d8..3290c20d5 100644 --- a/src/bin/inx-chronicle/api/explorer/routes.rs +++ b/src/bin/inx-chronicle/api/explorer/routes.rs @@ -26,9 +26,10 @@ use iota_sdk::types::block::address::{Hrp, ToBech32Ext}; use super::{ extractors::{ - BlocksByMilestoneCursor, BlocksByMilestoneIdPagination, BlocksByMilestoneIndexPagination, TokenDistributionQuery, + BlocksByMilestoneCursor, BlocksByMilestoneIdPagination, BlocksByMilestoneIndexPagination, LedgerUpdatesByAddressCursor, LedgerUpdatesByAddressPagination, LedgerUpdatesByMilestoneCursor, LedgerUpdatesByMilestonePagination, MilestonesCursor, MilestonesPagination, RichestAddressesQuery, + TokenDistributionQuery, }, responses::{ AddressStatDto, BalanceResponse, BlockChildrenResponse, BlockPayloadTypeDto, BlocksByMilestoneResponse, @@ -343,7 +344,10 @@ fn get_seconds_until_midnight() -> u64 { fn get_days_since_epoch() -> u64 { let now = SystemTime::now(); - let secs_since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).expect("Time went backwards").as_secs(); + let secs_since_epoch = now + .duration_since(SystemTime::UNIX_EPOCH) + .expect("Time went backwards") + .as_secs(); secs_since_epoch / 86400 } @@ -360,7 +364,11 @@ fn get_cache_bool(cache: Option) -> bool { async fn richest_addresses_ledger_analytics( database: Extension, - RichestAddressesQuery { top, ledger_index , cached}: RichestAddressesQuery, + RichestAddressesQuery { + top, + ledger_index, + cached, + }: RichestAddressesQuery, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; let mut cache = RICHEST_ADDRESSES_CACHE.write().await; @@ -407,7 +415,10 @@ async fn richest_addresses_ledger_analytics( if cached { // Store the response in the cache - *cache = Some(RichestCacheData { last_updated: days_since_epoch, data: response.clone() }); + *cache = Some(RichestCacheData { + last_updated: days_since_epoch, + data: response.clone(), + }); let refresh_elapsed = refresh_start.elapsed().unwrap(); info!("refreshing richest-addresses cache done. 
Took {:?}", refresh_elapsed); @@ -419,7 +430,7 @@ async fn richest_addresses_ledger_analytics( async fn token_distribution_ledger_analytics( database: Extension, - TokenDistributionQuery { ledger_index, cached}: TokenDistributionQuery, + TokenDistributionQuery { ledger_index, cached }: TokenDistributionQuery, ) -> ApiResult { let ledger_index = resolve_ledger_index(&database, ledger_index).await?; let mut cache = TOKEN_DISTRIBUTION_CACHE.write().await; @@ -449,7 +460,10 @@ async fn token_distribution_ledger_analytics( if cached { // Store the response in the cache - *cache = Some(TokenCacheData { last_updated: days_since_epoch, data: response.clone() }); + *cache = Some(TokenCacheData { + last_updated: days_since_epoch, + data: response.clone(), + }); let refresh_elapsed = refresh_start.elapsed().unwrap(); info!("refreshing token-distribution cache done. Took {:?}", refresh_elapsed); diff --git a/tests/node_configuration.rs b/tests/node_configuration.rs index e43e24eab..64cbae483 100644 --- a/tests/node_configuration.rs +++ b/tests/node_configuration.rs @@ -19,13 +19,11 @@ mod test_rand { let node_configuration = setup_collection::(&db).await.unwrap(); // empty collection - assert!( - node_configuration - .get_latest_node_configuration() - .await - .unwrap() - .is_none() - ); + assert!(node_configuration + .get_latest_node_configuration() + .await + .unwrap() + .is_none()); let mut config = NodeConfiguration { milestone_public_key_count: 3,