Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

fix(sns-downloader): fix limit handling and improve hashing #190

Merged
merged 2 commits on
Feb 13, 2024
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 27 additions & 6 deletions rs/ic-observability/sns-downloader/src/downloader_loop.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ pub async fn run_downloader_loop(logger: Logger, cli: CliArgs, stop_signal: Rece
}

let mut current_hash: u64 = 0;
let mut limit: u64 = 20;

loop {
let tick = crossbeam::select! {
Expand All @@ -39,7 +40,7 @@ pub async fn run_downloader_loop(logger: Logger, cli: CliArgs, stop_signal: Rece
};
info!(logger, "Downloading from {} @ interval {:?}", cli.sd_url, tick);

let response = match client.get(cli.sd_url.clone()).send().await {
let response = match client.get(cli.sd_url.clone()).query(&[("limit", limit)]).send().await {
Ok(res) => res,
Err(e) => {
warn!(
Expand Down Expand Up @@ -76,6 +77,16 @@ pub async fn run_downloader_loop(logger: Logger, cli: CliArgs, stop_signal: Rece
continue;
}
};

if limit == targets.len() as u64 {
limit += 10;
info!(
logger,
"Limit reached. Increasing in next scrape from '{}' to '{}'",
targets.len(),
limit
)
}
let mut snses = vec![];
for target in targets {
let mut sns = Sns {
Expand Down Expand Up @@ -103,7 +114,8 @@ pub async fn run_downloader_loop(logger: Logger, cli: CliArgs, stop_signal: Rece

let mut hasher = DefaultHasher::new();

let targets = snses.into_iter().filter(|f| filters.filter(f)).collect::<Vec<_>>();
let mut targets = snses.into_iter().filter(|f| filters.filter(f)).collect::<Vec<_>>();
targets.sort_by_key(|f| f.root_canister_id.to_string());

for target in &targets {
target.hash(&mut hasher);
Expand All @@ -112,7 +124,14 @@ pub async fn run_downloader_loop(logger: Logger, cli: CliArgs, stop_signal: Rece
let hash = hasher.finish();

if current_hash != hash {
info!(logger, "Received new targets from {} @ interval {:?}", cli.sd_url, tick);
info!(
logger,
"Received new targets from {} @ interval {:?}, old hash '{}' != '{}' new hash",
cli.sd_url,
tick,
current_hash,
hash
);
current_hash = hash;

generate_config(&cli, targets, logger.clone());
Expand Down Expand Up @@ -164,8 +183,7 @@ async fn get_canisters(cli: &CliArgs, root_canister_id: String, client: &Client,
return vec![];
}
};

match &contract["canisters"] {
let mut canisters = match &contract["canisters"] {
serde_json::Value::Array(ar) => ar
.iter()
.map(|val| Canister {
Expand All @@ -181,5 +199,8 @@ async fn get_canisters(cli: &CliArgs, root_canister_id: String, client: &Client,
);
vec![]
}
}
};

canisters.sort_by_key(|c| c.canister_id.to_string());
canisters
}
Loading