From b86b12434af3682f1d78cdfc90a9116ca5410176 Mon Sep 17 00:00:00 2001
From: Eliot Eikenberry
Date: Fri, 16 Aug 2024 21:42:04 -0400
Subject: [PATCH] fix: expand limit to loki query, use different field for merge sha, cleanup warnings (#32)

---
 src/helpers/gatherer.rs |  4 +---
 src/helpers/loki.rs     | 21 +++++++++------------
 src/routes/teams.rs     |  2 +-
 3 files changed, 11 insertions(+), 16 deletions(-)

diff --git a/src/helpers/gatherer.rs b/src/helpers/gatherer.rs
index 396e631..3a1816f 100644
--- a/src/helpers/gatherer.rs
+++ b/src/helpers/gatherer.rs
@@ -1,8 +1,6 @@
 use chrono::{DateTime, Utc};
 use regex::Regex;
-use serde::de;
 use std::collections::HashMap;
-use anyhow::Result;
 
 use super::response::ResponseRecord;
 
@@ -154,7 +152,7 @@ pub fn link_data(data: GatheredData) -> Vec<ResponseRecord> {
 
     let failures = find_failures_per_deployment(&data);
 
-    data.deployments_by_repo.iter().for_each(|(key, value)| {
+    data.deployments_by_repo.iter().for_each(|(_, value)| {
         value.iter().for_each(|deployment| {
             let mut record: ResponseRecord = ResponseRecord {
                 repository: deployment.repository.clone(),
diff --git a/src/helpers/loki.rs b/src/helpers/loki.rs
index da8fcf2..9402ea0 100644
--- a/src/helpers/loki.rs
+++ b/src/helpers/loki.rs
@@ -1,16 +1,17 @@
 use anyhow::{anyhow, Result};
-use chrono::{DateTime, Duration, TimeDelta, Utc};
+use chrono::{DateTime, Duration, Utc};
 use reqwest::{Response, Error};
 use serde::{Deserialize, Serialize};
-use std::{collections::HashMap, env, fs};
+use std::{collections::HashMap, env};
 
-use super::{gatherer::{DeployEntry, GatheredData, IssueEntry, MergeEntry}, request::DataRequest, response::ResponseRecord};
+use super::{gatherer::{DeployEntry, GatheredData, IssueEntry, MergeEntry}, request::DataRequest};
 
 #[derive(Serialize, Debug, Clone, Default)]
 pub struct QueryParams {
     pub query: String,
     pub start: String,
     pub end: String,
+    pub limit: u16,
 }
 
 #[derive(Deserialize, Debug, Default)]
@@ -40,7 +41,6 @@ pub struct Stream {
     pub repository_name: Option<String>,
     pub service_name: Option<String>,
     pub team_name: Option<String>,
-    pub merge_sha: Option<String>,
     pub merged_at: Option<DateTime<Utc>>,
     pub deployment_state: Option<String>,
 }
@@ -82,6 +82,7 @@ pub struct Repository {
 pub struct PullRequest {
     pub title: String,
     pub user: User,
+    pub merge_commit_sha: String,
 }
 
 #[derive(Deserialize, Debug)]
@@ -210,7 +211,8 @@ fn fill_query_params, F: AsRef>(request: &DataRequest, query:
     let params = QueryParams {
         start: request.start.timestamp_nanos_opt().unwrap().to_string(),
         end: request.end.timestamp_nanos_opt().unwrap().to_string(),
-        query: query
+        query: query,
+        limit: 5000,
     };
 
     return params;
@@ -333,18 +335,16 @@ fn sort_merge_data(merge_data: QueryResponse) -> HashMap<String, MergeEntry> {
 
     for result in merge_data.data.result {
         for value in result.values {
-
-            let sha = result.stream.merge_sha.as_ref().unwrap().to_string();
             let pr = value.json_data.body.pull_request.unwrap();
 
             let record = MergeEntry {
                 user: pr.user.login.clone(),
                 title: pr.title.clone(),
                 merged_at: result.stream.merged_at.unwrap().clone(),
-                sha: sha.clone()
+                sha: pr.merge_commit_sha.clone()
             };
 
-            records_by_sha.entry(sha)
+            records_by_sha.entry(pr.merge_commit_sha)
                 .or_insert(record);
         }
     }
@@ -386,9 +386,6 @@ async fn query_data(request: DataRequest) -> Result<(QueryResponse, QueryResponse, QueryResponse)> {
     Ok((deploy_data, issue_data, merge_data))
 }
 
-const REQUEST_DAYS: i64 = 5;
-const REQUEST_DAYS_DURATION: TimeDelta = Duration::days(5);
-
 fn get_batch_days_size() -> i64 {
     let var = env::var("LOKI_DAYS_BATCH_SIZE");
 
diff --git a/src/routes/teams.rs b/src/routes/teams.rs
index 0dfe8d2..205cf89 100644
--- a/src/routes/teams.rs
+++ b/src/routes/teams.rs
@@ -99,7 +99,7 @@ pub async fn handle_request(Extension(cache): Extension) -> Result {
+        Err(_) => {
             tracing::error!("GitHub Request Failed");
             return Err(StatusCode::INTERNAL_SERVER_ERROR);
         }
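
For readers unfamiliar with the Loki API, here is a minimal sketch of how the new `limit` field can travel with the rest of `QueryParams`, assuming the fields are sent as URL query parameters with reqwest. The client setup, base URL, and `run_query` helper below are illustrative assumptions, not code taken from this repository; they only show why adding `limit` to the serialized struct raises the cap on returned log lines (Loki's server-side default is 100).

use reqwest::Client;
use serde::Serialize;

// Mirrors the patched struct: `limit` caps how many log entries Loki
// returns for a single query_range call.
#[derive(Serialize, Debug, Clone, Default)]
pub struct QueryParams {
    pub query: String,
    pub start: String,
    pub end: String,
    pub limit: u16,
}

// Hypothetical helper: serializes the struct into
// ?query=...&start=...&end=...&limit=... against Loki's query_range
// endpoint and returns the raw JSON body as a string.
async fn run_query(client: &Client, params: &QueryParams) -> reqwest::Result<String> {
    client
        .get("http://localhost:3100/loki/api/v1/query_range")
        .query(params)
        .send()
        .await?
        .text()
        .await
}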