Skip to content

Commit

Permalink
fix: expand limit to loki query, use different field for merge sha, cleanup warnings (#32)
Browse files Browse the repository at this point in the history
  • Loading branch information
Wolftousen authored Aug 17, 2024
1 parent aff63b7 commit b86b124
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 16 deletions.
4 changes: 1 addition & 3 deletions src/helpers/gatherer.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
use chrono::{DateTime, Utc};
use regex::Regex;
use serde::de;
use std::collections::HashMap;
use anyhow::Result;

use super::response::ResponseRecord;

Expand Down Expand Up @@ -154,7 +152,7 @@ pub fn link_data(data: GatheredData) -> Vec<ResponseRecord> {

let failures = find_failures_per_deployment(&data);

data.deployments_by_repo.iter().for_each(|(key, value)| {
data.deployments_by_repo.iter().for_each(|(_, value)| {
value.iter().for_each(|deployment| {
let mut record: ResponseRecord = ResponseRecord {
repository: deployment.repository.clone(),
Expand Down
21 changes: 9 additions & 12 deletions src/helpers/loki.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
use anyhow::{anyhow, Result};
use chrono::{DateTime, Duration, TimeDelta, Utc};
use chrono::{DateTime, Duration, Utc};
use reqwest::{Response, Error};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, env, fs};
use std::{collections::HashMap, env};

use super::{gatherer::{DeployEntry, GatheredData, IssueEntry, MergeEntry}, request::DataRequest, response::ResponseRecord};
use super::{gatherer::{DeployEntry, GatheredData, IssueEntry, MergeEntry}, request::DataRequest};

/// Query-string parameters serialized onto a Loki range-query request.
/// All fields are sent as strings/numbers in the URL query component.
#[derive(Serialize, Debug, Clone, Default)]
pub struct QueryParams {
    /// The query expression to run — presumably LogQL, given the Loki endpoint; verify against the caller.
    pub query: String,
    /// Range start, a nanosecond Unix timestamp rendered as a string
    /// (built via `timestamp_nanos_opt()` in `fill_query_params`).
    pub start: String,
    /// Range end, a nanosecond Unix timestamp rendered as a string.
    pub end: String,
    /// Maximum number of entries Loki may return; `fill_query_params` sets this to 5000.
    pub limit: u16,
}

#[derive(Deserialize, Debug, Default)]
Expand Down Expand Up @@ -40,7 +41,6 @@ pub struct Stream {
pub repository_name: Option<String>,
pub service_name: Option<String>,
pub team_name: Option<String>,
pub merge_sha: Option<String>,
pub merged_at: Option<DateTime<Utc>>,
pub deployment_state: Option<String>,
}
Expand Down Expand Up @@ -82,6 +82,7 @@ pub struct Repository {
/// Subset of a pull-request payload that this service deserializes.
/// NOTE(review): field names match the GitHub PR webhook/API shape — confirm upstream source.
pub struct PullRequest {
    /// Pull-request title.
    pub title: String,
    /// Author of the pull request.
    pub user: User,
    /// SHA of the merge commit; used downstream as the key for merge records.
    pub merge_commit_sha: String,
}

#[derive(Deserialize, Debug)]
Expand Down Expand Up @@ -210,7 +211,8 @@ fn fill_query_params<Q: AsRef<str>, F: AsRef<str>>(request: &DataRequest, query:
let params = QueryParams {
start: request.start.timestamp_nanos_opt().unwrap().to_string(),
end: request.end.timestamp_nanos_opt().unwrap().to_string(),
query: query
query: query,
limit: 5000,
};

return params;
Expand Down Expand Up @@ -333,18 +335,16 @@ fn sort_merge_data(merge_data: QueryResponse) -> HashMap<String, MergeEntry> {

for result in merge_data.data.result {
for value in result.values {

let sha = result.stream.merge_sha.as_ref().unwrap().to_string();
let pr = value.json_data.body.pull_request.unwrap();

let record = MergeEntry {
user: pr.user.login.clone(),
title: pr.title.clone(),
merged_at: result.stream.merged_at.unwrap().clone(),
sha: sha.clone()
sha: pr.merge_commit_sha.clone()
};

records_by_sha.entry(sha)
records_by_sha.entry(pr.merge_commit_sha)
.or_insert(record);
}
}
Expand Down Expand Up @@ -386,9 +386,6 @@ async fn query_data(request: DataRequest) -> Result<(QueryResponse, QueryRespons
Ok((deploy_data, issue_data, merge_data))
}

const REQUEST_DAYS: i64 = 5;
const REQUEST_DAYS_DURATION: TimeDelta = Duration::days(5);

fn get_batch_days_size() -> i64 {
let var = env::var("LOKI_DAYS_BATCH_SIZE");

Expand Down
2 changes: 1 addition & 1 deletion src/routes/teams.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ pub async fn handle_request(Extension(cache): Extension<TeamsCache>) -> Result<J
break;
}
}
Err(e) => {
Err(_) => {
tracing::error!("GitHub Request Failed");
return Err(StatusCode::INTERNAL_SERVER_ERROR);
}
Expand Down

0 comments on commit b86b124

Please sign in to comment.