From 444f19f8160bdf9b8a27284f3508e51b1e9f145f Mon Sep 17 00:00:00 2001 From: Jonathan Siegel <248302+usiegj00@users.noreply.github.com> Date: Tue, 29 Oct 2024 18:03:10 +0900 Subject: [PATCH] Update download.rs to match non-CNB operation --- libherokubuildpack/src/download.rs | 42 +++++++++++++++++++++++++++--- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/libherokubuildpack/src/download.rs b/libherokubuildpack/src/download.rs index 14d36543..f922cd57 100644 --- a/libherokubuildpack/src/download.rs +++ b/libherokubuildpack/src/download.rs @@ -1,8 +1,15 @@ -use std::{fs, io}; +/// Unfortunately the rust-ification of buildpacks introduces a wealth of +/// incompatibilities and unexpected behavior. In original buildpacks a web +/// download [was a curl command with reasonable options](https://github.com/heroku/heroku-buildpack-nodejs/blob/76b36fcf5daa1b1aed17b3616a57f99d2ef05a29/lib/binaries.sh#L98C1-L99C1): +/// curl "$url" -L --silent --fail --retry 5 --retry-max-time 15 --retry-connrefused --connect-timeout 5 -o target +/// Unfortunately, the new rust-implementation dropped the retry, max-time and whatnot +/// and therefore things [like NodeJS downloads fail to install](https://github.com/heroku/buildpacks-nodejs/issues/868). +/// But on the other hand, Rust provides memory safety and cross-platform compile targets. +/// This is a copilot suggestion to bring parity. +use std::{fs, io, thread, time::Duration}; #[derive(thiserror::Error, Debug)] pub enum DownloadError { - // Boxed to prevent `large_enum_variant` errors since `ureq::Error` is massive. 
#[error("HTTP error while downloading file: {0}")] HttpError(#[from] Box<ureq::Error>), @@ -10,7 +17,7 @@ pub enum DownloadError { IoError(#[from] std::io::Error), } -/// Downloads a file via HTTP(S) to a local path +/// Downloads a file via HTTP(S) to a local path with retry logic /// /// # Examples /// ``` @@ -31,7 +38,34 @@ pub fn download_file( uri: impl AsRef<str>, destination: impl AsRef<Path>, ) -> Result<(), DownloadError> { - let response = ureq::get(uri.as_ref()).call().map_err(Box::new)?; + let max_retries = 5; + let retry_delay = Duration::from_secs(2); + let mut attempts = 0; + + while attempts < max_retries { + attempts += 1; + match attempt_download(&uri, &destination) { + Ok(_) => return Ok(()), + Err(e) if attempts < max_retries => { + eprintln!("Attempt {}/{} failed: {}. Retrying in {:?}...", attempts, max_retries, e, retry_delay); + thread::sleep(retry_delay); + } + Err(e) => return Err(e), + } + } + + Err(DownloadError::HttpError(Box::new(ureq::Error::new("Max retries reached")))) } + +fn attempt_download( + uri: &impl AsRef<str>, + destination: &impl AsRef<Path>, +) -> Result<(), DownloadError> { + let response = ureq::get(uri.as_ref()) + .timeout_connect(5_000) // 5 seconds timeout + .timeout_read(15_000) // 15 seconds max time + .call() + .map_err(Box::new)?; let mut reader = response.into_reader(); let mut file = fs::File::create(destination.as_ref())?; io::copy(&mut reader, &mut file)?;