diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0302a44..177363d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,7 +9,12 @@ All notable changes to this project will be documented in this file.
 
 ## Added
 
-- Added `tracing` for structure and multi-level logging. `--verbose` and `-v` are now
+- An `--offline` flag has been added to `criticalup install`. When enabled, only the download cache
+  will be used where possible, and the cache will not be populated on cache miss.
+- Caching of downloaded keys, manifests, and installation tarballs has been added. Newly downloaded
+  artifacts are stored in the OS-specific cache directory. The cache can be cleaned with
+  `criticalup clean` or through the usual OS cache-cleanup mechanisms.
+- `tracing` support was added for structured and multi-level logging. `--verbose` and `-v` are now
   generally accepted and enable debug logging. Passing the flag twice (eg. `-vv`) will enable
   trace logging as well. The `--log-level` argument can accept arbitrary
   [tracing directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)
diff --git a/Cargo.lock b/Cargo.lock
index 44265f2..648d209 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -734,6 +734,7 @@ dependencies = [
  "thiserror",
  "tokio",
  "toml_edit",
+ "tracing",
 ]
 
 [[package]]
diff --git a/crates/criticaltrust/src/manifests.rs b/crates/criticaltrust/src/manifests.rs
index ff67cb3..cc2d815 100644
--- a/crates/criticaltrust/src/manifests.rs
+++ b/crates/criticaltrust/src/manifests.rs
@@ -102,7 +102,7 @@ pub struct ReleaseArtifact {
     pub sha256: Vec,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Copy)]
 pub enum ReleaseArtifactFormat {
     #[serde(rename = "tar.zst")]
     TarZst,
diff --git a/crates/criticaltrust/src/signatures/keychain.rs b/crates/criticaltrust/src/signatures/keychain.rs
index e5eff18..cf09109 100644
--- a/crates/criticaltrust/src/signatures/keychain.rs
+++ b/crates/criticaltrust/src/signatures/keychain.rs
@@ -4,9 +4,11 @@
 use crate::keys::{KeyId, KeyRole, PublicKey};
 use crate::signatures::{PublicKeysRepository, SignedPayload};
 use crate::Error;
+use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 
 /// Collection of all trusted public keys.
+#[derive(Serialize, Deserialize)]
 pub struct Keychain {
     keys: HashMap,
 }
diff --git a/crates/criticalup-cli/src/commands/clean.rs b/crates/criticalup-cli/src/commands/clean.rs
index 8279b1d..743b1f9 100644
--- a/crates/criticalup-cli/src/commands/clean.rs
+++ b/crates/criticalup-cli/src/commands/clean.rs
@@ -14,13 +14,22 @@
 pub(crate) async fn run(ctx: &Context) -> Result<(), Error> {
     let installations_dir = &ctx.config.paths.installation_dir;
     let state = State::load(&ctx.config).await?;
 
+    delete_cache_directory(&ctx.config.paths.cache_dir).await?;
     delete_unused_installations(installations_dir, &state).await?;
     delete_untracked_installation_dirs(installations_dir, state).await?;
 
     Ok(())
 }
 
-/// Deletes installation from `State` wl; ith `InstallationId`s that have empty manifest section, and
+async fn delete_cache_directory(cache_dir: &Path) -> Result<(), Error> {
+    if cache_dir.exists() {
+        tracing::info!("Cleaning cache directory");
+        tokio::fs::remove_dir_all(&cache_dir).await?;
+    }
+    Ok(())
+}
+
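The `delete_cache_directory` helper above checks `exists()` before removing, which leaves a small window if the directory disappears between the check and the removal. A minimal sketch of an equivalent, race-tolerant variant (illustrative only, not part of this patch; `clean_cache_dir` is a hypothetical name):

```rust
use std::io::ErrorKind;
use std::path::Path;

/// Remove the cache directory, treating an already-missing directory as success.
async fn clean_cache_dir(cache_dir: &Path) -> std::io::Result<()> {
    tracing::info!("Cleaning cache directory");
    match tokio::fs::remove_dir_all(cache_dir).await {
        Ok(()) => Ok(()),
        // The cache may never have been created, or was removed concurrently.
        Err(e) if e.kind() == ErrorKind::NotFound => Ok(()),
        Err(e) => Err(e),
    }
}
```

Either approach works for `clean`; the sketch only avoids the separate `exists()` probe.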
+/// Deletes installations from `State` whose `InstallationId`s have an empty manifest section, and
 /// deletes the installation directory from the disk if present.
 async fn delete_unused_installations(installations_dir: &Path, state: &State) -> Result<(), Error> {
     let unused_installations: Vec = state
diff --git a/crates/criticalup-cli/src/commands/install.rs b/crates/criticalup-cli/src/commands/install.rs
index 88d7991..582a78a 100644
--- a/crates/criticalup-cli/src/commands/install.rs
+++ b/crates/criticalup-cli/src/commands/install.rs
@@ -5,9 +5,11 @@ use std::path::{Path, PathBuf};
 
 use criticaltrust::integrity::IntegrityVerifier;
 use criticaltrust::manifests::{Release, ReleaseArtifactFormat};
+use criticalup_core::download_server_cache::DownloadServerCache;
 use criticalup_core::download_server_client::DownloadServerClient;
 use criticalup_core::project_manifest::{ProjectManifest, ProjectManifestProduct};
 use criticalup_core::state::State;
+use tokio::fs::read;
 
 use crate::errors::Error;
 use crate::errors::Error::{IntegrityErrorsWhileInstallation, PackageDependenciesNotSupported};
@@ -18,12 +20,19 @@ pub const DEFAULT_RELEASE_ARTIFACT_FORMAT: ReleaseArtifactFormat = ReleaseArtifa
 pub(crate) async fn run(
     ctx: &Context,
     reinstall: bool,
+    offline: bool,
     project: Option,
 ) -> Result<(), Error> {
     // TODO: If `std::io::stdout().is_terminal() == true``, provide a nice, fancy progress bar using indicatif.
     // Retain existing behavior to support non-TTY usage.
     let state = State::load(&ctx.config).await?;
+    let maybe_client = if !offline {
+        Some(DownloadServerClient::new(&ctx.config, &state))
+    } else {
+        None
+    };
+    let cache = DownloadServerCache::new(&ctx.config.paths.cache_dir, &maybe_client).await?;
 
     // Get manifest location if arg `project` is None
     let manifest_path = ProjectManifest::discover_canonical_path(project.as_deref()).await?;
@@ -37,7 +46,7 @@ pub(crate) async fn run(
         let abs_installation_dir_path = installation_dir.join(product.installation_id());
 
         if !abs_installation_dir_path.exists() {
-            install_product_afresh(ctx, &state, &manifest_path, product).await?;
+            install_product_afresh(ctx, &state, &cache, &manifest_path, product).await?;
         } else {
             // Check if the state file has no mention of this installation.
             let does_this_installation_exist_in_state = state
@@ -46,7 +55,7 @@ pub(crate) async fn run(
             if !does_this_installation_exist_in_state || reinstall {
                 // If the installation directory exists, but the State has no installation of that
                 // InstallationId, then re-run the install command and go through installation.
-                install_product_afresh(ctx, &state, &manifest_path, product).await?;
+                install_product_afresh(ctx, &state, &cache, &manifest_path, product).await?;
             } else {
                 // If the installation directory exists AND there is an existing installation with
                 // that InstallationId, then merely update the installation in the State file to
@@ -67,9 +76,16 @@ pub(crate) async fn run(
     Ok(())
 }
 
+#[tracing::instrument(level = "debug", skip_all, fields(
+    manifest_path = %manifest_path.display(),
+    installation_id = %product.installation_id(),
+    release = %product.release(),
+    product = %product.name(),
+))]
 async fn install_product_afresh(
     ctx: &Context,
     state: &State,
+    cache: &DownloadServerCache<'_>,
     manifest_path: &Path,
     product: &ProjectManifestProduct,
 ) -> Result<(), Error> {
@@ -77,17 +93,15 @@ async fn install_product_afresh(
     let release = product.release();
     let installation_dir = &ctx.config.paths.installation_dir;
     let abs_installation_dir_path = installation_dir.join(product.installation_id());
-    let client = DownloadServerClient::new(&ctx.config, state);
-    let keys = client.get_keys().await?;
+    let keys = cache.keys().await?;
 
-    // TODO: Add tracing to support log levels, structured logging.
     tracing::info!("Installing product '{product_name}' ({release})",);
 
     let mut integrity_verifier = IntegrityVerifier::new(&keys);
 
     // Get the release manifest for the product from the server and verify it.
-    let release_manifest_from_server = client
-        .get_product_release_manifest(product_name, product.release())
+    let release_manifest_from_server = cache
+        .product_release_manifest(product_name, product.release())
         .await?;
     let verified_release_manifest = release_manifest_from_server.signed.into_verified(&keys)?;
@@ -103,10 +117,8 @@ async fn install_product_afresh(
         .await?;
 
     for package in product.packages() {
-        tracing::info!("Downloading component '{package}' for '{product_name}' ({release})",);
-
-        let response_file = client
-            .download_package(
+        let package_path = cache
+            .package(
                 product_name,
                 release_name,
                 package,
@@ -114,18 +126,10 @@ async fn install_product_afresh(
             )
             .await?;
 
-        // Archive file path, path with the archive extension.
-        let package_name_with_extension =
-            format!("{}.{}", package, DEFAULT_RELEASE_ARTIFACT_FORMAT);
-        let abs_artifact_compressed_file_path: PathBuf =
-            abs_installation_dir_path.join(&package_name_with_extension);
-
-        // Save the downloaded package archive on disk.
-        tokio::fs::write(&abs_artifact_compressed_file_path, response_file.clone()).await?;
-
         tracing::info!("Installing component '{package}' for '{product_name}' ({release})",);
+        let package_data = read(package_path).await?;
-        let decoder = xz2::read::XzDecoder::new(response_file.as_slice());
+        let decoder = xz2::read::XzDecoder::new(package_data.as_slice());
         let mut archive = tar::Archive::new(decoder);
         archive.set_preserve_permissions(true);
         archive.set_preserve_mtime(true);
@@ -147,8 +151,6 @@
             );
             }
         }
-
-        clean_archive_download(&abs_artifact_compressed_file_path).await?;
     }
 
     let verified_packages = integrity_verifier
@@ -173,11 +175,6 @@ fn check_for_package_dependencies(verified_release_manifest: &Release) -> Result
     Ok(())
 }
 
-async fn clean_archive_download(abs_artifact_compressed_file_path: &PathBuf) -> Result<(), Error> {
-    tokio::fs::remove_file(abs_artifact_compressed_file_path).await?;
-    Ok(())
-}
-
 #[test]
 fn dependencies_check() {
     use criticaltrust::manifests::ReleasePackage;
diff --git a/crates/criticalup-cli/src/lib.rs b/crates/criticalup-cli/src/lib.rs
index 4c8feca..108c45d 100644
--- a/crates/criticalup-cli/src/lib.rs
+++ b/crates/criticalup-cli/src/lib.rs
@@ -55,9 +55,11 @@ async fn main_inner(whitelabel: WhitelabelConfig, args: &[OsString]) -> Result<(
             Some(AuthCommands::Remove) => commands::auth_remove::run(&ctx).await?,
             None => commands::auth::run(&ctx).await?,
         },
-        Commands::Install { project, reinstall } => {
-            commands::install::run(&ctx, reinstall, project).await?
-        }
+        Commands::Install {
+            project,
+            reinstall,
+            offline,
+        } => commands::install::run(&ctx, reinstall, offline, project).await?,
        Commands::Clean => commands::clean::run(&ctx).await?,
        Commands::Remove { project } => commands::remove::run(&ctx, project).await?,
        Commands::Run { command, project } => commands::run::run(&ctx, command, project).await?,
@@ -136,6 +138,9 @@ enum Commands {
        /// Reinstall products that may have already been installed
        #[arg(long)]
        reinstall: bool,
+        /// Don't download from the server; only use previously cached artifacts
+        #[arg(long)]
+        offline: bool,
    },

    /// Delete all unused and untracked installations
diff --git a/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap
index 6c5a036..f190fc8 100644
--- a/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap
+++ b/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap
@@ -16,6 +16,7 @@ Usage:
 Options:
       --project          Path to the manifest `criticalup.toml`
      --reinstall        Reinstall products that may have already been installed
+      --offline          Don't download from the server; only use previously cached artifacts
  -v, --verbose...       Enable debug logs, -vv for trace
      --log-level [...]  Tracing directives
  -h, --help             Print help
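The `--log-level` option shown in the help output above takes a tracing directive (see the CHANGELOG entry earlier in this patch). As a hedged illustration of what such a directive does, here is a minimal, self-contained sketch that turns one into a `tracing_subscriber` filter; this is not criticalup's actual logging setup, which is outside this diff:

```rust
use tracing_subscriber::EnvFilter;

// Requires the `tracing-subscriber` crate with the "env-filter" feature.
fn init_logging(directive: &str) -> Result<(), Box<dyn std::error::Error>> {
    // e.g. "criticalup=debug" or "criticalup=trace,hyper=warn"
    let filter = EnvFilter::try_new(directive)?;
    tracing_subscriber::fmt().with_env_filter(filter).try_init()?;
    Ok(())
}
```

A `-v` flag roughly corresponds to a `debug`-level directive and `-vv` to `trace`, as described in the CHANGELOG.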
+            }
+        };
+
         Ok(Paths {
             state_file: root.join("state.json"),
             proxies_dir: root.join("bin"),
             installation_dir: root.join(DEFAULT_INSTALLATION_DIR_NAME),
+            cache_dir,
             #[cfg(test)]
             root,
         })
     }
@@ -56,6 +66,18 @@ fn platform_specific_root(whitelabel: &WhitelabelConfig) -> Option {
     dirs::data_dir().map(|v| v.join(whitelabel.name))
 }
 
+fn find_cache_dir(whitelabel: &WhitelabelConfig) -> Option {
+    match env::var_os("CRITICALUP_CACHE_DIR") {
+        Some(val) if val.is_empty() => platform_specific_cache_dir(whitelabel),
+        Some(val) => Some(PathBuf::from(val)),
+        None => platform_specific_cache_dir(whitelabel),
+    }
+}
+
+fn platform_specific_cache_dir(whitelabel: &WhitelabelConfig) -> Option {
+    dirs::cache_dir().map(|v| v.join(whitelabel.name))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -72,9 +94,15 @@ mod tests {
                 state_file: "/opt/criticalup/state.json".into(),
                 proxies_dir: "/opt/criticalup/bin".into(),
                 installation_dir: "/opt/criticalup/toolchains".into(),
+                cache_dir: "/cache/criticalup".into(),
                 root: "/opt/criticalup".into()
             },
-            Paths::detect(&WhitelabelConfig::test(), Some("/opt/criticalup".into()),).unwrap()
+            Paths::detect(
+                &WhitelabelConfig::test(),
+                Some("/opt/criticalup".into()),
+                Some("/cache/criticalup".into())
+            )
+            .unwrap()
         );
     }
 
@@ -159,7 +187,7 @@ mod tests {
     ) {
         assert_eq!(
             expected.as_ref(),
-            Paths::detect(whitelabel, root).unwrap().root
+            Paths::detect(whitelabel, root, None).unwrap().root
         );
     }
 
@@ -168,7 +196,7 @@ mod tests {
         whitelabel: &WhitelabelConfig,
         root: Option,
     ) {
-        match Paths::detect(whitelabel, root) {
+        match Paths::detect(whitelabel, root, None) {
            Ok(paths) => assert_ne!(expected.as_ref(), paths.root),
            Err(err) => assert!(matches!(err, Error::CouldNotDetectRootDirectory)),
        }
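`find_cache_dir` above gives `CRITICALUP_CACHE_DIR` precedence, but falls back to the platform cache directory when the variable is unset or set to an empty string. A standalone sketch restating that resolution order (illustrative; `resolve_cache_dir` is a hypothetical helper, not part of the patch):

```rust
use std::env;
use std::path::PathBuf;

/// Resolve the cache directory for `name`, mirroring `find_cache_dir`:
/// a non-empty CRITICALUP_CACHE_DIR wins, otherwise fall back to the
/// platform cache directory (e.g. ~/.cache/<name> on Linux, via the `dirs` crate).
fn resolve_cache_dir(name: &str) -> Option<PathBuf> {
    match env::var_os("CRITICALUP_CACHE_DIR") {
        Some(val) if !val.is_empty() => Some(PathBuf::from(val)),
        _ => dirs::cache_dir().map(|base| base.join(name)),
    }
}
```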
diff --git a/crates/criticalup-core/src/download_server_cache.rs b/crates/criticalup-core/src/download_server_cache.rs
new file mode 100644
index 0000000..a2f9f15
--- /dev/null
+++ b/crates/criticalup-core/src/download_server_cache.rs
@@ -0,0 +1,187 @@
+// SPDX-FileCopyrightText: The Ferrocene Developers
+// SPDX-License-Identifier: MIT OR Apache-2.0
+
+use crate::{download_server_client::DownloadServerClient, errors::Error};
+use std::path::{Path, PathBuf};
+
+use criticaltrust::{
+    manifests::{ReleaseArtifactFormat, ReleaseManifest},
+    signatures::Keychain,
+};
+use tokio::fs::{create_dir_all, read, write};
+
+/// A cache for artifacts from the download server
+pub struct DownloadServerCache<'a> {
+    root: &'a Path,
+    /// The cache will lazily populate if provided a client.
+    client: Option<&'a DownloadServerClient>,
+}
+
+impl<'a> DownloadServerCache<'a> {
+    /// Create a new cache from a given root, and optionally a client.
+    pub async fn new(
+        root: &'a Path,
+        client: impl Into<Option<&'a DownloadServerClient>>,
+    ) -> Result<Self, Error> {
+        let client = client.into();
+
+        Ok(Self { root, client })
+    }
+
+    fn release_path(&self, product: &str, release: &str) -> PathBuf {
+        self.root.join("artifacts").join(product).join(release)
+    }
+
+    fn package_path(
+        &self,
+        product: &str,
+        release: &str,
+        package: &str,
+        format: ReleaseArtifactFormat,
+    ) -> PathBuf {
+        self.release_path(product, release).join({
+            let mut file_name = PathBuf::from(package);
+            file_name.set_extension(format.to_string());
+            file_name
+        })
+    }
+
+    fn product_release_manifest_path(&self, product: &str, release: &str) -> PathBuf {
+        self.release_path(product, release).join("manifest.json")
+    }
+
+    fn keys_path(&self) -> PathBuf {
+        self.root.join("keys.json")
+    }
+
+    #[tracing::instrument(level = "debug", skip_all, fields(
+        %product,
+        %release,
+        %package,
+        %format
+    ))]
+    pub async fn package(
+        &self,
+        product: &str,
+        release: &str,
+        package: &str,
+        format: ReleaseArtifactFormat,
+    ) -> Result<PathBuf, Error> {
+        let cache_key = self.package_path(product, release, package, format);
+
+        let cache_hit = cache_key.exists();
+        tracing::trace!(%cache_hit, cache_key = %cache_key.display());
+
+        match (cache_hit, &self.client) {
+            (false, Some(client)) => {
+                // Cache miss, online mode
+                let cache_dir = self.release_path(product, release);
+                create_dir_all(&cache_dir)
+                    .await
+                    .map_err(|e| Error::Create(cache_dir.to_path_buf(), e))?;
+
+                let download = client
+                    .download_package(product, release, package, format)
+                    .await?;
+                tokio::fs::write(&cache_key, download)
+                    .await
+                    .map_err(|e| Error::Write(cache_key.clone(), e))?;
+            }
+            (false, None) => {
+                // Cache miss, offline mode
+                return Err(Error::OfflineMode);
+            }
+            (true, _) => (), // Cache hit
+        }
+
+        Ok(cache_key)
+    }
+
+    #[tracing::instrument(level = "debug", skip_all, fields(
+        %product,
+        %release,
+    ))]
+    pub async fn product_release_manifest(
+        &self,
+        product: &str,
+        release: &str,
+    ) -> Result<ReleaseManifest, Error> {
+        let cache_key = self.product_release_manifest_path(product, release);
+
+        let cache_hit = cache_key.exists();
+        tracing::trace!(%cache_hit, cache_key = %cache_key.display());
+
+        let data = match (cache_hit, &self.client) {
+            (false, Some(client)) => {
+                // Cache miss, online mode
+                let cache_dir = self.release_path(product, release);
+                create_dir_all(&cache_dir)
+                    .await
+                    .map_err(|e| Error::Create(cache_dir.to_path_buf(), e))?;
+
+                let data = client
+                    .get_product_release_manifest(product, release)
+                    .await?;
+                // It would be preferable to store the raw server response.
+                let serialized = serde_json::to_string_pretty(&data)?;
+                write(&cache_key, serialized)
+                    .await
+                    .map_err(|e| Error::Write(cache_key.clone(), e))?;
+                data
+            }
+            (false, None) => {
+                // Cache miss, offline mode
+                return Err(Error::OfflineMode);
+            }
+            (true, _) => {
+                // Cache hit
+                let data = read(&cache_key)
+                    .await
+                    .map_err(|e| Error::Read(cache_key.clone(), e))?;
+                serde_json::from_slice(&data)?
+            }
+        };
+
+        Ok(data)
+    }
+
+    #[tracing::instrument(level = "debug", skip_all)]
+    pub async fn keys(&self) -> Result<Keychain, Error> {
+        let cache_key = self.keys_path();
+
+        let cache_hit = cache_key.exists();
+        tracing::trace!(%cache_hit, cache_key = %cache_key.display());
+
+        let data = match (cache_hit, &self.client) {
+            (_, Some(client)) => {
+                // Cache hit or miss, online mode
+                // Eagerly refresh keys whenever online in case there are new keys with new expiration dates.
+                create_dir_all(&self.root)
+                    .await
+                    .map_err(|e| Error::Create(self.root.to_path_buf(), e))?;
+
+                let data = client.get_keys().await?;
+                // It would be preferable to store the raw server response.
+                let serialized = serde_json::to_string_pretty(&data)?;
+                write(&cache_key, serialized)
+                    .await
+                    .map_err(|e| Error::Write(cache_key.clone(), e))?;
+                data
+            }
+            (false, None) => {
+                // Cache miss, offline mode
+                return Err(Error::OfflineMode);
+            }
+            (true, None) => {
+                // Cache hit, offline mode
+                // We cannot refresh keys, so continue as usual
+                let data = read(&cache_key)
+                    .await
+                    .map_err(|e| Error::Read(cache_key.clone(), e))?;
+                serde_json::from_slice(&data)?
+            }
+        };
+
+        Ok(data)
+    }
+}
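For orientation, the path helpers in the new module lay the cache out as sketched below. This is an illustration derived from `keys_path`, `product_release_manifest_path`, and `package_path`; the `expected_cache_keys` function is hypothetical and not part of the patch:

```rust
use std::path::{Path, PathBuf};

/// Illustrative only: the on-disk cache keys used by `DownloadServerCache`
/// for a single product release, relative to the cache root.
fn expected_cache_keys(root: &Path, product: &str, release: &str, package_file: &str) -> [PathBuf; 3] {
    let release_dir = root.join("artifacts").join(product).join(release);
    [
        // keys() caches the serialized keychain, refreshed whenever a client is available.
        root.join("keys.json"),
        // product_release_manifest() caches the release manifest as JSON.
        release_dir.join("manifest.json"),
        // package() caches the downloaded tarball, e.g. "rustc.tar.xz" for the default format.
        release_dir.join(package_file),
    ]
}
```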
diff --git a/crates/criticalup-core/src/download_server_client.rs b/crates/criticalup-core/src/download_server_client.rs
index 502a3f8..d86f7a6 100644
--- a/crates/criticalup-core/src/download_server_client.rs
+++ b/crates/criticalup-core/src/download_server_client.rs
@@ -52,6 +52,7 @@ impl DownloadServerClient {
             .await
     }
 
+    #[tracing::instrument(level = "debug", skip_all)]
     pub async fn get_keys(&self) -> Result {
         let mut keychain = Keychain::new(&self.trust_root).map_err(Error::KeychainInitFailed)?;
 
@@ -68,6 +69,10 @@
         Ok(keychain)
     }
 
+    #[tracing::instrument(level = "debug", skip_all, fields(
+        %product,
+        %release,
+    ))]
     pub async fn get_product_release_manifest(
         &self,
         product: &str,
@@ -81,6 +86,12 @@
             .await
     }
 
+    #[tracing::instrument(level = "debug", skip_all, fields(
+        %product,
+        %release,
+        %package,
+        %format
+    ))]
     pub async fn download_package(
         &self,
         product: &str,
@@ -93,6 +104,7 @@
         let download_url =
             format!("/v1/releases/{product}/{release}/download/{package}/{artifact_format}");
 
+        tracing::info!("Downloading component '{package}' for '{product}' ({release})",);
         let response = self
             .send_with_auth(self.client.get(self.url(download_url.as_str())))
             .await?;
diff --git a/crates/criticalup-core/src/errors.rs b/crates/criticalup-core/src/errors.rs
index 2c7b5a9..f579ace 100644
--- a/crates/criticalup-core/src/errors.rs
+++ b/crates/criticalup-core/src/errors.rs
@@ -15,6 +15,9 @@ pub enum Error {
     #[error("could not detect the criticalup root directory")]
     CouldNotDetectRootDirectory,
 
+    #[error("could not detect the criticalup cache directory")]
+    CouldNotDetectCacheDirectory,
+
     #[error("failed to download {url}")]
     DownloadServerError {
         url: String,
         kind: DownloadServerError,
     },
@@ -22,6 +25,21 @@
+    #[error("Network access required, but in offline mode")]
+    OfflineMode,
+
+    #[error("Creating `{}`", .0.display())]
+    Create(PathBuf, #[source] std::io::Error),
+
+    #[error("Writing to `{}`", .0.display())]
+    Write(PathBuf, #[source] std::io::Error),
+
+    #[error("Reading from `{}`", .0.display())]
+    Read(PathBuf, #[source] std::io::Error),
+
+    #[error("JSON Serialization error")]
+    JsonSerialization(#[from] serde_json::Error),
+
     #[error("state file at {} is not supported by this release (state format version {1})", .0.display())]
     UnsupportedStateFileVersion(PathBuf, u32),
     #[error("failed to read the criticalup state file at {}", .0.display())]
diff --git a/crates/criticalup-core/src/lib.rs b/crates/criticalup-core/src/lib.rs
index 9cb2279..356cda1 100644
--- a/crates/criticalup-core/src/lib.rs
+++ b/crates/criticalup-core/src/lib.rs
@@ -3,10 +3,10 @@
 
 pub mod binary_proxies;
 pub mod config;
+pub mod download_server_cache;
 pub mod download_server_client;
 pub mod errors;
 pub mod project_manifest;
-
 pub mod state;
 mod utils;
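The new `Error::OfflineMode` variant is returned on any cache miss while `--offline` is active. A hedged sketch of how a caller might surface a friendlier hint for that case (hypothetical helper; the CLI's actual error reporting is unchanged by this patch, and the cache API signature is assumed from the module above):

```rust
use criticaltrust::manifests::ReleaseArtifactFormat;
use criticalup_core::download_server_cache::DownloadServerCache;
use criticalup_core::errors::Error;
use std::path::PathBuf;

/// Resolve a package through the cache, adding a hint when the cache is cold in offline mode.
async fn cached_package_or_hint(
    cache: &DownloadServerCache<'_>,
    product: &str,
    release: &str,
    package: &str,
    format: ReleaseArtifactFormat,
) -> Result<PathBuf, Error> {
    match cache.package(product, release, package, format).await {
        Ok(path) => Ok(path),
        Err(Error::OfflineMode) => {
            // Cold cache while offline: tell the user how to populate it.
            tracing::warn!("'{package}' is not cached; rerun `criticalup install` without --offline to download it");
            Err(Error::OfflineMode)
        }
        Err(other) => Err(other),
    }
}
```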
diff --git a/crates/criticalup-core/src/project_manifest/mod.rs b/crates/criticalup-core/src/project_manifest/mod.rs
index 34432c1..2dbaba0 100644
--- a/crates/criticalup-core/src/project_manifest/mod.rs
+++ b/crates/criticalup-core/src/project_manifest/mod.rs
@@ -11,6 +11,7 @@ use crate::project_manifest::substitutions::apply_substitutions;
 use crate::utils::Sha256Hasher;
 use serde::{Deserialize, Serialize};
 use std::env;
+use std::fmt::Display;
 use std::hash::{Hash, Hasher};
 use std::ops::{Deref, DerefMut};
 use std::path::{Path, PathBuf};
@@ -214,6 +215,12 @@ impl Deref for InstallationId {
     }
 }
 
+impl Display for InstallationId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
 #[derive(Deserialize)]
 #[serde(rename_all = "kebab-case")]
 struct VersionDetector {
diff --git a/crates/criticalup-core/src/test_utils.rs b/crates/criticalup-core/src/test_utils.rs
index d57910d..e8ffd4c 100644
--- a/crates/criticalup-core/src/test_utils.rs
+++ b/crates/criticalup-core/src/test_utils.rs
@@ -100,12 +100,18 @@ impl TestEnvironmentBuilder {
     pub(crate) async fn prepare(self) -> TestEnvironment {
         #[cfg(not(target_os = "windows"))]
         let root = TempDir::new().expect("failed to create temp dir");
-
         #[cfg(target_os = "windows")]
         let root =
             TempDir::new_in(std::env::current_dir().unwrap()).expect("failed to create temp dir");
-
         let mut root_path = root.path().to_path_buf();
+
+        #[cfg(not(target_os = "windows"))]
+        let cache = TempDir::new().expect("failed to create temp dir");
+        #[cfg(target_os = "windows")]
+        let cache =
+            TempDir::new_in(std::env::current_dir().unwrap()).expect("failed to create temp dir");
+        let cache_path = cache.path().to_path_buf();
+
         if let Some(subdir) = self.root_in_subdir {
             // A subdir creation is a requirement because root cannot be changed to anything
             // that does not exist.
@@ -115,7 +121,7 @@
             root_path = root_path.join(subdir);
         }
 
-        let mut config = Config::test(root_path).expect("failed to create config");
+        let mut config = Config::test(root_path, cache_path).expect("failed to create config");
 
         let keys = if self.keys {
             let keys = TestKeys::generate();
diff --git a/docs/src/using-criticalup/toolchain-management.rst b/docs/src/using-criticalup/toolchain-management.rst
index 016bae4..6c31d15 100644
--- a/docs/src/using-criticalup/toolchain-management.rst
+++ b/docs/src/using-criticalup/toolchain-management.rst
@@ -83,6 +83,9 @@ Then run the install command again:
 
    criticalup install
 
+When an internet connection is not available, a previously installed package
+can be reinstalled without using the network by passing the ``--offline`` flag.
+
 Removing Toolchains
 ^^^^^^^^^^^^^^^^^^^
 
@@ -97,9 +100,9 @@ from the directory containing the ``criticalup.toml``:
 Cleaning Unused Toolchains
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Over time CriticalUp's stored installations may accumulate artifacts that
-are no longer used. If CriticalUp's state directory begins to consume too much
-disk space the ``clean`` command can help by deleting unused toolchains.
+Over time CriticalUp's stored installations and cache may accumulate artifacts
+that are no longer used. If CriticalUp's state directory begins to consume too
+much disk space, the ``clean`` command can help by deleting unused toolchains.
 
 .. code-block::