Compare commits
3 Commits
feat/nats ... feat/docke

| Author | SHA1 | Date |
|---|---|---|
| | dd6e36889b | |
| | 50b3995449 | |
| | 8d27ecf6de | |
@@ -10,7 +10,7 @@ members = [
     "opnsense-config",
     "opnsense-config-xml",
     "harmony_cli",
-    "k3d",
+    "harmony_tools",
     "harmony_composer",
     "harmony_inventory_agent",
     "harmony_secret_derive",
@@ -9,6 +9,14 @@ license.workspace = true
 testing = []

 [dependencies]
+opnsense-config = { path = "../opnsense-config" }
+opnsense-config-xml = { path = "../opnsense-config-xml" }
+harmony_macros = { path = "../harmony_macros" }
+harmony_types = { path = "../harmony_types" }
+harmony_inventory_agent = { path = "../harmony_inventory_agent" }
+harmony_secret_derive = { path = "../harmony_secret_derive" }
+harmony_secret = { path = "../harmony_secret" }
+harmony_tools = { path = "../harmony_tools" }
 hex = "0.4"
 reqwest = { version = "0.11", features = [
     "blocking",
@@ -26,10 +34,6 @@ log.workspace = true
 env_logger.workspace = true
 async-trait.workspace = true
 cidr.workspace = true
-opnsense-config = { path = "../opnsense-config" }
-opnsense-config-xml = { path = "../opnsense-config-xml" }
-harmony_macros = { path = "../harmony_macros" }
-harmony_types = { path = "../harmony_types" }
 uuid.workspace = true
 url.workspace = true
 kube = { workspace = true, features = ["derive"] }
@@ -39,7 +43,6 @@ http.workspace = true
 serde-value.workspace = true
 helm-wrapper-rs = "0.4.0"
 non-blank-string-rs = "1.0.4"
-k3d-rs = { path = "../k3d" }
 directories.workspace = true
 lazy_static.workspace = true
 dockerfile_builder = "0.1.5"
@@ -71,9 +74,6 @@ base64.workspace = true
 thiserror.workspace = true
 once_cell = "1.21.3"
 walkdir = "2.5.0"
-harmony_inventory_agent = { path = "../harmony_inventory_agent" }
-harmony_secret_derive = { path = "../harmony_secret_derive" }
-harmony_secret = { path = "../harmony_secret" }
 askama.workspace = true
 sqlx.workspace = true
 inquire.workspace = true

harmony/src/domain/topology/docker/mod.rs (new file, 11 lines)

@@ -0,0 +1,11 @@
+use async_trait::async_trait;
+
+use std::collections::HashMap;
+
+/// Docker Capability
+#[async_trait]
+pub trait Docker {
+    async fn ensure_installed(&self) -> Result<(), String>;
+    fn get_docker_env(&self) -> HashMap<String, String>;
+    fn docker_command(&self) -> std::process::Command;
+}
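
The hunk above introduces the `Docker` capability trait for topologies. A minimal sketch of what an implementor looks like, assuming a hypothetical `LocalTopology` type (not part of this change); the `harmony_tools::Docker` calls mirror the `K8sAnywhereTopology` implementation shown later in this diff:

```rust
use std::collections::HashMap;
use std::path::PathBuf;

use async_trait::async_trait;

use crate::topology::Docker; // the trait added in this hunk

// Hypothetical topology, used only to illustrate the trait; not part of this change.
pub struct LocalTopology {
    data_dir: PathBuf,
}

#[async_trait]
impl Docker for LocalTopology {
    async fn ensure_installed(&self) -> Result<(), String> {
        // Delegate to the harmony_tools::Docker helper introduced later in this diff.
        harmony_tools::Docker::new(self.data_dir.join("docker"))
            .ensure_installed()
            .await
    }

    fn get_docker_env(&self) -> HashMap<String, String> {
        harmony_tools::Docker::new(self.data_dir.join("docker")).get_docker_env()
    }

    fn docker_command(&self) -> std::process::Command {
        harmony_tools::Docker::new(self.data_dir.join("docker")).command()
    }
}
```
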
@@ -16,7 +16,7 @@ use kube::{
         Api, AttachParams, DeleteParams, ListParams, ObjectList, Patch, PatchParams, ResourceExt,
     },
     config::{KubeConfigOptions, Kubeconfig},
-    core::{DynamicResourceScope, ErrorResponse},
+    core::ErrorResponse,
     discovery::{ApiCapabilities, Scope},
     error::DiscoveryError,
     runtime::reflector::Lookup,
@@ -1,7 +1,13 @@
-use std::{collections::BTreeMap, process::Command, sync::Arc, time::Duration};
+use std::{
+    collections::{BTreeMap, HashMap},
+    process::Command,
+    sync::Arc,
+    time::Duration,
+};

 use async_trait::async_trait;
 use base64::{Engine, engine::general_purpose};
+use harmony_tools::K3d;
 use harmony_types::rfc1123::Rfc1123Name;
 use k8s_openapi::api::{
     core::v1::Secret,
@@ -13,10 +19,12 @@ use serde::Serialize;
 use tokio::sync::OnceCell;

 use crate::{
+    config::HARMONY_DATA_DIR,
     executors::ExecutorError,
     interpret::InterpretStatus,
     inventory::Inventory,
     modules::{
+        docker::DockerInstallationScore,
         k3d::K3DInstallationScore,
         k8s::ingress::{K8sIngressScore, PathType},
         monitoring::{
@@ -42,7 +50,7 @@ use crate::{
         },
     },
     score::Score,
-    topology::{TlsRoute, TlsRouter, ingress::Ingress},
+    topology::{Docker, TlsRoute, TlsRouter, ingress::Ingress},
 };

 use super::super::{
@@ -350,6 +358,24 @@ impl PrometheusMonitoring<RHOBObservability> for K8sAnywhereTopology {
     }
 }

+#[async_trait]
+impl Docker for K8sAnywhereTopology {
+    async fn ensure_installed(&self) -> Result<(), String> {
+        DockerInstallationScore::default()
+            .interpret(&Inventory::empty(), self)
+            .await
+            .map_err(|e| format!("Could not ensure docker is installed : {e}"))?;
+        Ok(())
+    }
+    fn get_docker_env(&self) -> HashMap<String, String> {
+        harmony_tools::Docker::new(HARMONY_DATA_DIR.join("docker")).get_docker_env()
+    }
+
+    fn docker_command(&self) -> std::process::Command {
+        harmony_tools::Docker::new(HARMONY_DATA_DIR.join("docker")).command()
+    }
+}
+
 impl Serialize for K8sAnywhereTopology {
     fn serialize<S>(&self, _serializer: S) -> Result<S::Ok, S::Error>
     where
@@ -737,7 +763,7 @@ impl K8sAnywhereTopology {
         // K3DInstallationScore should expose a method to get_client ? Not too sure what would be a
         // good implementation due to the stateful nature of the k3d thing. Which is why I went
         // with this solution for now
-        let k3d = k3d_rs::K3d::new(k3d_score.installation_path, Some(k3d_score.cluster_name));
+        let k3d = K3d::new(k3d_score.installation_path, Some(k3d_score.cluster_name));
         let state = match k3d.get_client().await {
             Ok(client) => K8sState {
                 client: Arc::new(K8sClient::new(client)),
@@ -1,8 +1,10 @@
+mod docker;
 mod failover;
 mod ha_cluster;
 pub mod ingress;
 pub mod node_exporter;
 pub mod opnsense;
+pub use docker::*;
 pub use failover::*;
 use harmony_types::net::IpAddress;
 mod host_binding;

harmony/src/modules/docker.rs (new file, 79 lines)

@@ -0,0 +1,79 @@
+use std::path::PathBuf;
+
+use async_trait::async_trait;
+use log::debug;
+use serde::Serialize;
+
+use crate::{
+    config::HARMONY_DATA_DIR,
+    data::Version,
+    interpret::{Interpret, InterpretError, InterpretName, InterpretStatus, Outcome},
+    inventory::Inventory,
+    score::Score,
+    topology::{Docker, Topology},
+};
+use harmony_types::id::Id;
+
+#[derive(Debug, Clone, Serialize)]
+pub struct DockerInstallationScore {
+    pub installation_path: PathBuf,
+}
+
+impl Default for DockerInstallationScore {
+    fn default() -> Self {
+        Self {
+            installation_path: HARMONY_DATA_DIR.join("docker"),
+        }
+    }
+}
+
+impl<T: Topology + Docker> Score<T> for DockerInstallationScore {
+    fn create_interpret(&self) -> Box<dyn Interpret<T>> {
+        Box::new(DockerInstallationInterpret {
+            score: self.clone(),
+        })
+    }
+
+    fn name(&self) -> String {
+        "DockerInstallationScore".into()
+    }
+}
+
+#[derive(Debug)]
+pub struct DockerInstallationInterpret {
+    score: DockerInstallationScore,
+}
+
+#[async_trait]
+impl<T: Topology + Docker> Interpret<T> for DockerInstallationInterpret {
+    async fn execute(
+        &self,
+        _inventory: &Inventory,
+        _topology: &T,
+    ) -> Result<Outcome, InterpretError> {
+        let docker = harmony_tools::Docker::new(self.score.installation_path.clone());
+
+        match docker.ensure_installed().await {
+            Ok(_) => {
+                let msg = "Docker is installed and ready".to_string();
+                debug!("{msg}");
+                Ok(Outcome::success(msg))
+            }
+            Err(msg) => Err(InterpretError::new(format!(
+                "failed to ensure docker is installed : {msg}"
+            ))),
+        }
+    }
+    fn get_name(&self) -> InterpretName {
+        InterpretName::Custom("DockerInstallation")
+    }
+    fn get_version(&self) -> Version {
+        todo!()
+    }
+    fn get_status(&self) -> InterpretStatus {
+        todo!()
+    }
+    fn get_children(&self) -> Vec<Id> {
+        todo!()
+    }
+}
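
For orientation, this score is driven the same way the `K8sAnywhereTopology` implementation earlier in this diff drives it. A hedged sketch follows; the free function and its name are illustrative, while the `interpret` call on the score and `Inventory::empty()` are taken from that call site:

```rust
use crate::inventory::Inventory;
use crate::modules::docker::DockerInstallationScore;
use crate::topology::{Docker, Topology};

// Illustrative helper: run the Docker installation score against any topology
// that implements both Topology and Docker, as K8sAnywhereTopology does above.
pub async fn install_docker<T: Topology + Docker>(topology: &T) -> Result<(), String> {
    DockerInstallationScore::default()
        .interpret(&Inventory::empty(), topology)
        .await
        .map_err(|e| format!("Could not ensure docker is installed : {e}"))?;
    Ok(())
}
```
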
@@ -1,6 +1,7 @@
 use std::path::PathBuf;

 use async_trait::async_trait;
+use harmony_tools::K3d;
 use log::debug;
 use serde::Serialize;

@@ -10,7 +11,7 @@ use crate::{
     interpret::{Interpret, InterpretError, InterpretName, InterpretStatus, Outcome},
     inventory::Inventory,
     score::Score,
-    topology::Topology,
+    topology::{Docker, Topology},
 };
 use harmony_types::id::Id;

@@ -29,7 +30,7 @@ impl Default for K3DInstallationScore {
     }
 }

-impl<T: Topology> Score<T> for K3DInstallationScore {
+impl<T: Topology + Docker> Score<T> for K3DInstallationScore {
     fn create_interpret(&self) -> Box<dyn Interpret<T>> {
         Box::new(K3dInstallationInterpret {
             score: self.clone(),
@@ -47,19 +48,25 @@ pub struct K3dInstallationInterpret {
 }

 #[async_trait]
-impl<T: Topology> Interpret<T> for K3dInstallationInterpret {
+impl<T: Topology + Docker> Interpret<T> for K3dInstallationInterpret {
     async fn execute(
         &self,
         _inventory: &Inventory,
-        _topology: &T,
+        topology: &T,
     ) -> Result<Outcome, InterpretError> {
-        let k3d = k3d_rs::K3d::new(
+        let k3d = K3d::new(
            self.score.installation_path.clone(),
            Some(self.score.cluster_name.clone()),
        );

+        Docker::ensure_installed(topology)
+            .await
+            .map_err(|e| InterpretError::new(format!("Docker requirement for k3d failed: {e}")))?;
+
         match k3d.ensure_installed().await {
             Ok(_client) => {
+                // Ensure Docker is also ready as k3d depends on it
+
                 let msg = format!("k3d cluster '{}' installed ", self.score.cluster_name);
                 debug!("{msg}");
                 Ok(Outcome::success(msg))
@@ -4,6 +4,7 @@ pub mod brocade;
 pub mod cert_manager;
 pub mod dhcp;
 pub mod dns;
+pub mod docker;
 pub mod dummy;
 pub mod helm;
 pub mod http;
@@ -1,5 +1,6 @@
 [package]
-name = "k3d-rs"
+name = "harmony_tools"
+description = "Install tools such as k3d, docker and more"
 edition = "2021"
 version.workspace = true
 readme.workspace = true
@@ -16,6 +17,7 @@ url.workspace = true
 sha2 = "0.10.8"
 futures-util = "0.3.31"
 kube.workspace = true
+inquire.workspace = true

 [dev-dependencies]
 env_logger = { workspace = true }

harmony_tools/src/docker.rs (new file, 326 lines)

@@ -0,0 +1,326 @@
+use crate::downloadable_asset::DownloadableAsset;
+use inquire::Select;
+use log::{debug, error, info, trace, warn};
+use std::collections::HashMap;
+use std::fmt;
+use std::path::PathBuf;
+use url::Url;
+
+pub struct Docker {
+    base_dir: PathBuf,
+}
+
+#[derive(Debug, PartialEq)]
+pub enum DockerVariant {
+    Standard,
+    Rootless,
+    Manual,
+}
+
+impl fmt::Display for DockerVariant {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            DockerVariant::Standard => write!(f, "Standard Docker (requires sudo)"),
+            DockerVariant::Rootless => write!(f, "Rootless Docker (no sudo required)"),
+            DockerVariant::Manual => {
+                write!(f, "Exit and install manually (Docker or podman-docker)")
+            }
+        }
+    }
+}
+
+impl Docker {
+    pub fn new(base_dir: PathBuf) -> Self {
+        Self { base_dir }
+    }
+
+    /// Provides the DOCKER_HOST and DOCKER_SOCK env vars for local usage.
+    ///
+    /// If a rootless Docker installation is detected in the user's home directory,
+    /// it returns the appropriate `DOCKER_HOST` pointing to the user's Docker socket.
+    /// Otherwise, it returns an empty HashMap, assuming the standard system-wide
+    /// Docker installation is used.
+    pub fn get_docker_env(&self) -> HashMap<String, String> {
+        let mut env = HashMap::new();
+
+        if let Ok(home) = std::env::var("HOME") {
+            let rootless_sock = PathBuf::from(&home).join(".docker/run/docker.sock");
+            let rootless_bin = PathBuf::from(&home).join("bin/docker");
+
+            if rootless_bin.exists() && rootless_sock.exists() {
+                let docker_host = format!("unix://{}", rootless_sock.display());
+                debug!(
+                    "Detected rootless Docker, setting DOCKER_HOST={}",
+                    docker_host
+                );
+                env.insert("DOCKER_HOST".to_string(), docker_host);
+            }
+        }
+
+        env
+    }
+
+    /// Gets the path to the docker binary
+    pub fn get_bin_path(&self) -> PathBuf {
+        // Check standard PATH first
+        if let Ok(path) = std::process::Command::new("which")
+            .arg("docker")
+            .output()
+            .map(|o| PathBuf::from(String::from_utf8_lossy(&o.stdout).trim()))
+        {
+            if path.exists() {
+                debug!("Found Docker in PATH: {:?}", path);
+                return path;
+            }
+        }
+
+        // Check common rootless location
+        if let Ok(home) = std::env::var("HOME") {
+            let rootless_path = PathBuf::from(home).join("bin/docker");
+            if rootless_path.exists() {
+                debug!("Found rootless Docker at: {:?}", rootless_path);
+                return rootless_path;
+            }
+        }
+
+        debug!("Docker not found in PATH or rootless location, using 'docker' from PATH");
+        PathBuf::from("docker")
+    }
+
+    /// Checks if Docker is installed and the daemon is responsive.
+    pub fn is_installed(&self) -> bool {
+        trace!("Checking if Docker is installed and responsive");
+
+        self.command()
+            .arg("info")
+            .output()
+            .map(|output| {
+                if output.status.success() {
+                    trace!("Docker daemon is responsive");
+                    true
+                } else {
+                    trace!(
+                        "Docker daemon check failed with status: {:?}",
+                        output.status
+                    );
+                    false
+                }
+            })
+            .map_err(|e| {
+                trace!("Failed to execute Docker daemon check: {}", e);
+                e
+            })
+            .unwrap_or(false)
+    }
+
+    /// Prompts the user to choose an installation method
+    fn prompt_for_installation(&self) -> DockerVariant {
+        let options = vec![
+            DockerVariant::Standard,
+            DockerVariant::Rootless,
+            DockerVariant::Manual,
+        ];
+
+        Select::new(
+            "Docker binary was not found. How would you like to proceed?",
+            options,
+        )
+        .with_help_message("Standard requires sudo. Rootless runs in user space.")
+        .prompt()
+        .unwrap_or(DockerVariant::Manual)
+    }
+
+    /// Installs docker using the official shell script
+    pub async fn install(&self, variant: DockerVariant) -> Result<(), String> {
+        let (script_url, script_name, use_sudo) = match variant {
+            DockerVariant::Standard => ("https://get.docker.com", "get-docker.sh", true),
+            DockerVariant::Rootless => (
+                "https://get.docker.com/rootless",
+                "get-docker-rootless.sh",
+                false,
+            ),
+            DockerVariant::Manual => return Err("Manual installation selected".to_string()),
+        };
+
+        info!("Installing {}...", variant);
+        debug!("Downloading installation script from: {}", script_url);
+
+        // Download the installation script
+        let asset = DownloadableAsset {
+            url: Url::parse(script_url).map_err(|e| {
+                error!("Failed to parse installation script URL: {}", e);
+                format!("Failed to parse installation script URL: {}", e)
+            })?,
+            file_name: script_name.to_string(),
+            checksum: None,
+        };
+
+        let downloaded_script = asset
+            .download_to_path(self.base_dir.join("scripts"))
+            .await
+            .map_err(|e| {
+                error!("Failed to download installation script: {}", e);
+                format!("Failed to download installation script: {}", e)
+            })?;
+
+        debug!("Installation script downloaded to: {:?}", downloaded_script);
+
+        // Execute the installation script
+        let mut cmd = std::process::Command::new("sh");
+        if use_sudo {
+            cmd.arg("sudo").arg("sh");
+        }
+        cmd.arg(&downloaded_script);
+
+        debug!("Executing installation command: {:?}", cmd);
+
+        let status = cmd.status().map_err(|e| {
+            error!("Failed to execute docker installation script: {}", e);
+            format!("Failed to execute docker installation script: {}", e)
+        })?;
+
+        if status.success() {
+            info!("{} installed successfully", variant);
+            if variant == DockerVariant::Rootless {
+                info!("Running rootless setup tool to install dependencies and start service...");
+                let mut setup_cmd = std::process::Command::new("sh");
+
+                // Set PATH to include ~/bin where the script was likely installed
+                if let Ok(home) = std::env::var("HOME") {
+                    let bin_path = format!("{}/bin", home);
+                    if let Ok(current_path) = std::env::var("PATH") {
+                        setup_cmd.env("PATH", format!("{}:{}", bin_path, current_path));
+                    }
+                    setup_cmd.arg(format!("{}/bin/dockerd-rootless-setuptool.sh", home));
+                } else {
+                    setup_cmd.arg("dockerd-rootless-setuptool.sh");
+                }
+
+                setup_cmd.arg("install");
+
+                debug!("Executing rootless setup command: {:?}", setup_cmd);
+                let setup_status = setup_cmd.status().map_err(|e| {
+                    error!("Failed to execute rootless setup tool: {}", e);
+                    format!("Failed to execute rootless setup tool: {}", e)
+                })?;
+
+                if !setup_status.success() {
+                    warn!("Rootless setup tool finished with non-zero exit code. You may need to install 'uidmap' or start the service manually.");
+                }
+
+                warn!("Please follow the instructions above to finish rootless setup (environment variables).");
+            }
+
+            // Validate the installation by running hello-world
+            self.validate_installation()?;
+
+            Ok(())
+        } else {
+            error!(
+                "{} installation script failed with exit code: {:?} \n\nOutput:\n{:?}",
+                variant,
+                status.code(),
+                cmd.output(),
+            );
+            Err(format!("{} installation script failed", variant))
+        }
+    }
+
+    /// Validates the Docker installation by running a test container.
+    ///
+    /// This method runs `docker run --rm hello-world` to verify that Docker
+    /// is properly installed and functional.
+    fn validate_installation(&self) -> Result<(), String> {
+        info!("Validating Docker installation by running hello-world container...");
+
+        let output = self
+            .command()
+            .args(["run", "--rm", "hello-world"])
+            .output()
+            .map_err(|e| {
+                error!("Failed to execute hello-world validation: {}", e);
+                format!("Failed to execute hello-world validation: {}", e)
+            })?;
+
+        if output.status.success() {
+            let stdout = String::from_utf8_lossy(&output.stdout);
+            if stdout.contains("Hello from Docker!") {
+                info!("Docker installation validated successfully");
+                trace!("Validation output: {}", stdout);
+                Ok(())
+            } else {
+                warn!("Hello-world container ran but expected output not found");
+                debug!("Output was: {}", stdout);
+                Err("Docker validation failed: unexpected output from hello-world".to_string())
+            }
+        } else {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            error!(
+                "Hello-world validation failed with exit code: {:?}",
+                output.status.code()
+            );
+            debug!("Validation stderr: {}", stderr);
+            if !stderr.is_empty() {
+                Err(format!("Docker validation failed: {}", stderr.trim()))
+            } else {
+                Err(
+                    "Docker validation failed: hello-world container did not run successfully"
+                        .to_string(),
+                )
+            }
+        }
+    }
+
+    /// Ensures docker is installed, prompting if necessary
+    pub async fn ensure_installed(&self) -> Result<(), String> {
+        if self.is_installed() {
+            debug!("Docker is already installed at: {:?}", self.get_bin_path());
+            return Ok(());
+        }
+
+        debug!("Docker is not installed, prompting for installation method");
+        match self.prompt_for_installation() {
+            DockerVariant::Manual => {
+                info!("User chose manual installation");
+                Err("Docker installation cancelled by user. Please install docker or podman-docker manually.".to_string())
+            }
+            variant => self.install(variant).await,
+        }
+    }
+
+    /// Creates a pre-configured Command for running Docker commands.
+    ///
+    /// The returned Command is set up with:
+    /// - The correct Docker binary path (handles rootless installations)
+    /// - Appropriate environment variables (e.g., DOCKER_HOST for rootless)
+    ///
+    /// # Example
+    ///
+    /// ```no_run
+    /// # use harmony_tools::Docker;
+    /// # use std::path::PathBuf;
+    /// # let docker = Docker::new(PathBuf::from("."));
+    /// let mut cmd = docker.command();
+    /// cmd.args(["ps", "-a"]);
+    /// // Now cmd is ready to be executed
+    /// ```
+    pub fn command(&self) -> std::process::Command {
+        let bin_path = self.get_bin_path();
+        trace!("Creating Docker command with binary: {:?}", bin_path);
+
+        let mut cmd = std::process::Command::new(&bin_path);
+
+        // Add Docker-specific environment variables
+        let env = self.get_docker_env();
+        if !env.is_empty() {
+            trace!("Setting Docker environment variables: {:?}", env);
+            for (key, value) in env {
+                cmd.env(key, value);
+            }
+        } else {
+            trace!("No Docker-specific environment variables to set");
+        }
+
+        cmd
+    }
+}
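
As a quick orientation for the new helper, here is a hedged usage sketch. The installation directory is arbitrary in this example (the rest of this PR passes `HARMONY_DATA_DIR.join("docker")`), and the tokio runtime is an assumption based on the crate's existing async code:

```rust
use std::path::PathBuf;

use harmony_tools::Docker;

#[tokio::main]
async fn main() -> Result<(), String> {
    // The base dir is illustrative; Harmony itself uses HARMONY_DATA_DIR.join("docker").
    let docker = Docker::new(PathBuf::from("/tmp/harmony/docker"));

    // Installs Docker (prompting for standard vs rootless) only when `docker info` fails.
    docker.ensure_installed().await?;

    // command() resolves the right binary and sets DOCKER_HOST for rootless installs.
    let output = docker
        .command()
        .args(["ps", "-a"])
        .output()
        .map_err(|e| format!("failed to run docker ps: {e}"))?;
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```
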
@@ -39,11 +39,20 @@ const CHECKSUM_FAILED_MSG: &str = "Downloaded file failed checksum verification"
 pub(crate) struct DownloadableAsset {
     pub(crate) url: Url,
     pub(crate) file_name: String,
-    pub(crate) checksum: String,
+    pub(crate) checksum: Option<String>,
 }

 impl DownloadableAsset {
     fn verify_checksum(&self, file: PathBuf) -> bool {
+        // Skip verification if no checksum is provided
+        let expected_checksum = match &self.checksum {
+            Some(checksum) => checksum,
+            None => {
+                debug!("No checksum provided, skipping verification");
+                return file.exists();
+            }
+        };
+
         if !file.exists() {
             debug!("File does not exist: {:?}", file);
             return false;
@@ -76,10 +85,10 @@ impl DownloadableAsset {
         let result = hasher.finalize();
         let calculated_hash = format!("{:x}", result);

-        debug!("Expected checksum: {}", self.checksum);
+        debug!("Expected checksum: {}", expected_checksum);
         debug!("Calculated checksum: {}", calculated_hash);

-        calculated_hash == self.checksum
+        calculated_hash == *expected_checksum
     }

     /// Downloads the asset to the specified directory, verifying its checksum.
@@ -151,7 +160,8 @@ impl DownloadableAsset {
         file.flush().await.expect("Failed to flush file");
         drop(file);

-        if !self.verify_checksum(target_file_path.clone()) {
+        // Only verify checksum if one was provided
+        if self.checksum.is_some() && !self.verify_checksum(target_file_path.clone()) {
             return Err(CHECKSUM_FAILED_MSG.to_string());
         }

@@ -202,7 +212,7 @@ mod tests {
         let asset = DownloadableAsset {
             url: Url::parse(&server.url("/test.txt").to_string()).unwrap(),
             file_name: "test.txt".to_string(),
-            checksum: TEST_CONTENT_HASH.to_string(),
+            checksum: Some(TEST_CONTENT_HASH.to_string()),
         };

         let result = asset
@@ -226,7 +236,7 @@ mod tests {
         let asset = DownloadableAsset {
             url: Url::parse(&server.url("/test.txt").to_string()).unwrap(),
             file_name: "test.txt".to_string(),
-            checksum: TEST_CONTENT_HASH.to_string(),
+            checksum: Some(TEST_CONTENT_HASH.to_string()),
         };

         let target_file_path = folder.join(&asset.file_name);
@@ -248,7 +258,7 @@ mod tests {
         let asset = DownloadableAsset {
             url: Url::parse(&server.url("/test.txt").to_string()).unwrap(),
             file_name: "test.txt".to_string(),
-            checksum: TEST_CONTENT_HASH.to_string(),
+            checksum: Some(TEST_CONTENT_HASH.to_string()),
         };

         let result = asset.download_to_path(folder.join("error")).await;
@@ -269,7 +279,7 @@ mod tests {
         let asset = DownloadableAsset {
             url: Url::parse(&server.url("/test.txt").to_string()).unwrap(),
             file_name: "test.txt".to_string(),
-            checksum: TEST_CONTENT_HASH.to_string(),
+            checksum: Some(TEST_CONTENT_HASH.to_string()),
         };

         let join_handle =
@@ -293,11 +303,58 @@ mod tests {
         let asset = DownloadableAsset {
             url: Url::parse(&server.url("/specific/path.txt").to_string()).unwrap(),
             file_name: "path.txt".to_string(),
-            checksum: TEST_CONTENT_HASH.to_string(),
+            checksum: Some(TEST_CONTENT_HASH.to_string()),
         };

         let result = asset.download_to_path(folder).await.unwrap();
         let downloaded_content = std::fs::read_to_string(result).unwrap();
         assert_eq!(downloaded_content, TEST_CONTENT);
     }
+
+    #[tokio::test]
+    async fn test_download_without_checksum() {
+        let (folder, server) = setup_test();
+
+        server.expect(
+            Expectation::matching(matchers::any())
+                .respond_with(responders::status_code(200).body(TEST_CONTENT)),
+        );
+
+        let asset = DownloadableAsset {
+            url: Url::parse(&server.url("/test.txt").to_string()).unwrap(),
+            file_name: "test.txt".to_string(),
+            checksum: None,
+        };
+
+        let result = asset
+            .download_to_path(folder.join("no_checksum"))
+            .await
+            .unwrap();
+        let downloaded_content = std::fs::read_to_string(result).unwrap();
+        assert_eq!(downloaded_content, TEST_CONTENT);
+    }
+
+    #[tokio::test]
+    async fn test_download_without_checksum_already_exists() {
+        let (folder, server) = setup_test();
+
+        server.expect(
+            Expectation::matching(matchers::any())
+                .times(0)
+                .respond_with(responders::status_code(200).body(TEST_CONTENT)),
+        );
+
+        let asset = DownloadableAsset {
+            url: Url::parse(&server.url("/test.txt").to_string()).unwrap(),
+            file_name: "test.txt".to_string(),
+            checksum: None,
+        };
+
+        let target_file_path = folder.join(&asset.file_name);
+        std::fs::write(&target_file_path, TEST_CONTENT).unwrap();
+
+        let result = asset.download_to_path(folder).await.unwrap();
+        let content = std::fs::read_to_string(result).unwrap();
+        assert_eq!(content, TEST_CONTENT);
+    }
 }
@@ -1,10 +1,9 @@
-mod downloadable_asset;
-use downloadable_asset::*;
-
 use kube::Client;
 use log::{debug, info};
 use std::{ffi::OsStr, path::PathBuf};

+use crate::downloadable_asset::DownloadableAsset;
+
 const K3D_BIN_FILE_NAME: &str = "k3d";

 pub struct K3d {
@@ -78,6 +77,7 @@ impl K3d {

         debug!("Found binary at {} with checksum {}", binary_url, checksum);

+        let checksum = Some(checksum);
         DownloadableAsset {
             url: binary_url,
             file_name: K3D_BIN_FILE_NAME.to_string(),
@@ -399,7 +399,7 @@ mod test {
     use regex::Regex;
     use std::path::PathBuf;

-    use crate::{K3d, K3D_BIN_FILE_NAME};
+    use crate::{k3d::K3D_BIN_FILE_NAME, K3d};

     #[tokio::test]
     async fn k3d_latest_release_should_get_latest() {

harmony_tools/src/lib.rs (new file, 6 lines)

@@ -0,0 +1,6 @@
+mod docker;
+mod downloadable_asset;
+mod k3d;
+pub use docker::*;
+use downloadable_asset::*;
+pub use k3d::*;
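
These re-exports are what make `harmony_tools::Docker` and `harmony_tools::K3d` visible to the main crate (see the `use harmony_tools::K3d;` lines earlier in this diff), while `downloadable_asset` stays crate-private. A small sketch of the public surface; the base path and helper function are assumptions for illustration:

```rust
use std::path::PathBuf;

// Docker (and K3d) are reachable at the crate root thanks to the `pub use` re-exports.
use harmony_tools::Docker;

fn docker_helper(base: PathBuf) -> Docker {
    Docker::new(base.join("docker"))
}
```
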