diff --git a/examples/monitoring/src/main.rs b/examples/monitoring/src/main.rs
index d3eb3b8..c0fcf33 100644
--- a/examples/monitoring/src/main.rs
+++ b/examples/monitoring/src/main.rs
@@ -12,7 +12,7 @@ use harmony::{
 async fn main() {
     let discord_receiver = DiscordWebhook {
         name: "test-discord".to_string(),
-        url: Url::Url(url::Url::parse("https://discord.i.dont.exist.com").unwrap()),
+        url: Url::Url(url::Url::parse("https://discord.doesnt.exist.com").unwrap()),
     };
     let alerting_score = HelmPrometheusAlertingScore {
         receivers: vec![Box::new(discord_receiver)],
diff --git a/harmony/src/domain/topology/installable.rs b/harmony/src/domain/topology/installable.rs
index 0092065..8d8178c 100644
--- a/harmony/src/domain/topology/installable.rs
+++ b/harmony/src/domain/topology/installable.rs
@@ -1,10 +1,12 @@
 use async_trait::async_trait;
 
-use crate::interpret::InterpretError;
-
-use super::oberservability::monitoring::AlertSender;
+use crate::{interpret::InterpretError, inventory::Inventory};
 
 #[async_trait]
-pub trait Installable {
-    async fn ensure_installed(&self) -> Result<(), InterpretError>;
+pub trait Installable<T>: Send + Sync {
+    async fn ensure_installed(
+        &self,
+        inventory: &Inventory,
+        topology: &T,
+    ) -> Result<(), InterpretError>;
 }
diff --git a/harmony/src/domain/topology/oberservability/monitoring.rs b/harmony/src/domain/topology/oberservability/monitoring.rs
index c4d19d5..c66cfc2 100644
--- a/harmony/src/domain/topology/oberservability/monitoring.rs
+++ b/harmony/src/domain/topology/oberservability/monitoring.rs
@@ -4,18 +4,12 @@ use crate::{
     data::{Id, Version},
     interpret::{Interpret, InterpretError, InterpretName, InterpretStatus, Outcome},
     inventory::Inventory,
-    modules::monitoring::kube_prometheus::prometheus::Installer,
     topology::{HelmCommand, Topology, installable::Installable},
 };
 
 #[async_trait]
-pub trait AlertSender: Send + Sync + std::fmt::Debug + Installable {
+pub trait AlertSender: Send + Sync + std::fmt::Debug {
     fn name(&self) -> String;
-    async fn install<T: Topology + HelmCommand>(
-        &self,
-        inventory: &Inventory,
-        topology: &T,
-    ) -> Result<Outcome, InterpretError>;
 }
 
 #[derive(Debug)]
@@ -25,7 +19,7 @@ pub struct AlertingInterpret<S: AlertSender> {
 }
 
 #[async_trait]
-impl<S: AlertSender, T: Topology + HelmCommand> Interpret<T> for AlertingInterpret<S> {
+impl<S: AlertSender + Installable<T>, T: Topology + HelmCommand> Interpret<T> for AlertingInterpret<S> {
     async fn execute(
         &self,
         inventory: &Inventory,
@@ -34,7 +28,7 @@ impl<S: AlertSender, T: Topology + HelmCommand> Interpret<T> for AlertingInterpr
         for receiver in self.receivers.iter() {
             receiver.install(&self.sender).await?;
         }
-        self.sender.ensure_installed().await?;
+        self.sender.ensure_installed(inventory, topology).await?;
         Ok(Outcome::success(format!(
             "successfully installed alert sender {}",
             self.sender.name()
diff --git a/harmony/src/modules/monitoring/alert_channel/discord_alert_channel.rs b/harmony/src/modules/monitoring/alert_channel/discord_alert_channel.rs
index 8fa7523..05e313e 100644
--- a/harmony/src/modules/monitoring/alert_channel/discord_alert_channel.rs
+++ b/harmony/src/modules/monitoring/alert_channel/discord_alert_channel.rs
@@ -5,7 +5,7 @@ use crate::{
     interpret::{InterpretError, Outcome},
     modules::monitoring::kube_prometheus::{
         prometheus::{Prometheus, PrometheusReceiver},
-        types::AlertChannelConfig,
+        types::{AlertChannelConfig, AlertManagerChannelConfig},
     },
     topology::{Url, oberservability::monitoring::AlertReceiver},
 };
@@ -28,20 +28,30 @@ impl AlertReceiver<Prometheus> for DiscordWebhook {
 
 #[async_trait]
 impl PrometheusReceiver for DiscordWebhook {
-    //TODO not return a tuple
     fn name(&self) -> String {
         self.name.clone()
     }
-    async fn receiver_config(&self) -> Value {
-        self.alert_channel_receiver().await
+    async fn configure_receiver(&self) -> AlertManagerChannelConfig {
+        self.get_config().await
     }
 }
 
 #[async_trait]
 impl AlertChannelConfig for DiscordWebhook {
-    async fn alert_channel_global_config(&self) -> Option<(Value, Value)> {
-        None
+    async fn get_config(&self) -> AlertManagerChannelConfig {
+        let channel_global_config = None;
+        let channel_receiver = self.alert_channel_receiver().await;
+        let channel_route = self.alert_channel_route().await;
+
+        AlertManagerChannelConfig {
+            channel_global_config,
+            channel_receiver,
+            channel_route,
+        }
     }
+}
+
+impl DiscordWebhook {
 
     async fn alert_channel_route(&self) -> serde_yaml::Value {
         let mut route = Mapping::new();
diff --git a/harmony/src/modules/monitoring/kube_prometheus/helm/config.rs b/harmony/src/modules/monitoring/kube_prometheus/helm/config.rs
index 39d449f..631d9d8 100644
--- a/harmony/src/modules/monitoring/kube_prometheus/helm/config.rs
+++ b/harmony/src/modules/monitoring/kube_prometheus/helm/config.rs
@@ -1,7 +1,7 @@
 use serde::Serialize;
 use serde_yaml::Value;
 
-use crate::modules::monitoring::kube_prometheus::prometheus::PrometheusReceiver;
+use crate::modules::monitoring::kube_prometheus::{prometheus::PrometheusReceiver, types::AlertManagerChannelConfig};
 
 #[derive(Debug, Clone, Serialize)]
 pub struct KubePrometheusConfig {
@@ -22,7 +22,7 @@ pub struct KubePrometheusConfig {
     pub kube_proxy: bool,
     pub kube_state_metrics: bool,
     pub prometheus_operator: bool,
-    pub alert_receiver_configs: Vec<Value>,
+    pub alert_receiver_configs: Vec<AlertManagerChannelConfig>,
 }
 impl KubePrometheusConfig {
     pub fn new() -> Self {
diff --git a/harmony/src/modules/monitoring/kube_prometheus/helm/kube_prometheus_helm_chart.rs b/harmony/src/modules/monitoring/kube_prometheus/helm/kube_prometheus_helm_chart.rs
index 5f2baaf..8420fca 100644
--- a/harmony/src/modules/monitoring/kube_prometheus/helm/kube_prometheus_helm_chart.rs
+++ b/harmony/src/modules/monitoring/kube_prometheus/helm/kube_prometheus_helm_chart.rs
@@ -1,11 +1,19 @@
 use super::config::KubePrometheusConfig;
+use log::debug;
 use non_blank_string_rs::NonBlankString;
+use serde_yaml::{Mapping, Value};
 use std::{
     str::FromStr,
     sync::{Arc, Mutex},
 };
 
-use crate::modules::helm::chart::HelmChartScore;
+use crate::modules::{
+    helm::chart::HelmChartScore,
+    monitoring::kube_prometheus::types::{
+        AlertManager, AlertManagerChannelConfig, AlertManagerConfig, AlertManagerRoute,
+        AlertManagerValues,
+    },
+};
 
 pub fn kube_prometheus_helm_chart_score(
     config: Arc<Mutex<KubePrometheusConfig>>,
@@ -15,11 +23,11 @@ pub fn kube_prometheus_helm_chart_score(
     //to the overrides or something leaving the user to deal with formatting here seems bad
     let default_rules = config.default_rules.to_string();
     let windows_monitoring = config.windows_monitoring.to_string();
-    let alert_manager = config.alert_manager.to_string();
     let grafana = config.grafana.to_string();
     let kubernetes_service_monitors = config.kubernetes_service_monitors.to_string();
     let kubernetes_api_server = config.kubernetes_api_server.to_string();
     let kubelet = config.kubelet.to_string();
+    let alert_manager = config.alert_manager.to_string();
     let kube_controller_manager = config.kube_controller_manager.to_string();
     let core_dns = config.core_dns.to_string();
     let kube_etcd = config.kube_etcd.to_string();
@@ -29,7 +37,7 @@ pub fn kube_prometheus_helm_chart_score(
     let node_exporter = config.node_exporter.to_string();
     let prometheus_operator = config.prometheus_operator.to_string();
     let prometheus = config.prometheus.to_string();
-    let values = format!(
+    let mut values = format!(
         r#"
 additionalPrometheusRulesMap:
   pods-status-alerts:
@@ -148,6 +156,54 @@ prometheus:
   enabled: {prometheus}
 "#,
     );
+
+
+    let mut null_receiver = Mapping::new();
+    null_receiver.insert(
+        Value::String("receiver".to_string()),
+        Value::String("null".to_string()),
+    );
+    null_receiver.insert(
+        Value::String("matchers".to_string()),
+        Value::Sequence(vec![Value::String("alertname!=Watchdog".to_string())]),
+    );
+    null_receiver.insert(Value::String("continue".to_string()), Value::Bool(true));
+
+
+    let mut alert_manager_channel_config = AlertManagerConfig {
+        global: Mapping::new(),
+        route: AlertManagerRoute {
+            routes: vec![Value::Mapping(null_receiver)],
+        },
+        receivers: vec![serde_yaml::from_str("name: 'null'").unwrap()],
+    };
+    for receiver in config.alert_receiver_configs.iter() {
+        if let Some(global) = receiver.channel_global_config.clone() {
+            alert_manager_channel_config
+                .global
+                .insert(global.0, global.1);
+        }
+        alert_manager_channel_config
+            .route
+            .routes
+            .push(receiver.channel_route.clone());
+        alert_manager_channel_config
+            .receivers
+            .push(receiver.channel_receiver.clone());
+    }
+
+    let alert_manager_values = AlertManagerValues {
+        alertmanager: AlertManager {
+            enabled: config.alert_manager,
+            config: alert_manager_channel_config,
+        },
+    };
+
+    let alert_manager_yaml =
+        serde_yaml::to_string(&alert_manager_values).expect("Failed to serialize YAML");
+    debug!("serialized alert manager: \n {:#}", alert_manager_yaml);
+    values.push_str(&alert_manager_yaml);
+    debug!("full values.yaml: \n {:#}", values);
     HelmChartScore {
         namespace: Some(NonBlankString::from_str(&config.namespace).unwrap()),
         release_name: NonBlankString::from_str("kube-prometheus").unwrap(),
diff --git a/harmony/src/modules/monitoring/kube_prometheus/prometheus.rs b/harmony/src/modules/monitoring/kube_prometheus/prometheus.rs
index 68e7d46..f4a81d8 100644
--- a/harmony/src/modules/monitoring/kube_prometheus/prometheus.rs
+++ b/harmony/src/modules/monitoring/kube_prometheus/prometheus.rs
@@ -16,54 +16,46 @@ use crate::{
 
 use score::Score;
 
-use super::helm::{
+use super::{helm::{
     config::KubePrometheusConfig, kube_prometheus_helm_chart::kube_prometheus_helm_chart_score,
-};
+}, types::AlertManagerChannelConfig};
 
 #[async_trait]
 impl AlertSender for Prometheus {
     fn name(&self) -> String {
         "HelmKubePrometheus".to_string()
     }
-    async fn install<T: Topology + HelmCommand>(
-        &self,
-        inventory: &Inventory,
-        topology: &T,
-    ) -> Result<Outcome, InterpretError> {
-        let _ = self.install_prometheus(inventory, topology).await;
-        todo!()
-    }
-}
-
-//im not totally sure what to do in the impl installable
-//should we have a oncecell that checks insured is true?
-
-#[async_trait]
-impl Installable for Prometheus {
-    async fn ensure_installed(&self) -> Result<(), InterpretError> {
-        todo!()
-    }
-}
-
-//before we talked about having a trait installable and a trait installer for the topology
-// i feel like that might still be necessary to meet the requirement of inventory and topology on
-// the score.create_interpret().execute(inventory, topology) method
-#[async_trait]
-pub trait Installer {
-    async fn install(&self, sender: I) -> Result<(), InterpretError>;
-}
 
 #[async_trait]
-impl Installer for K8sAnywhereTopology {
-    async fn install(
-        &self,
-        installable: I,
-    ) -> Result<(), InterpretError> {
-        installable.ensure_installed().await?;
+impl<T: Topology + HelmCommand> Installable<T> for Prometheus {
+    async fn ensure_installed(&self, inventory: &Inventory, topology: &T) -> Result<(), InterpretError> {
+        //install_prometheus
+        self.install_prometheus(inventory, topology).await?;
         Ok(())
     }
 }
 
+// //before we talked about having a trait installable and a trait installer for the topology
+// // i feel like that might still be necessary to meet the requirement of inventory and topology on
+// // the score.create_interpret().execute(inventory, topology) method
+// #[async_trait]
+// pub trait Installer {
+//     async fn install(&self, inventory: &Inventory, sender: Box<dyn AlertSender>) -> Result<(), InterpretError>;
+// }
+//
+// #[async_trait]
+// impl Installer for K8sAnywhereTopology {
+//     async fn install(
+//         &self,
+//         inventory: &Inventory,
+//         installable: Box<dyn Installable<K8sAnywhereTopology>>,
+//     ) -> Result<(), InterpretError> {
+//         installable.ensure_installed(inventory, self).await?;
+//         Ok(())
+//     }
+// }
+
 #[derive(Debug)]
 pub struct Prometheus {
     pub config: Arc<Mutex<KubePrometheusConfig>>,
@@ -74,7 +66,7 @@ impl Prometheus {
         &self,
         prometheus_receiver: &dyn PrometheusReceiver,
     ) -> Result<Outcome, InterpretError> {
-        let prom_receiver = prometheus_receiver.receiver_config().await;
+        let prom_receiver = prometheus_receiver.configure_receiver().await;
         debug!(
             "adding alert receiver to prometheus config: {:#?}",
             &prom_receiver
@@ -105,7 +97,7 @@ impl Prometheus {
 #[async_trait]
 pub trait PrometheusReceiver: Send + Sync + std::fmt::Debug {
     fn name(&self) -> String;
-    async fn receiver_config(&self) -> Value;
+    async fn configure_receiver(&self) -> AlertManagerChannelConfig;
     //this probably needs to be a type
     //that
     //represents
diff --git a/harmony/src/modules/monitoring/kube_prometheus/types.rs b/harmony/src/modules/monitoring/kube_prometheus/types.rs
index d6f28d2..f237bba 100644
--- a/harmony/src/modules/monitoring/kube_prometheus/types.rs
+++ b/harmony/src/modules/monitoring/kube_prometheus/types.rs
@@ -1,9 +1,40 @@
 use async_trait::async_trait;
-use serde_yaml::Value;
+use serde::Serialize;
+use serde_yaml::{Mapping, Sequence, Value};
 
 #[async_trait]
 pub trait AlertChannelConfig {
-    async fn alert_channel_global_config(&self) -> Option<(Value, Value)>;
-    async fn alert_channel_route(&self) -> Value;
-    async fn alert_channel_receiver(&self) -> Value;
+    async fn get_config(&self) -> AlertManagerChannelConfig;
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct AlertManagerValues {
+    pub alertmanager: AlertManager,
+}
+#[derive(Debug, Clone, Serialize)]
+pub struct AlertManager {
+    pub enabled: bool,
+    pub config: AlertManagerConfig,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct AlertManagerConfig {
+    pub global: Mapping,
+    pub route: AlertManagerRoute,
+    pub receivers: Sequence,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct AlertManagerRoute {
+    pub routes: Sequence,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct AlertManagerChannelConfig {
+    ///expecting an option that contains two values
+    ///if necessary for the alertchannel
+    ///[ jira_api_url: ]
+    pub channel_global_config: Option<(Value, Value)>,
+    pub channel_route: Value,
+    pub channel_receiver: Value,
 }
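
Not part of the patch above: a rough sketch of how another alert channel could plug into the reworked traits, mirroring what this diff does for DiscordWebhook. The GenericWebhook type, its fields, and the exact keys written into the route and receiver mappings are hypothetical illustrations; only AlertChannelConfig::get_config, PrometheusReceiver::configure_receiver, and AlertManagerChannelConfig come from the changed code, and the AlertReceiver impl (whose shape is not shown in this diff) is omitted.

use async_trait::async_trait;
use serde_yaml::{Mapping, Value};

use crate::modules::monitoring::kube_prometheus::{
    prometheus::PrometheusReceiver,
    types::{AlertChannelConfig, AlertManagerChannelConfig},
};

// Hypothetical channel type, used only for illustration.
#[derive(Debug)]
pub struct GenericWebhook {
    pub name: String,
    pub url: String,
}

#[async_trait]
impl AlertChannelConfig for GenericWebhook {
    async fn get_config(&self) -> AlertManagerChannelConfig {
        // Route entry pointing alerts at this receiver, mirroring the
        // null-receiver mapping built in kube_prometheus_helm_chart_score.
        let mut route = Mapping::new();
        route.insert(
            Value::String("receiver".to_string()),
            Value::String(self.name.clone()),
        );
        route.insert(Value::String("continue".to_string()), Value::Bool(true));

        // Plain Alertmanager webhook receiver entry (keys are illustrative).
        let mut webhook = Mapping::new();
        webhook.insert(
            Value::String("url".to_string()),
            Value::String(self.url.clone()),
        );
        let mut receiver = Mapping::new();
        receiver.insert(
            Value::String("name".to_string()),
            Value::String(self.name.clone()),
        );
        receiver.insert(
            Value::String("webhook_configs".to_string()),
            Value::Sequence(vec![Value::Mapping(webhook)]),
        );

        AlertManagerChannelConfig {
            // No global settings needed for a bare webhook channel.
            channel_global_config: None,
            channel_route: Value::Mapping(route),
            channel_receiver: Value::Mapping(receiver),
        }
    }
}

#[async_trait]
impl PrometheusReceiver for GenericWebhook {
    fn name(&self) -> String {
        self.name.clone()
    }
    async fn configure_receiver(&self) -> AlertManagerChannelConfig {
        self.get_config().await
    }
}

With this shape, kube_prometheus_helm_chart_score folds the returned channel_route and channel_receiver into the alertmanager section the same way it does for the built-in null receiver, and channel_global_config stays None unless the channel needs a global key.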