wip: use the name from the tenant config as the namespace for the kube-prometheus deployment, defaulting to monitoring if no tenant config exists

Willem 2025-06-26 16:24:19 -04:00
parent 29e74a2712
commit 8e857bc72a
9 changed files with 47 additions and 14 deletions
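In effect, the Prometheus installer now asks the topology's tenant manager for its TenantConfig and uses that tenant's name as the Helm release namespace, falling back to "monitoring" when no config exists. A minimal sketch of that fallback rule, assuming a hypothetical standalone resolve_namespace helper and a stripped-down stand-in TenantConfig (only the name field used here; not the crate's real type):

// Illustrative only: a stand-in for the real TenantConfig, reduced to the one
// field this commit reads.
struct TenantConfig {
    name: String,
}

// Hypothetical helper mirroring the namespace choice in configure_with_topology:
// prefer the tenant's name, otherwise fall back to "monitoring".
fn resolve_namespace(tenant: Option<&TenantConfig>) -> String {
    tenant
        .map(|cfg| cfg.name.clone())
        .unwrap_or_else(|| "monitoring".to_string())
}

fn main() {
    let tenant = TenantConfig { name: "team-a".into() };
    assert_eq!(resolve_namespace(Some(&tenant)), "team-a");
    assert_eq!(resolve_namespace(None), "monitoring");
}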

View File

@@ -4,6 +4,8 @@ use crate::{interpret::InterpretError, inventory::Inventory};
 #[async_trait]
 pub trait Installable<T>: Send + Sync {
+    fn configure(&self, inventory: &Inventory, topology: &T) -> Result<(), InterpretError>;
+
     async fn ensure_installed(
         &self,
         inventory: &Inventory,

View File

@@ -35,6 +35,7 @@ enum K8sSource {
 pub struct K8sAnywhereTopology {
     k8s_state: OnceCell<Option<K8sState>>,
     tenant_manager: OnceCell<K8sTenantManager>,
+    tenant_manager_config: OnceCell<TenantConfig>,
     config: K8sAnywhereConfig,
 }
@@ -60,6 +61,7 @@ impl K8sAnywhereTopology {
         Self {
             k8s_state: OnceCell::new(),
             tenant_manager: OnceCell::new(),
+            tenant_manager_config: OnceCell::new(),
             config: K8sAnywhereConfig::from_env(),
         }
     }
@@ -68,6 +70,7 @@ impl K8sAnywhereTopology {
         Self {
             k8s_state: OnceCell::new(),
             tenant_manager: OnceCell::new(),
+            tenant_manager_config: OnceCell::new(),
             config,
         }
     }
@@ -182,7 +185,7 @@ impl K8sAnywhereTopology {
         self.tenant_manager
             .get_or_try_init(async || -> Result<K8sTenantManager, String> {
                 let k8s_client = self.k8s_client().await?;
-                Ok(K8sTenantManager::new(k8s_client))
+                Ok(K8sTenantManager::new(k8s_client, TenantConfig::default()))
             })
             .await
             .unwrap();
@@ -272,4 +275,8 @@ impl TenantManager for K8sAnywhereTopology {
             .provision_tenant(config)
             .await
     }
+
+    fn get_tenant_config(&self) -> Option<TenantConfig> {
+        self.tenant_manager_config.get().cloned()
+    }
 }

View File

@@ -27,6 +27,7 @@ impl<S: AlertSender + Installable<T>, T: Topology> Interpret<T> for AlertingInte
         inventory: &Inventory,
         topology: &T,
     ) -> Result<Outcome, InterpretError> {
+        self.sender.configure(inventory, topology)?;
         for receiver in self.receivers.iter() {
             receiver.install(&self.sender).await?;
         }

View File

@@ -25,6 +25,7 @@ use super::{TenantConfig, TenantManager};
 #[derive(new)]
 pub struct K8sTenantManager {
     k8s_client: Arc<K8sClient>,
+    k8s_tenant_config: TenantConfig,
 }

 impl K8sTenantManager {
@@ -324,4 +325,7 @@ impl TenantManager for K8sTenantManager {
         );
         Ok(())
     }
+    fn get_tenant_config(&self) -> Option<TenantConfig> {
+        Some(self.k8s_tenant_config.clone())
+    }
 }

View File

@@ -15,4 +15,6 @@ pub trait TenantManager {
     /// # Arguments
     /// * `config`: The desired configuration for the new tenant.
     async fn provision_tenant(&self, config: &TenantConfig) -> Result<(), ExecutorError>;
+
+    fn get_tenant_config(&self) -> Option<TenantConfig>;
 }

View File

@@ -1,13 +1,12 @@
 use serde::Serialize;

-use crate::modules::monitoring::{
-    alert_rule::prometheus_alert_rule::AlertManagerRuleGroup,
-    kube_prometheus::types::{AlertManagerAdditionalPromRules, AlertManagerChannelConfig},
-};
+use crate::modules::monitoring::
+    kube_prometheus::types::{AlertManagerAdditionalPromRules, AlertManagerChannelConfig}
+;

 #[derive(Debug, Clone, Serialize)]
 pub struct KubePrometheusConfig {
-    pub namespace: String,
+    pub namespace: Option<String>,
     pub default_rules: bool,
     pub windows_monitoring: bool,
     pub alert_manager: bool,
@@ -30,7 +29,7 @@ pub struct KubePrometheusConfig {
 impl KubePrometheusConfig {
     pub fn new() -> Self {
         Self {
-            namespace: "monitoring".into(),
+            namespace: None,
             default_rules: true,
             windows_monitoring: false,
             alert_manager: true,

View File

@@ -184,8 +184,9 @@ prometheus:
     values.push_str(&alert_manager_additional_rules_yaml);
     debug!("full values.yaml: \n {:#}", values);
     HelmChartScore {
-        namespace: Some(NonBlankString::from_str(&config.namespace).unwrap()),
+        namespace: Some(NonBlankString::from_str(&config.namespace.clone().unwrap()).unwrap()),
         release_name: NonBlankString::from_str("kube-prometheus").unwrap(),
         chart_name: NonBlankString::from_str(
             "oci://ghcr.io/prometheus-community/charts/kube-prometheus-stack",

View File

@@ -8,6 +8,7 @@ use crate::{
     topology::{
         HelmCommand, Topology,
         oberservability::monitoring::{AlertReceiver, AlertRule, AlertingInterpret},
+        tenant::TenantManager,
     },
 };
@@ -17,12 +18,10 @@ pub struct HelmPrometheusAlertingScore {
     pub rules: Vec<Box<dyn AlertRule<Prometheus>>>,
 }

-impl<T: Topology + HelmCommand> Score<T> for HelmPrometheusAlertingScore {
+impl<T: Topology + HelmCommand + TenantManager> Score<T> for HelmPrometheusAlertingScore {
     fn create_interpret(&self) -> Box<dyn crate::interpret::Interpret<T>> {
         Box::new(AlertingInterpret {
-            sender: Prometheus {
-                config: Arc::new(Mutex::new(KubePrometheusConfig::new())),
-            },
+            sender: Prometheus::new(),
             receivers: self.receivers.clone(),
             rules: self.rules.clone(),
         })

View File

@@ -10,9 +10,10 @@ use crate::{
     modules::monitoring::alert_rule::prometheus_alert_rule::AlertManagerRuleGroup,
     score,
     topology::{
-        HelmCommand, Topology,
+        HelmCommand, K8sAnywhereTopology, Topology,
         installable::Installable,
         oberservability::monitoring::{AlertReceiver, AlertRule, AlertSender},
+        tenant::TenantManager,
     },
 };
@@ -33,7 +34,12 @@ impl AlertSender for Prometheus {
 }

 #[async_trait]
-impl<T: Topology + HelmCommand> Installable<T> for Prometheus {
+impl<T: Topology + HelmCommand + TenantManager> Installable<T> for Prometheus {
+    fn configure(&self, _inventory: &Inventory, topology: &T) -> Result<(), InterpretError> {
+        self.configure_with_topology(topology);
+        Ok(())
+    }
+
     async fn ensure_installed(
         &self,
         inventory: &Inventory,
@@ -50,6 +56,18 @@ pub struct Prometheus {
 }

 impl Prometheus {
+    pub fn new() -> Self {
+        Self {
+            config: Arc::new(Mutex::new(KubePrometheusConfig::new())),
+        }
+    }
+
+    pub fn configure_with_topology<T: TenantManager>(&self, topology: &T) {
+        let ns = topology.get_tenant_config().map(|cfg| cfg.name.clone())
+            .unwrap_or_else(|| "monitoring".to_string());
+        self.config.lock().unwrap().namespace = Some(ns);
+    }
+
     pub async fn install_receiver(
         &self,
         prometheus_receiver: &dyn PrometheusReceiver,