Compare commits

8 commits

| SHA1 |
|---|
| c56e9d7be2 |
| c8f800a1e5 |
| fc6b3424cf |
| 35e06c6734 |
| 783d233319 |
| 6509a2b91a |
| 52f8c40b86 |
| a86b5ba8f9 |
Cargo.lock (generated, 6 lines changed)
@@ -270,7 +270,7 @@ checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
 
 [[package]]
 name = "cm-dashboard"
-version = "0.1.34"
+version = "0.1.43"
 dependencies = [
  "anyhow",
  "chrono",
@@ -291,7 +291,7 @@ dependencies = [
 
 [[package]]
 name = "cm-dashboard-agent"
-version = "0.1.34"
+version = "0.1.43"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -314,7 +314,7 @@ dependencies = [
 
 [[package]]
 name = "cm-dashboard-shared"
-version = "0.1.34"
+version = "0.1.43"
 dependencies = [
  "chrono",
  "serde",

@@ -1,6 +1,6 @@
 [package]
 name = "cm-dashboard-agent"
-version = "0.1.35"
+version = "0.1.43"
 edition = "2021"
 
 [dependencies]

@@ -8,6 +8,7 @@ use crate::communication::{AgentCommand, ServiceAction, ZmqHandler};
 use crate::config::AgentConfig;
 use crate::metrics::MetricCollectionManager;
 use crate::notifications::NotificationManager;
+use crate::service_tracker::UserStoppedServiceTracker;
 use crate::status::HostStatusManager;
 use cm_dashboard_shared::{Metric, MetricMessage, MetricValue, Status};
 
@@ -18,6 +19,7 @@ pub struct Agent {
     metric_manager: MetricCollectionManager,
     notification_manager: NotificationManager,
     host_status_manager: HostStatusManager,
+    service_tracker: UserStoppedServiceTracker,
 }
 
 impl Agent {
@@ -50,6 +52,10 @@ impl Agent {
         let host_status_manager = HostStatusManager::new(config.status_aggregation.clone());
         info!("Host status manager initialized");
 
+        // Initialize user-stopped service tracker
+        let service_tracker = UserStoppedServiceTracker::init_global()?;
+        info!("User-stopped service tracker initialized");
+
         Ok(Self {
             hostname,
             config,
@@ -57,6 +63,7 @@ impl Agent {
             metric_manager,
             notification_manager,
             host_status_manager,
+            service_tracker,
         })
     }
@@ -271,13 +278,38 @@ impl Agent {
 
     /// Handle systemd service control commands
     async fn handle_service_control(&mut self, service_name: &str, action: &ServiceAction) -> Result<()> {
-        let action_str = match action {
-            ServiceAction::Start => "start",
-            ServiceAction::Stop => "stop",
-            ServiceAction::Status => "status",
+        let (action_str, is_user_action) = match action {
+            ServiceAction::Start => ("start", false),
+            ServiceAction::Stop => ("stop", false),
+            ServiceAction::Status => ("status", false),
+            ServiceAction::UserStart => ("start", true),
+            ServiceAction::UserStop => ("stop", true),
         };
 
-        info!("Executing systemctl {} {}", action_str, service_name);
+        info!("Executing systemctl {} {} (user action: {})", action_str, service_name, is_user_action);
+
+        // Handle user-stopped service tracking before systemctl execution
+        match action {
+            ServiceAction::UserStop => {
+                info!("Marking service '{}' as user-stopped", service_name);
+                if let Err(e) = self.service_tracker.mark_user_stopped(service_name) {
+                    error!("Failed to mark service as user-stopped: {}", e);
+                } else {
+                    // Sync to global tracker
+                    UserStoppedServiceTracker::update_global(&self.service_tracker);
+                }
+            }
+            ServiceAction::UserStart => {
+                info!("Clearing user-stopped flag for service '{}'", service_name);
+                if let Err(e) = self.service_tracker.clear_user_stopped(service_name) {
+                    error!("Failed to clear user-stopped flag: {}", e);
+                } else {
+                    // Sync to global tracker
+                    UserStoppedServiceTracker::update_global(&self.service_tracker);
+                }
+            }
+            _ => {}
+        }
 
         let output = tokio::process::Command::new("sudo")
             .arg("systemctl")
@@ -298,7 +330,7 @@ impl Agent {
         }
 
         // Force refresh metrics after service control to update service status
-        if matches!(action, ServiceAction::Start | ServiceAction::Stop) {
+        if matches!(action, ServiceAction::Start | ServiceAction::Stop | ServiceAction::UserStart | ServiceAction::UserStop) {
            info!("Triggering immediate metric refresh after service control");
            if let Err(e) = self.collect_metrics_only().await {
                error!("Failed to refresh metrics after service control: {}", e);

@@ -37,6 +37,22 @@ impl NixOSCollector {
     }
 
-    /// Get configuration hash from deployed nix store system
+    /// Get git commit hash from rebuild process
+    fn get_git_commit(&self) -> Result<String, Box<dyn std::error::Error>> {
+        let commit_file = "/var/lib/cm-dashboard/git-commit";
+        match std::fs::read_to_string(commit_file) {
+            Ok(content) => {
+                let commit_hash = content.trim();
+                if commit_hash.len() >= 7 {
+                    Ok(commit_hash.to_string())
+                } else {
+                    Err("Git commit hash too short".into())
+                }
+            }
+            Err(e) => Err(format!("Failed to read git commit file: {}", e).into())
+        }
+    }
+
     fn get_config_hash(&self) -> Result<String, Box<dyn std::error::Error>> {
         // Read the symlink target of /run/current-system to get nix store path
         let output = Command::new("readlink")
@@ -74,25 +90,25 @@ impl Collector for NixOSCollector {
         let mut metrics = Vec::new();
         let timestamp = chrono::Utc::now().timestamp() as u64;
 
-        // Collect NixOS build information (config hash)
-        match self.get_config_hash() {
-            Ok(config_hash) => {
+        // Collect git commit information (shows what's actually deployed)
+        match self.get_git_commit() {
+            Ok(git_commit) => {
                 metrics.push(Metric {
                     name: "system_nixos_build".to_string(),
-                    value: MetricValue::String(config_hash),
+                    value: MetricValue::String(git_commit),
                     unit: None,
-                    description: Some("NixOS deployed configuration hash".to_string()),
+                    description: Some("Git commit hash of deployed configuration".to_string()),
                     status: Status::Ok,
                     timestamp,
                 });
             }
             Err(e) => {
-                debug!("Failed to get config hash: {}", e);
+                debug!("Failed to get git commit: {}", e);
                 metrics.push(Metric {
                     name: "system_nixos_build".to_string(),
                     value: MetricValue::String("unknown".to_string()),
                     unit: None,
-                    description: Some("NixOS config hash (failed to detect)".to_string()),
+                    description: Some("Git commit hash (failed to detect)".to_string()),
                     status: Status::Unknown,
                     timestamp,
                 });
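
The new collector path reads a plain-text commit hash from /var/lib/cm-dashboard/git-commit; how the rebuild process writes that file is outside this diff. Below is a minimal, standalone sketch of the contract get_git_commit() enforces, using a temp path and a hypothetical hash so it runs without the deployed file:

// Sketch only: demonstrates the file contract assumed by get_git_commit().
// The path and hash below are illustrative, not taken from this diff.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let dir = std::env::temp_dir().join("cm-dashboard-demo");
    std::fs::create_dir_all(&dir)?;
    let path = dir.join("git-commit");
    std::fs::write(&path, "c56e9d7be2\n")?; // the rebuild would write the real hash

    let content = std::fs::read_to_string(&path)?;
    let commit_hash = content.trim();
    // The collector rejects anything shorter than 7 characters as "too short".
    assert!(commit_hash.len() >= 7);
    println!("deployed commit: {}", commit_hash);
    Ok(())
}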
@@ -8,6 +8,7 @@ use tracing::debug;
 
 use super::{Collector, CollectorError};
 use crate::config::SystemdConfig;
+use crate::service_tracker::UserStoppedServiceTracker;
 
 /// Systemd collector for monitoring systemd services
 pub struct SystemdCollector {
@@ -353,11 +354,19 @@ impl SystemdCollector {
         Ok((active_status, detailed_info))
     }
 
-    /// Calculate service status
-    fn calculate_service_status(&self, active_status: &str) -> Status {
+    /// Calculate service status, taking user-stopped services into account
+    fn calculate_service_status(&self, service_name: &str, active_status: &str) -> Status {
         match active_status.to_lowercase().as_str() {
             "active" => Status::Ok,
-            "inactive" | "dead" => Status::Warning,
+            "inactive" | "dead" => {
+                // Check if this service was stopped by user action
+                if UserStoppedServiceTracker::is_service_user_stopped(service_name) {
+                    debug!("Service '{}' is inactive but marked as user-stopped - treating as OK", service_name);
+                    Status::Ok
+                } else {
+                    Status::Warning
+                }
+            },
             "failed" | "error" => Status::Critical,
             "activating" | "deactivating" | "reloading" | "start" | "stop" | "restart" => Status::Pending,
             _ => Status::Unknown,
@@ -480,7 +489,7 @@ impl Collector for SystemdCollector {
         for service in &monitored_services {
             match self.get_service_status(service) {
                 Ok((active_status, _detailed_info)) => {
-                    let status = self.calculate_service_status(&active_status);
+                    let status = self.calculate_service_status(service, &active_status);
 
                     // Individual service status metric
                     metrics.push(Metric {
@@ -555,10 +564,8 @@ impl SystemdCollector {
         for (site_name, url) in &sites {
             match self.check_site_latency(url) {
                 Ok(latency_ms) => {
-                    let status = if latency_ms < 500.0 {
+                    let status = if latency_ms < self.config.nginx_latency_critical_ms {
                         Status::Ok
-                    } else if latency_ms < 2000.0 {
-                        Status::Warning
                     } else {
                         Status::Critical
                     };
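
Since calculate_service_status() now folds tracker state into the result, the same systemctl state can map to two different statuses. The following standalone sketch restates the decision table from the hunk above, with a boolean standing in for the global tracker lookup (names are illustrative, not repo code):

// Sketch of the decision logic above; `user_stopped` stands in for
// UserStoppedServiceTracker::is_service_user_stopped(service_name).
fn status_for(active_status: &str, user_stopped: bool) -> &'static str {
    match active_status.to_lowercase().as_str() {
        "active" => "Ok",
        // User-stopped services no longer raise a Warning.
        "inactive" | "dead" => if user_stopped { "Ok" } else { "Warning" },
        "failed" | "error" => "Critical",
        "activating" | "deactivating" | "reloading" | "start" | "stop" | "restart" => "Pending",
        _ => "Unknown",
    }
}

fn main() {
    assert_eq!(status_for("inactive", true), "Ok");       // suppressed warning
    assert_eq!(status_for("inactive", false), "Warning"); // genuine alert
    assert_eq!(status_for("failed", true), "Critical");   // user-stop never masks failure
}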
@@ -113,4 +113,6 @@ pub enum ServiceAction {
     Start,
     Stop,
     Status,
+    UserStart, // User-initiated start (clears user-stopped flag)
+    UserStop,  // User-initiated stop (marks as user-stopped)
 }

@@ -108,6 +108,7 @@ pub struct SystemdConfig {
     pub nginx_check_interval_seconds: u64,
     pub http_timeout_seconds: u64,
     pub http_connect_timeout_seconds: u64,
+    pub nginx_latency_critical_ms: f32,
 }
@@ -83,6 +83,13 @@ pub fn validate_config(config: &AgentConfig) -> Result<()> {
         }
     }
 
+    // Validate systemd configuration
+    if config.collectors.systemd.enabled {
+        if config.collectors.systemd.nginx_latency_critical_ms <= 0.0 {
+            bail!("Nginx latency critical threshold must be positive");
+        }
+    }
+
     // Validate SMTP configuration
     if config.notifications.enabled {
         if config.notifications.smtp_host.is_empty() {

@@ -9,6 +9,7 @@ mod communication;
 mod config;
 mod metrics;
 mod notifications;
+mod service_tracker;
 mod status;
 
 use agent::Agent;

agent/src/service_tracker.rs (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex, OnceLock};
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Shared instance for global access
|
||||
static GLOBAL_TRACKER: OnceLock<Arc<Mutex<UserStoppedServiceTracker>>> = OnceLock::new();
|
||||
|
||||
/// Tracks services that have been stopped by user action
|
||||
/// These services should be treated as OK status instead of Warning
|
||||
#[derive(Debug)]
|
||||
pub struct UserStoppedServiceTracker {
|
||||
/// Set of services stopped by user action
|
||||
user_stopped_services: HashSet<String>,
|
||||
/// Path to persistent storage file
|
||||
storage_path: String,
|
||||
}
|
||||
|
||||
/// Serializable data structure for persistence
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct UserStoppedData {
|
||||
services: Vec<String>,
|
||||
}
|
||||
|
||||
impl UserStoppedServiceTracker {
|
||||
/// Create new tracker with default storage path
|
||||
pub fn new() -> Self {
|
||||
Self::with_storage_path("/var/lib/cm-dashboard/user-stopped-services.json")
|
||||
}
|
||||
|
||||
/// Initialize global instance (called by agent)
|
||||
pub fn init_global() -> Result<Self> {
|
||||
let tracker = Self::new();
|
||||
|
||||
// Set global instance
|
||||
let global_instance = Arc::new(Mutex::new(tracker));
|
||||
if GLOBAL_TRACKER.set(global_instance).is_err() {
|
||||
warn!("Global service tracker was already initialized");
|
||||
}
|
||||
|
||||
// Return a new instance for the agent to use
|
||||
Ok(Self::new())
|
||||
}
|
||||
|
||||
/// Check if a service is user-stopped (global access for collectors)
|
||||
pub fn is_service_user_stopped(service_name: &str) -> bool {
|
||||
if let Some(global) = GLOBAL_TRACKER.get() {
|
||||
if let Ok(tracker) = global.lock() {
|
||||
tracker.is_user_stopped(service_name)
|
||||
} else {
|
||||
debug!("Failed to lock global service tracker");
|
||||
false
|
||||
}
|
||||
} else {
|
||||
debug!("Global service tracker not initialized");
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Update global tracker (called by agent when tracker state changes)
|
||||
pub fn update_global(updated_tracker: &UserStoppedServiceTracker) {
|
||||
if let Some(global) = GLOBAL_TRACKER.get() {
|
||||
if let Ok(mut tracker) = global.lock() {
|
||||
tracker.user_stopped_services = updated_tracker.user_stopped_services.clone();
|
||||
} else {
|
||||
debug!("Failed to lock global service tracker for update");
|
||||
}
|
||||
} else {
|
||||
debug!("Global service tracker not initialized for update");
|
||||
}
|
||||
}
|
||||
|
||||
/// Create new tracker with custom storage path
|
||||
pub fn with_storage_path<P: AsRef<Path>>(storage_path: P) -> Self {
|
||||
let storage_path = storage_path.as_ref().to_string_lossy().to_string();
|
||||
let mut tracker = Self {
|
||||
user_stopped_services: HashSet::new(),
|
||||
storage_path,
|
||||
};
|
||||
|
||||
// Load existing data from storage
|
||||
if let Err(e) = tracker.load_from_storage() {
|
||||
warn!("Failed to load user-stopped services from storage: {}", e);
|
||||
info!("Starting with empty user-stopped services list");
|
||||
}
|
||||
|
||||
tracker
|
||||
}
|
||||
|
||||
/// Mark a service as user-stopped
|
||||
pub fn mark_user_stopped(&mut self, service_name: &str) -> Result<()> {
|
||||
info!("Marking service '{}' as user-stopped", service_name);
|
||||
self.user_stopped_services.insert(service_name.to_string());
|
||||
self.save_to_storage()?;
|
||||
debug!("Service '{}' marked as user-stopped and saved to storage", service_name);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clear user-stopped flag for a service (when user starts it)
|
||||
pub fn clear_user_stopped(&mut self, service_name: &str) -> Result<()> {
|
||||
if self.user_stopped_services.remove(service_name) {
|
||||
info!("Cleared user-stopped flag for service '{}'", service_name);
|
||||
self.save_to_storage()?;
|
||||
debug!("Service '{}' user-stopped flag cleared and saved to storage", service_name);
|
||||
} else {
|
||||
debug!("Service '{}' was not marked as user-stopped", service_name);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check if a service is marked as user-stopped
|
||||
pub fn is_user_stopped(&self, service_name: &str) -> bool {
|
||||
let is_stopped = self.user_stopped_services.contains(service_name);
|
||||
debug!("Service '{}' user-stopped status: {}", service_name, is_stopped);
|
||||
is_stopped
|
||||
}
|
||||
|
||||
|
||||
/// Save current state to persistent storage
|
||||
fn save_to_storage(&self) -> Result<()> {
|
||||
// Create parent directory if it doesn't exist
|
||||
if let Some(parent_dir) = Path::new(&self.storage_path).parent() {
|
||||
if !parent_dir.exists() {
|
||||
fs::create_dir_all(parent_dir)?;
|
||||
debug!("Created parent directory: {}", parent_dir.display());
|
||||
}
|
||||
}
|
||||
|
||||
let data = UserStoppedData {
|
||||
services: self.user_stopped_services.iter().cloned().collect(),
|
||||
};
|
||||
|
||||
let json_data = serde_json::to_string_pretty(&data)?;
|
||||
fs::write(&self.storage_path, json_data)?;
|
||||
|
||||
debug!(
|
||||
"Saved {} user-stopped services to {}",
|
||||
data.services.len(),
|
||||
self.storage_path
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load state from persistent storage
|
||||
fn load_from_storage(&mut self) -> Result<()> {
|
||||
if !Path::new(&self.storage_path).exists() {
|
||||
debug!("Storage file {} does not exist, starting fresh", self.storage_path);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let json_data = fs::read_to_string(&self.storage_path)?;
|
||||
let data: UserStoppedData = serde_json::from_str(&json_data)?;
|
||||
|
||||
self.user_stopped_services = data.services.into_iter().collect();
|
||||
|
||||
info!(
|
||||
"Loaded {} user-stopped services from {}",
|
||||
self.user_stopped_services.len(),
|
||||
self.storage_path
|
||||
);
|
||||
|
||||
if !self.user_stopped_services.is_empty() {
|
||||
debug!("User-stopped services: {:?}", self.user_stopped_services);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
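
The persistence round-trip is the core of this new file: mark/clear mutate the set and save JSON, and a fresh tracker reloads it, which is how the flag survives agent restarts. Here is a minimal lifecycle sketch against the API above, using a temp path instead of the /var/lib default so it runs unprivileged; "nginx.service" is a hypothetical service name, and in the real agent init_global() plus update_global() additionally sync state to the collectors:

// Lifecycle sketch using the tracker API above. Path and service name
// are illustrative; the real agent uses the /var/lib default.
fn main() -> anyhow::Result<()> {
    let path = std::env::temp_dir().join("user-stopped-demo.json");

    // Dashboard sends UserStop -> agent marks the service and persists.
    let mut tracker = UserStoppedServiceTracker::with_storage_path(&path);
    tracker.mark_user_stopped("nginx.service")?;
    assert!(tracker.is_user_stopped("nginx.service"));

    // A fresh tracker on the same path reloads the persisted state,
    // so the flag survives an agent restart.
    let reloaded = UserStoppedServiceTracker::with_storage_path(&path);
    assert!(reloaded.is_user_stopped("nginx.service"));

    // Dashboard sends UserStart -> agent clears the flag and persists.
    tracker.clear_user_stopped("nginx.service")?;
    assert!(!tracker.is_user_stopped("nginx.service"));
    Ok(())
}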
@@ -1,6 +1,6 @@
 [package]
 name = "cm-dashboard"
-version = "0.1.35"
+version = "0.1.43"
 edition = "2021"
 
 [dependencies]

@@ -295,18 +295,18 @@ impl Dashboard {
     async fn execute_ui_command(&self, command: UiCommand) -> Result<()> {
         match command {
             UiCommand::ServiceStart { hostname, service_name } => {
-                info!("Sending start command for service {} on {}", service_name, hostname);
+                info!("Sending user start command for service {} on {}", service_name, hostname);
                 let agent_command = AgentCommand::ServiceControl {
                     service_name: service_name.clone(),
-                    action: ServiceAction::Start,
+                    action: ServiceAction::UserStart,
                 };
                 self.zmq_command_sender.send_command(&hostname, agent_command).await?;
             }
             UiCommand::ServiceStop { hostname, service_name } => {
-                info!("Sending stop command for service {} on {}", service_name, hostname);
+                info!("Sending user stop command for service {} on {}", service_name, hostname);
                 let agent_command = AgentCommand::ServiceControl {
                     service_name: service_name.clone(),
-                    action: ServiceAction::Stop,
+                    action: ServiceAction::UserStop,
                 };
                 self.zmq_command_sender.send_command(&hostname, agent_command).await?;
             }

@@ -36,6 +36,8 @@ pub enum ServiceAction {
     Start,
     Stop,
     Status,
+    UserStart, // User-initiated start (clears user-stopped flag)
+    UserStop,  // User-initiated stop (marks as user-stopped)
 }
 
 /// ZMQ consumer for receiving metrics from agents

@@ -12,10 +12,6 @@ mod ui;
 
 use app::Dashboard;
 
-/// Get hardcoded version
-fn get_version() -> &'static str {
-    "v0.1.33"
-}
-
 /// Check if running inside tmux session
 fn check_tmux_session() {
@@ -42,7 +38,7 @@ fn check_tmux_session() {
 #[derive(Parser)]
 #[command(name = "cm-dashboard")]
 #[command(about = "CM Dashboard TUI with individual metric consumption")]
-#[command(version = get_version())]
+#[command(version)]
 struct Cli {
     /// Increase logging verbosity (-v, -vv)
     #[arg(short, long, action = clap::ArgAction::Count)]

@@ -244,16 +244,23 @@ impl TuiApp {
             KeyCode::Char('r') => {
                 // System rebuild command - works on any panel for current host
                 if let Some(hostname) = self.current_host.clone() {
-                    // Launch tmux popup with SSH using config values
-                    let ssh_command = format!(
-                        "ssh -tt {}@{} 'bash -ic {}'",
+                    // Create command that shows CM Dashboard logo and then rebuilds
+                    let logo_and_rebuild = format!(
+                        r"cat << 'EOF'
+NixOS System Rebuild
+Target: {}
+
+EOF
+ssh -tt {}@{} 'bash -ic {}'",
+                        hostname,
                         self.config.ssh.rebuild_user,
                         hostname,
                         self.config.ssh.rebuild_alias
                     );
 
                     std::process::Command::new("tmux")
                         .arg("display-popup")
-                        .arg(&ssh_command)
+                        .arg(&logo_and_rebuild)
                         .spawn()
                         .ok(); // Ignore errors, tmux will handle them
                 }
@@ -537,24 +544,34 @@ impl TuiApp {
         if self.available_hosts.is_empty() {
             let title_text = "cm-dashboard • no hosts discovered";
             let title = Paragraph::new(title_text)
-                .style(Style::default().fg(Theme::background()).bg(Theme::highlight()));
+                .style(Style::default().fg(Theme::background()).bg(Theme::status_color(Status::Unknown)));
             frame.render_widget(title, area);
             return;
         }
 
+        // Calculate worst-case status across all hosts
+        let mut worst_status = Status::Ok;
+        for host in &self.available_hosts {
+            let host_status = self.calculate_host_status(host, metric_store);
+            worst_status = Status::aggregate(&[worst_status, host_status]);
+        }
+
+        // Use the worst status color as background
+        let background_color = Theme::status_color(worst_status);
+
         // Split the title bar into left and right sections
         let chunks = Layout::default()
             .direction(Direction::Horizontal)
-            .constraints([Constraint::Min(0), Constraint::Min(0)])
+            .constraints([Constraint::Length(15), Constraint::Min(0)])
             .split(area);
 
         // Left side: "cm-dashboard" text
         let left_span = Span::styled(
-            "cm-dashboard",
-            Style::default().fg(Theme::background()).bg(Theme::highlight())
+            " cm-dashboard",
+            Style::default().fg(Theme::background()).bg(background_color).add_modifier(Modifier::BOLD)
         );
         let left_title = Paragraph::new(Line::from(vec![left_span]))
-            .style(Style::default().bg(Theme::highlight()));
+            .style(Style::default().bg(background_color));
         frame.render_widget(left_title, chunks[0]);
 
         // Right side: hosts with status indicators
@@ -564,7 +581,7 @@ impl TuiApp {
             if i > 0 {
                 host_spans.push(Span::styled(
                     " ",
-                    Style::default().fg(Theme::background()).bg(Theme::highlight())
+                    Style::default().fg(Theme::background()).bg(background_color)
                 ));
             }
@@ -572,33 +589,39 @@ impl TuiApp {
             let host_status = self.calculate_host_status(host, metric_store);
             let status_icon = StatusIcons::get_icon(host_status);
 
-            // Add status icon with background color as foreground against blue background
+            // Add status icon with background color as foreground against status background
             host_spans.push(Span::styled(
                 format!("{} ", status_icon),
-                Style::default().fg(Theme::background()).bg(Theme::highlight()),
+                Style::default().fg(Theme::background()).bg(background_color),
             ));
 
             if Some(host) == self.current_host.as_ref() {
-                // Selected host in bold background color against blue background
+                // Selected host in bold background color against status background
                 host_spans.push(Span::styled(
                     host.clone(),
                     Style::default()
                         .fg(Theme::background())
-                        .bg(Theme::highlight())
+                        .bg(background_color)
                         .add_modifier(Modifier::BOLD),
                 ));
             } else {
-                // Other hosts in normal background color against blue background
+                // Other hosts in normal background color against status background
                 host_spans.push(Span::styled(
                     host.clone(),
-                    Style::default().fg(Theme::background()).bg(Theme::highlight()),
+                    Style::default().fg(Theme::background()).bg(background_color),
                 ));
             }
         }
 
+        // Add right padding
+        host_spans.push(Span::styled(
+            " ",
+            Style::default().fg(Theme::background()).bg(background_color)
+        ));
+
         let host_line = Line::from(host_spans);
         let host_title = Paragraph::new(vec![host_line])
-            .style(Style::default().bg(Theme::highlight()))
+            .style(Style::default().bg(background_color))
             .alignment(ratatui::layout::Alignment::Right);
         frame.render_widget(host_title, chunks[1]);
     }
@@ -685,7 +708,7 @@ impl TuiApp {
             host_widgets.system_scroll_offset
         };
         let host_widgets = self.get_or_create_host_widgets(&hostname);
-        host_widgets.system_widget.render_with_scroll(frame, inner_area, scroll_offset);
+        host_widgets.system_widget.render_with_scroll(frame, inner_area, scroll_offset, &hostname);
     }
 }

@@ -292,12 +292,6 @@ impl Components {
 }
 
 impl Typography {
-    /// Main title style (dashboard header)
-    pub fn title() -> Style {
-        Style::default()
-            .fg(Theme::primary_text())
-            .bg(Theme::background())
-    }
-
     /// Widget title style (panel headers) - bold bright white
     pub fn widget_title() -> Style {

@@ -439,12 +439,12 @@ impl Widget for SystemWidget {
 
 impl SystemWidget {
     /// Render with scroll offset support
-    pub fn render_with_scroll(&mut self, frame: &mut Frame, area: Rect, scroll_offset: usize) {
+    pub fn render_with_scroll(&mut self, frame: &mut Frame, area: Rect, scroll_offset: usize, hostname: &str) {
         let mut lines = Vec::new();
 
         // NixOS section
         lines.push(Line::from(vec![
-            Span::styled("NixOS:", Typography::widget_title())
+            Span::styled(format!("NixOS {}:", hostname), Typography::widget_title())
         ]));
 
         let build_text = self.nixos_build.as_deref().unwrap_or("unknown");

@@ -1,6 +1,6 @@
 [package]
 name = "cm-dashboard-shared"
-version = "0.1.35"
+version = "0.1.43"
 edition = "2021"
 
 [dependencies]