Commit e3efb1b

remove querier metrics
1 parent 0b2d1a7 commit e3efb1b

3 files changed: 27 additions & 75 deletions


src/handlers/http/cluster/mod.rs

Lines changed: 5 additions & 8 deletions
@@ -737,13 +737,11 @@ pub async fn get_node_info<T: Metadata + DeserializeOwned>(
     )
     .await?
     .iter()
-    .filter_map(|x| {
-        match serde_json::from_slice::<T>(x) {
-            Ok(val) => Some(val),
-            Err(e) => {
-                error!("Failed to parse node metadata: {:?}", e);
-                None
-            }
+    .filter_map(|x| match serde_json::from_slice::<T>(x) {
+        Ok(val) => Some(val),
+        Err(e) => {
+            error!("Failed to parse node metadata: {:?}", e);
+            None
         }
     })
     .collect();
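The hunk above is purely a reformat: the closure body moves into filter_map's argument position and behaviour is unchanged, with entries that fail to deserialize still logged and skipped. A rough standalone sketch of the pattern, assuming serde and serde_json as dependencies; the Node type, the parse_nodes helper, and eprintln! (standing in for tracing's error!) are illustrative, not code from this repository:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Node {
    domain_name: String,
}

// Hypothetical helper: deserialize each raw payload, log and drop the ones that fail.
fn parse_nodes(raw: &[Vec<u8>]) -> Vec<Node> {
    raw.iter()
        .filter_map(|x| match serde_json::from_slice::<Node>(x) {
            Ok(val) => Some(val),
            Err(e) => {
                eprintln!("Failed to parse node metadata: {:?}", e);
                None
            }
        })
        .collect()
}

fn main() {
    let raw = vec![
        br#"{"domain_name":"http://ingestor-0:8000"}"#.to_vec(),
        b"not json".to_vec(),
    ];
    // Only the first payload survives; the second is logged and skipped.
    println!("{:?}", parse_nodes(&raw));
}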
@@ -992,7 +990,6 @@ async fn fetch_cluster_metrics() -> Result<Vec<Metrics>, PostError> {
         Err(err) => return Err(err),
     }

-    all_metrics.push(Metrics::querier_prometheus_metrics().await);
     Ok(all_metrics)
 }

src/handlers/http/modal/ingest_server.rs

Lines changed: 1 addition & 1 deletion
@@ -30,8 +30,8 @@ use serde_json::Value;
 use tokio::sync::oneshot;
 use tokio::sync::OnceCell;

-use crate::metrics::init_system_metrics_scheduler;
 use crate::handlers::http::modal::NodeType;
+use crate::metrics::init_system_metrics_scheduler;
 use crate::{
     analytics,
     handlers::{

src/metrics/prom_utils.rs

Lines changed: 21 additions & 66 deletions
@@ -16,15 +16,11 @@
  *
  */

-use std::collections::HashMap;
-use std::path::Path;
-
-use crate::about::current;
+use super::DiskMetrics;
+use super::MemoryMetrics;
 use crate::handlers::http::base_path_without_preceding_slash;
 use crate::handlers::http::ingest::PostError;
 use crate::handlers::http::modal::Metadata;
-use crate::option::Mode;
-use crate::parseable::PARSEABLE;
 use crate::HTTP_CLIENT;
 use actix_web::http::header;
 use chrono::NaiveDateTime;
@@ -34,15 +30,11 @@ use prometheus_parse::Value as PromValue;
 use serde::Serialize;
 use serde_json::Error as JsonError;
 use serde_json::Value as JsonValue;
+use std::collections::HashMap;
 use tracing::error;
 use tracing::warn;
 use url::Url;

-use super::get_system_metrics;
-use super::get_volume_disk_usage;
-use super::DiskMetrics;
-use super::MemoryMetrics;
-
 #[derive(Debug, Serialize, Clone)]
 pub struct Metrics {
     address: String,
@@ -200,57 +192,6 @@ impl Metrics {
         Ok(metrics)
     }

-    pub async fn querier_prometheus_metrics() -> Self {
-        let mut metrics = Metrics::new(
-            PARSEABLE.options.get_url(Mode::Query).to_string(),
-            "querier".to_string(),
-        );
-
-        let system_metrics = get_system_metrics().expect("Failed to get system metrics");
-
-        metrics.parseable_memory_usage.total = system_metrics.memory.total;
-        metrics.parseable_memory_usage.used = system_metrics.memory.used;
-        metrics.parseable_memory_usage.total_swap = system_metrics.memory.total_swap;
-        metrics.parseable_memory_usage.used_swap = system_metrics.memory.used_swap;
-        for cpu_usage in system_metrics.cpu {
-            metrics
-                .parseable_cpu_usage
-                .insert(cpu_usage.name.clone(), cpu_usage.usage);
-        }
-
-        let staging_disk_usage = get_volume_disk_usage(PARSEABLE.options.staging_dir())
-            .expect("Failed to get staging volume disk usage");
-
-        metrics.parseable_staging_disk_usage.total = staging_disk_usage.total;
-        metrics.parseable_staging_disk_usage.used = staging_disk_usage.used;
-        metrics.parseable_staging_disk_usage.available = staging_disk_usage.available;
-
-        if PARSEABLE.get_storage_mode_string() == "Local drive" {
-            let data_disk_usage =
-                get_volume_disk_usage(Path::new(&PARSEABLE.storage().get_endpoint()))
-                    .expect("Failed to get data volume disk usage");
-
-            metrics.parseable_data_disk_usage.total = data_disk_usage.total;
-            metrics.parseable_data_disk_usage.used = data_disk_usage.used;
-            metrics.parseable_data_disk_usage.available = data_disk_usage.available;
-        }
-
-        if PARSEABLE.options.hot_tier_storage_path.is_some() {
-            let hot_tier_disk_usage =
-                get_volume_disk_usage(PARSEABLE.hot_tier_dir().as_ref().unwrap())
-                    .expect("Failed to get hot tier volume disk usage");
-
-            metrics.parseable_hot_tier_disk_usage.total = hot_tier_disk_usage.total;
-            metrics.parseable_hot_tier_disk_usage.used = hot_tier_disk_usage.used;
-            metrics.parseable_hot_tier_disk_usage.available = hot_tier_disk_usage.available;
-        }
-
-        metrics.commit = current().commit_hash;
-        metrics.staging = PARSEABLE.options.staging_dir().display().to_string();
-
-        metrics
-    }
-
     fn build_metrics_from_samples(
         samples: Vec<PromSample>,
         metrics: &mut Metrics,
@@ -286,7 +227,7 @@ impl Metrics {
                 Self::process_simple_gauge(metrics, &metric_name, val)
             }
             MetricType::StorageSize(storage_type) => {
-                Self::process_storage_size(metrics, &storage_type, val)
+                Self::process_storage_size(metrics, &storage_type, val, metric_name)
             }
             MetricType::DiskUsage(volume_type) => {
                 Self::process_disk_usage(metrics, &volume_type, val, metric_name)
@@ -318,10 +259,24 @@ impl Metrics {
         }
     }

-    fn process_storage_size(metrics: &mut Metrics, storage_type: &str, val: f64) {
+    fn process_storage_size(
+        metrics: &mut Metrics,
+        storage_type: &str,
+        val: f64,
+        metric_name: &str,
+    ) {
+        let target = match metric_name {
+            "parseable_storage_size" => &mut metrics.parseable_storage_size,
+            "parseable_lifetime_events_storage_size" => {
+                &mut metrics.parseable_lifetime_storage_size
+            }
+            "parseable_deleted_events_storage_size" => &mut metrics.parseable_deleted_storage_size,
+            _ => return,
+        };
+
         match storage_type {
-            "staging" => metrics.parseable_storage_size.staging += val,
-            "data" => metrics.parseable_storage_size.data += val,
+            "staging" => target.staging += val,
+            "data" => target.data += val,
             _ => {}
         }
     }
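The reworked process_storage_size above first picks an accumulator from the metric name (previously every storage-size sample was folded into parseable_storage_size alone), then splits by volume label exactly as before. A minimal self-contained sketch of that two-step dispatch, using simplified stand-in Metrics and StorageMetrics types rather than the crate's real structs:

#[derive(Debug, Default)]
struct StorageMetrics {
    staging: f64,
    data: f64,
}

#[derive(Debug, Default)]
struct Metrics {
    parseable_storage_size: StorageMetrics,
    parseable_lifetime_storage_size: StorageMetrics,
    parseable_deleted_storage_size: StorageMetrics,
}

fn process_storage_size(metrics: &mut Metrics, storage_type: &str, val: f64, metric_name: &str) {
    // Step 1: route by metric family; unknown metric names are ignored.
    let target = match metric_name {
        "parseable_storage_size" => &mut metrics.parseable_storage_size,
        "parseable_lifetime_events_storage_size" => &mut metrics.parseable_lifetime_storage_size,
        "parseable_deleted_events_storage_size" => &mut metrics.parseable_deleted_storage_size,
        _ => return,
    };

    // Step 2: accumulate into the chosen target by volume label.
    match storage_type {
        "staging" => target.staging += val,
        "data" => target.data += val,
        _ => {}
    }
}

fn main() {
    let mut metrics = Metrics::default();
    process_storage_size(&mut metrics, "data", 1024.0, "parseable_storage_size");
    process_storage_size(&mut metrics, "data", 512.0, "parseable_deleted_events_storage_size");
    println!("{:?}", metrics);
}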

0 commit comments
