Repository: https://github.com/zaphar/Heracles.git

commit 631fedfa2f
parent 41b450c302

feat: Point in time shows as bar chart
(dashboard config, YAML)
@@ -4,14 +4,17 @@
   - title: Node cpu
     source: http://heimdall:9001
     query: 'sum by (instance)(irate(node_cpu_seconds_total{mode="system",job="nodestats"}[5m])) * 100'
+    query_type: Range
     name_label: instance
 - title: Test Dasbboard 2
   graphs:
   - title: Node cpu
     source: http://heimdall:9001
     query: 'sum by (instance)(irate(node_cpu_seconds_total{mode="system",job="nodestats"}[5m])) * 100'
+    query_type: Range
     name_label: instance
   - title: Node memory
     source: http://heimdall:9001
-    query: 'node_memory_MemFree_bytes{,job="nodestats"}'
+    query: 'node_memory_MemFree_bytes{job="nodestats"}'
+    query_type: Scalar
     name_label: instance
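The new query_type key maps onto the QueryType enum added in src/query.rs further down. As a reference point, here is a minimal sketch of how one of these graph entries deserializes, using trimmed-down stand-ins for the Graph struct and QueryType enum; the repo's real definitions carry more than the fields visible in this diff, so treat the struct layout here as an assumption.

// Sketch only: `Graph` and `QueryType` are trimmed stand-ins for the repo's
// types, keeping just the fields visible in this diff.
use serde::Deserialize;

#[derive(Deserialize, Debug, Clone)]
enum QueryType {
    Range,
    Scalar,
}

#[derive(Deserialize, Debug)]
struct Graph {
    title: String,
    source: String,
    query: String,
    query_type: QueryType,
    name_label: String,
}

fn main() {
    let yaml = r#"
- title: Node memory
  source: http://heimdall:9001
  query: 'node_memory_MemFree_bytes{job="nodestats"}'
  query_type: Scalar
  name_label: instance
"#;
    let graphs: Vec<Graph> = serde_yaml::from_str(yaml).expect("valid graph config");
    // The plain strings `Range` / `Scalar` in YAML map onto the enum variants.
    assert!(matches!(graphs[0].query_type, QueryType::Scalar));
    println!("{:?}", graphs[0]);
}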
(dashboard module, Rust)
@@ -15,9 +15,9 @@ use std::path::Path;
 
 use serde::Deserialize;
 use serde_yaml;
-use tracing::{debug, info};
+use tracing::debug;
 
-use crate::query::QueryConn;
+use crate::query::{QueryConn, QueryType};
 
 #[derive(Deserialize)]
 pub struct Dashboard {
@@ -31,12 +31,13 @@ pub struct Graph {
     pub source: String,
     pub query: String,
     pub name_label: String,
+    pub query_type: QueryType,
 }
 
 impl Graph {
     pub fn get_query_connection<'conn, 'graph: 'conn>(&'graph self) -> QueryConn<'conn> {
         debug!(query=self.query, source=self.source, "Getting query connection for graph");
-        QueryConn::new(&self.source, &self.query)
+        QueryConn::new(&self.source, &self.query, self.query_type.clone())
     }
 }
 
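Because QueryConn borrows source and query but takes query_type by value, QueryType needs the Clone derive seen below. A small self-contained sketch of that ownership split, with the types again trimmed to the fields shown in this diff and a hypothetical query string:

// Sketch only: trimmed stand-ins for the repo's Graph and QueryConn showing
// why `query_type` is cloned while the strings are merely borrowed.
#[derive(Clone, Debug)]
enum QueryType {
    Range,
    Scalar,
}

struct QueryConn<'conn> {
    source: &'conn str,
    query: &'conn str,
    query_type: QueryType,
}

struct Graph {
    source: String,
    query: String,
    query_type: QueryType,
}

impl Graph {
    fn get_query_connection<'conn, 'graph: 'conn>(&'graph self) -> QueryConn<'conn> {
        QueryConn {
            source: &self.source,
            query: &self.query,
            query_type: self.query_type.clone(),
        }
    }
}

fn main() {
    let graph = Graph {
        source: "http://heimdall:9001".to_string(),
        query: "up".to_string(), // hypothetical query, not from the config above
        query_type: QueryType::Range,
    };
    let conn = graph.get_query_connection();
    // The graph still owns its query_type; the connection holds its own copy.
    println!("{:?} / {:?}", graph.query_type, conn.query_type);
}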
src/query.rs
@@ -18,16 +18,24 @@ use serde::{Serialize, Deserialize};
 use tracing::debug;
 use chrono::prelude::*;
 
+#[derive(Deserialize, Clone)]
+pub enum QueryType {
+    Range,
+    Scalar,
+}
+
 pub struct QueryConn<'conn> {
     source: &'conn str,
     query: &'conn str,
+    query_type: QueryType,
 }
 
 impl<'conn> QueryConn<'conn> {
-    pub fn new<'a: 'conn>(source: &'a str, query: &'a str) -> Self {
+    pub fn new<'a: 'conn>(source: &'a str, query: &'a str, query_type: QueryType) -> Self {
         Self {
             source,
             query,
+            query_type,
         }
     }
 
@@ -37,7 +45,10 @@ impl<'conn> QueryConn<'conn> {
         let end = Utc::now().timestamp();
         let start = end - (60 * 10);
         let step_resolution = 10 as f64;
-        Ok(client.query_range(self.query, start, end, step_resolution).get().await?)
+        match self.query_type {
+            QueryType::Range => Ok(client.query_range(self.query, start, end, step_resolution).get().await?),
+            QueryType::Scalar => Ok(client.query(self.query).get().await?),
+        }
     }
 }
 
@@ -51,7 +62,7 @@ pub struct DataPoint {
 #[derive(Serialize, Deserialize)]
 pub enum QueryResult {
     Series(Vec<(HashMap<String, String>, Vec<DataPoint>)>),
-    Scalar(DataPoint),
+    Scalar(Vec<(HashMap<String, String>, DataPoint)>),
 }
 
 pub fn to_samples(data: Data) -> QueryResult {
@@ -65,13 +76,13 @@ pub fn to_samples(data: Data) -> QueryResult {
             }).collect())
         }
         Data::Vector(mut vector) => {
-            QueryResult::Series(vector.drain(0..).map(|iv| {
+            QueryResult::Scalar(vector.drain(0..).map(|iv| {
                 let (metric, sample) = iv.into_inner();
-                (metric, vec![DataPoint { timestamp: sample.timestamp(), value: sample.value() }])
+                (metric, DataPoint { timestamp: sample.timestamp(), value: sample.value() })
             }).collect())
         }
         Data::Scalar(sample) => {
-            QueryResult::Scalar(DataPoint { timestamp: sample.timestamp(), value: sample.value() })
+            QueryResult::Scalar(vec![(HashMap::new(), DataPoint { timestamp: sample.timestamp(), value: sample.value() })])
         }
     }
 }
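The Scalar variant now carries the label set alongside each instant-vector sample, which is exactly the shape the frontend iterates in the JavaScript change below. A sketch of the JSON this produces, assuming DataPoint holds f64 timestamp/value fields (not shown in this diff), using serde_json purely for illustration and an invented instance label value:

// Sketch only: mirrors the reworked QueryResult::Scalar to show the JSON the
// browser receives; DataPoint's field types are an assumption here.
use std::collections::HashMap;
use serde::Serialize;

#[derive(Serialize)]
struct DataPoint {
    timestamp: f64,
    value: f64,
}

#[derive(Serialize)]
enum QueryResult {
    Scalar(Vec<(HashMap<String, String>, DataPoint)>),
}

fn main() {
    let labels = HashMap::from([("instance".to_string(), "heimdall:9100".to_string())]);
    let result = QueryResult::Scalar(vec![(
        labels,
        DataPoint { timestamp: 1700000000.0, value: 42.0 },
    )]);
    // Prints: {"Scalar":[[{"instance":"heimdall:9100"},{"timestamp":1700000000.0,"value":42.0}]]}
    // i.e. data.Scalar is a list of [labels, datapoint] pairs, matching the
    // pair[0] / pair[1] access in the JavaScript below.
    println!("{}", serde_json::to_string(&result).unwrap());
}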
(timeseries graph web component, JavaScript)
@@ -1,3 +1,16 @@
+// Copyright 2021 Jeremy Wall
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
 class TimeseriesGraph extends HTMLElement {
     #uri;
     #width;
@@ -121,7 +134,34 @@ class TimeseriesGraph extends HTMLElement {
                 responsive: true
             });
         } else if (data.Scalar) {
-            // The graph should be a single value
+            console.log("scalar data: ", data.Scalar);
+            var traces = [];
+            for (const pair of data.Scalar) {
+                const series = pair[1];
+                const labels = pair[0];
+                var trace = {
+                    type: "bar",
+                    x: [],
+                    y: []
+                };
+                console.log("labels: ", labels, this.#label);
+                if (labels[this.#label]) {
+                    trace.x.push(labels[this.#label]);
+                };
+                trace.y.push(series.value);
+                traces.push(trace);
+            }
+            console.log("Traces: ", traces);
+            Plotly.react(this.getTargetNode(), traces,
+                {
+                    legend: {
+                        orientation: 'h'
+                    }
+                },
+                {
+                    displayModeBar: false,
+                    responsive: true
+                });
         }
     }
 }