Compare commits

...

7 Commits

7 changed files with 208 additions and 75 deletions

View File

@ -9,11 +9,12 @@ license = "Apache-2.0"
[dependencies] [dependencies]
anyhow = "1.0.79" anyhow = "1.0.79"
async-io = "2.3.1" async-io = "2.3.1"
axum = { version = "0.7.4", features = [ "http2" ] } axum = { version = "0.7.4", features = ["http2", "query"] }
axum-macros = "0.4.1" axum-macros = "0.4.1"
chrono = { version = "0.4.33", features = ["alloc", "std", "now"] } chrono = { version = "0.4.33", features = ["alloc", "std", "now", "serde"] }
clap = { version = "4.4.18", features = ["derive"] } clap = { version = "4.4.18", features = ["derive"] }
maud = { version = "0.26.0", features = ["axum"] } maud = { version = "0.26.0", features = ["axum"] }
parse_duration = "2.1.1"
prometheus-http-query = "0.8.2" prometheus-http-query = "0.8.2"
serde = { version = "1.0.196", features = ["derive"] } serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.113" serde_json = "1.0.113"

View File

@ -5,8 +5,16 @@
source: http://heimdall:9001 source: http://heimdall:9001
query: 'sum by (instance)(irate(node_cpu_seconds_total{mode="system",job="nodestats"}[5m])) * 100' query: 'sum by (instance)(irate(node_cpu_seconds_total{mode="system",job="nodestats"}[5m])) * 100'
query_type: Range query_type: Range
span:
start: 2024-02-10T00:00:00.00Z
duration: 2d
step_duration: 1min
name_label: instance name_label: instance
- title: Test Dashboard 2 - title: Test Dashboard 2
span:
start: 2024-02-10T00:00:00.00Z
duration: 2d
step_duration: 1min
graphs: graphs:
- title: Node cpu - title: Node cpu
source: http://heimdall:9001 source: http://heimdall:9001

View File

@ -1,4 +1,4 @@
// Copyright 2021 Jeremy Wall // Copyright 2023 Jeremy Wall
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -13,16 +13,26 @@
// limitations under the License. // limitations under the License.
use std::path::Path; use std::path::Path;
use chrono::prelude::*;
use chrono::Duration;
use serde::Deserialize; use serde::Deserialize;
use serde_yaml; use serde_yaml;
use tracing::debug; use tracing::{debug, error};
use crate::query::{QueryConn, QueryType}; use crate::query::{QueryConn, QueryType};
/// A time window for a graph query, deserialized from the dashboard
/// config file or from query-string parameters.
#[derive(Deserialize)]
pub struct GraphSpan {
    // Absolute start of the window, serialized per RFC 3339.
    pub start: DateTime<Utc>,
    // Human-readable total length of the window (e.g. "2d"),
    // parsed later by `duration_from_string`.
    pub duration: String,
    // Human-readable step resolution between samples (e.g. "1min").
    pub step_duration: String,
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct Dashboard { pub struct Dashboard {
pub title: String, pub title: String,
pub graphs: Vec<Graph>, pub graphs: Vec<Graph>,
pub span: Option<GraphSpan>
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -30,14 +40,67 @@ pub struct Graph {
pub title: String, pub title: String,
pub source: String, pub source: String,
pub query: String, pub query: String,
// serialized with https://datatracker.ietf.org/doc/html/rfc3339
pub span: Option<GraphSpan>,
pub name_label: String, pub name_label: String,
pub query_type: QueryType, pub query_type: QueryType,
} }
fn duration_from_string(duration: &str) -> Option<Duration> {
match parse_duration::parse(duration) {
Ok(d) => match Duration::from_std(d) {
Ok(d) => Some(d),
Err(e) => {
error!(err = ?e, "specified Duration is out of bounds");
return None;
}
},
Err(e) => {
error!(
err = ?e,
"Failed to parse duration"
);
return None;
}
}
}
fn graph_span_to_tuple(span: &Option<GraphSpan>) -> Option<(DateTime<Utc>, Duration, Duration)> {
if span.is_none() {
return None;
}
let span = span.as_ref().unwrap();
let duration = match duration_from_string(&span.duration) {
Some(d) => d,
None => {
error!("Invalid query duration not assigning span to to graph query");
return None;
}
};
let step_duration = match duration_from_string(&span.step_duration) {
Some(d) => d,
None => {
error!("Invalid query step resolution not assigning span to to graph query");
return None;
}
};
Some((span.start.clone(), duration, step_duration))
}
impl Graph { impl Graph {
pub fn get_query_connection<'conn, 'graph: 'conn>(&'graph self) -> QueryConn<'conn> { pub fn get_query_connection<'conn, 'graph: 'conn>(&'graph self, graph_span: &'graph Option<GraphSpan>) -> QueryConn<'conn> {
debug!(query=self.query, source=self.source, "Getting query connection for graph"); debug!(
QueryConn::new(&self.source, &self.query, self.query_type.clone()) query = self.query,
source = self.source,
"Getting query connection for graph"
);
let mut conn = QueryConn::new(&self.source, &self.query, self.query_type.clone());
if let Some((start, duration, step_duration)) = graph_span_to_tuple(&self.span) {
conn = conn.with_span(start, duration, step_duration);
} else if let Some((start, duration, step_duration)) = graph_span_to_tuple(graph_span) {
conn = conn.with_span(start, duration, step_duration);
}
conn
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2021 Jeremy Wall // Copyright 2023 Jeremy Wall
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.

View File

@ -1,4 +1,4 @@
// Copyright 2021 Jeremy Wall // Copyright 2023 Jeremy Wall
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -13,10 +13,13 @@
// limitations under the License. // limitations under the License.
use std::collections::HashMap; use std::collections::HashMap;
use prometheus_http_query::{Client, response::{PromqlResult, Data}};
use serde::{Serialize, Deserialize};
use tracing::debug;
use chrono::prelude::*; use chrono::prelude::*;
use prometheus_http_query::{
response::{Data, PromqlResult},
Client,
};
use serde::{Deserialize, Serialize};
use tracing::debug;
#[derive(Deserialize, Clone)] #[derive(Deserialize, Clone)]
pub enum QueryType { pub enum QueryType {
@ -24,9 +27,16 @@ pub enum QueryType {
Scalar, Scalar,
} }
/// A fully resolved time range for a range query: absolute start,
/// total length, and step resolution flattened to whole seconds.
pub struct TimeSpan {
    // Beginning of the queried window.
    pub start: DateTime<Utc>,
    // Total length of the window; the query end is start + duration.
    pub duration: chrono::Duration,
    // Step resolution in whole seconds (as Prometheus expects).
    pub step_seconds: i64,
}
pub struct QueryConn<'conn> { pub struct QueryConn<'conn> {
source: &'conn str, source: &'conn str,
query: &'conn str, query: &'conn str,
span: Option<TimeSpan>,
query_type: QueryType, query_type: QueryType,
} }
@ -36,17 +46,36 @@ impl<'conn> QueryConn<'conn> {
source, source,
query, query,
query_type, query_type,
span: None,
} }
} }
/// Builder-style setter attaching an explicit time window to this
/// query connection. `step` is truncated to whole seconds via
/// `num_seconds()`.
pub fn with_span(mut self, start: DateTime<Utc>, duration: chrono::Duration, step: chrono::Duration) -> Self {
    self.span = Some(TimeSpan { start, duration, step_seconds: step.num_seconds() , });
    self
}
pub async fn get_results(&self) -> anyhow::Result<PromqlResult> { pub async fn get_results(&self) -> anyhow::Result<PromqlResult> {
debug!("Getting results for query"); debug!("Getting results for query");
let client = Client::try_from(self.source)?; let client = Client::try_from(self.source)?;
let (end, start, step_resolution) = if let Some(TimeSpan {
start: st,
duration: du,
step_seconds,
}) = self.span
{
((st + du).timestamp(), st.timestamp(), step_seconds as f64)
} else {
let end = Utc::now().timestamp(); let end = Utc::now().timestamp();
let start = end - (60 * 10); let start = end - (60 * 10);
let step_resolution = 10 as f64; (end, start, 30 as f64)
};
debug!(start, end, step_resolution, "Running Query with range values");
match self.query_type { match self.query_type {
QueryType::Range => Ok(client.query_range(self.query, start, end, step_resolution).get().await?), QueryType::Range => Ok(client
.query_range(self.query, start, end, step_resolution)
.get()
.await?),
QueryType::Scalar => Ok(client.query(self.query).get().await?), QueryType::Scalar => Ok(client.query(self.query).get().await?),
} }
} }
@ -58,7 +87,6 @@ pub struct DataPoint {
value: f64, value: f64,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub enum QueryResult { pub enum QueryResult {
Series(Vec<(HashMap<String, String>, Vec<DataPoint>)>), Series(Vec<(HashMap<String, String>, Vec<DataPoint>)>),
@ -67,22 +95,45 @@ pub enum QueryResult {
pub fn to_samples(data: Data) -> QueryResult { pub fn to_samples(data: Data) -> QueryResult {
match data { match data {
Data::Matrix(mut range) => { Data::Matrix(mut range) => QueryResult::Series(
QueryResult::Series(range.drain(0..).map(|rv| { range
.drain(0..)
.map(|rv| {
let (metric, mut samples) = rv.into_inner(); let (metric, mut samples) = rv.into_inner();
(metric, samples.drain(0..).map(|s| { (
DataPoint { timestamp: s.timestamp(), value: s.value() } metric,
}).collect()) samples
}).collect()) .drain(0..)
} .map(|s| DataPoint {
Data::Vector(mut vector) => { timestamp: s.timestamp(),
QueryResult::Scalar(vector.drain(0..).map(|iv| { value: s.value(),
})
.collect(),
)
})
.collect(),
),
Data::Vector(mut vector) => QueryResult::Scalar(
vector
.drain(0..)
.map(|iv| {
let (metric, sample) = iv.into_inner(); let (metric, sample) = iv.into_inner();
(metric, DataPoint { timestamp: sample.timestamp(), value: sample.value() }) (
}).collect()) metric,
} DataPoint {
Data::Scalar(sample) => { timestamp: sample.timestamp(),
QueryResult::Scalar(vec![(HashMap::new(), DataPoint { timestamp: sample.timestamp(), value: sample.value() })]) value: sample.value(),
} },
)
})
.collect(),
),
Data::Scalar(sample) => QueryResult::Scalar(vec![(
HashMap::new(),
DataPoint {
timestamp: sample.timestamp(),
value: sample.value(),
},
)]),
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2021 Jeremy Wall // Copyright 2023 Jeremy Wall
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -11,10 +11,10 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use std::sync::Arc; use std::{sync::Arc, collections::HashMap};
use axum::{ use axum::{
extract::{Path, State}, extract::{Path, Query, State},
response::Response, response::Response,
routing::get, routing::get,
Json, Router, Json, Router,
@ -22,9 +22,10 @@ use axum::{
// https://maud.lambda.xyz/getting-started.html // https://maud.lambda.xyz/getting-started.html
use maud::{html, Markup}; use maud::{html, Markup};
use tracing::debug; use tracing::{debug, error};
use chrono::prelude::*;
use crate::dashboard::{Dashboard, Graph}; use crate::dashboard::{Dashboard, Graph, GraphSpan};
use crate::query::{to_samples, QueryResult}; use crate::query::{to_samples, QueryResult};
type Config = State<Arc<Vec<Dashboard>>>; type Config = State<Arc<Vec<Dashboard>>>;
@ -32,17 +33,34 @@ type Config = State<Arc<Vec<Dashboard>>>;
pub async fn graph_query( pub async fn graph_query(
State(config): Config, State(config): Config,
Path((dash_idx, graph_idx)): Path<(usize, usize)>, Path((dash_idx, graph_idx)): Path<(usize, usize)>,
Query(query): Query<HashMap<String, String>>,
) -> Json<QueryResult> { ) -> Json<QueryResult> {
debug!("Getting data for query"); debug!("Getting data for query");
let graph = config let dash = config.get(dash_idx).expect("No such dashboard index");
.get(dash_idx) let graph = dash
.expect("No such dashboard index")
.graphs .graphs
.get(graph_idx) .get(graph_idx)
.expect(&format!("No such graph in dasboard {}", dash_idx)); .expect(&format!("No such graph in dasboard {}", dash_idx));
let query_span = {
if query.contains_key("start") && query.contains_key("duration") && query.contains_key("step_duration")
{
if let Ok(start) = DateTime::parse_from_rfc3339(&query["start"]) {
Some(GraphSpan {
start: start.to_utc(),
duration: query["duration"].clone(),
step_duration: query["step_duration"].clone(),
})
} else {
error!(?query, "Invalid date time in start for query string");
None
}
} else {
None
}
};
let data = to_samples( let data = to_samples(
graph graph
.get_query_connection() .get_query_connection(if query_span.is_some() { &query_span } else { &dash.span })
.get_results() .get_results()
.await .await
.expect("Unable to get query results") .expect("Unable to get query results")
@ -54,6 +72,7 @@ pub async fn graph_query(
pub fn mk_api_routes(config: Arc<Vec<Dashboard>>) -> Router<Config> { pub fn mk_api_routes(config: Arc<Vec<Dashboard>>) -> Router<Config> {
// Query routes // Query routes
// TODO(zaphar): Allow passing the timespan in via query
Router::new().route( Router::new().route(
"/dash/:dash_idx/graph/:graph_idx", "/dash/:dash_idx/graph/:graph_idx",
get(graph_query).with_state(config), get(graph_query).with_state(config),

View File

@ -1,4 +1,4 @@
// Copyright 2021 Jeremy Wall // Copyright 2023 Jeremy Wall
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -101,6 +101,15 @@ class TimeseriesGraph extends HTMLElement {
async updateGraph() { async updateGraph() {
const data = await this.fetchData(); const data = await this.fetchData();
const config = {
legend: {
orientation: 'h'
}
};
const layout = {
displayModeBar: false,
responsive: true
};
if (data.Series) { if (data.Series) {
// https://plotly.com/javascript/reference/scatter/ // https://plotly.com/javascript/reference/scatter/
var traces = []; var traces = [];
@ -118,23 +127,14 @@ class TimeseriesGraph extends HTMLElement {
trace.name = labels[this.#label]; trace.name = labels[this.#label];
}; };
for (const point of series) { for (const point of series) {
trace.x.push(point.timestamp); trace.x.push(new Date(point.timestamp * 1000));
trace.y.push(point.value); trace.y.push(point.value);
} }
traces.push(trace); traces.push(trace);
} }
console.log("Traces: ", traces); console.log("Traces: ", traces);
// https://plotly.com/javascript/plotlyjs-function-reference/#plotlyreact // https://plotly.com/javascript/plotlyjs-function-reference/#plotlyreact
Plotly.react(this.getTargetNode(), traces, Plotly.react(this.getTargetNode(), traces, config, layout);
{
legend: {
orientation: 'h'
}
},
{
displayModeBar: false,
responsive: true
});
} else if (data.Scalar) { } else if (data.Scalar) {
// https://plotly.com/javascript/reference/bar/ // https://plotly.com/javascript/reference/bar/
console.log("scalar data: ", data.Scalar); console.log("scalar data: ", data.Scalar);
@ -155,16 +155,7 @@ class TimeseriesGraph extends HTMLElement {
traces.push(trace); traces.push(trace);
} }
console.log("Traces: ", traces); console.log("Traces: ", traces);
Plotly.react(this.getTargetNode(), traces, Plotly.react(this.getTargetNode(), traces, config, layout);
{
legend: {
orientation: 'h'
}
},
{
displayModeBar: false,
responsive: true
});
} }
} }
} }