
seasonality grid++

seasonality-grid-#28
Alexey Velikiy 3 years ago
parent
commit
842e86deb3
  1. client/src/components/Graph.vue (11)
  2. client/src/components/pods/anomaly_pod.ts (30)
  3. client/src/services/analytics.service.ts (8)
  4. client/src/types/index.ts (6)
  5. server/src/services/analytic_service/analytic_service.rs (2)
  6. server/src/services/analytic_service/analytic_unit/anomaly_analytic_unit.rs (79)
  7. server/src/services/analytic_service/types.rs (8)

11  client/src/components/Graph.vue

@@ -6,7 +6,7 @@
 <script lang="ts">
-import { TimeRange } from "@/types";
+import { AnomalyHSR, TimeRange } from "@/types";
 import { PatternPod } from "./pods/pattern_pod";
 import { ThresholdPod } from './pods/threshold_pod';
 import { AnomalyPod } from './pods/anomaly_pod';
@@ -86,7 +86,7 @@ async function resolveDataThreshold(range: TimeRange): Promise<{
 // TODO: remove code repetition
 async function resolveDataAnomaly(range: TimeRange): Promise<{
   timeserie: LineTimeSerie[],
-  bounds: [number, [number, number]][],
+  hsr: AnomalyHSR,
   segments: Segment[]
 }> {
@@ -103,13 +103,8 @@ async function resolveDataAnomaly(range: TimeRange): Promise<{
     return {
       timeserie: [
         { target: target, datapoints: values, color: 'green' },
-        // {
-        //   target: "HSR",
-        //   datapoints: hsr.map(([t, v, [l, u]]) => [t, v]),
-        //   color: 'red'
-        // }
       ],
-      bounds: hsr.map(([t, v, [u, l]]) => [t, [u, l]]),
+      hsr,
       segments: segments,
     }
   } catch (e) {

30  client/src/components/pods/anomaly_pod.ts

@@ -1,5 +1,5 @@
 import { HasticPod } from './hastic_pod';
-import { TimeRange } from '@/types';
+import { AnomalyHSR, TimeRange } from '@/types';
 import { Segment } from "@/types/segment";
 import { LineTimeSerie } from '@chartwerk/line-pod';
@@ -8,7 +8,7 @@ import { SegmentsSet } from '@/types/segment_set';
 export type UpdateDataCallback = (range: TimeRange) => Promise<{
   timeserie: LineTimeSerie[],
-  bounds: [number, [number, number]][],
+  hsr: AnomalyHSR,
   segments: Segment[]
 }>;
@@ -17,7 +17,7 @@ import * as _ from 'lodash';
 export class AnomalyPod extends HasticPod<UpdateDataCallback> {
-  _bounds: [number, [number, number]][];
+  private _hsr: AnomalyHSR;
   constructor(
     el: HTMLElement,
@@ -28,8 +28,6 @@ export class AnomalyPod extends HasticPod<UpdateDataCallback> {
     this.fetchData();
   }
-  // TODO: implement renderMetrics
   public fetchData(): void {
     let to = Math.floor(Date.now() / 1000);
     let from = to - 50000; // -50000 seconds
@@ -41,32 +39,31 @@ export class AnomalyPod extends HasticPod<UpdateDataCallback> {
     this.udc({ from, to })
       .then(resp => {
         this.updateSegments(resp.segments);
-        this.updateBounds(resp.bounds);
+        this.updateHSR(resp.hsr);
         this.updateData(resp.timeserie, undefined, true);
       })
       .catch(() => { /* set "error" message */ })
   }
   renderMetrics() {
-    this.renderBounds()
+    this.renderHSR()
     super.renderMetrics();
   }
-  updateBounds(bounds: [number, [number, number]][]) {
-    this._bounds = bounds;
+  updateHSR(hsr: AnomalyHSR) {
+    this._hsr = hsr;
   }
-  renderBounds() {
+  renderHSR() {
     // TODO: check the case when this._bounds == undefined
-    if(this._bounds == undefined) {
+    if(this._hsr == undefined) {
       return;
     }
-    const pointsUp = this._bounds.map(([t, [u, _]]) => [t, u])
-    const pointsDown = this._bounds.map(([t, [_, l]]) => [t, l]);
+    const pointsUp = this._hsr.ts.map(([t, v, [p, q]]) => [t, q]);
+    const pointsDown = this._hsr.ts.map(([t, v, [p, q]]) => [t, p]);
     const points = pointsUp.reverse().concat(pointsDown)
       .map(([t, v]) => `${this.xScale(t)},${this.yScale(v)}`)
@@ -80,6 +77,9 @@ export class AnomalyPod extends HasticPod<UpdateDataCallback> {
       .attr('fill-opacity', 0.2)
       .attr('pointer-events', 'none')
       .attr('points', points);
+    // TODO: render timestamp
+    // TODO: render seasonality grid
   }
 }

8  client/src/services/analytics.service.ts

@@ -10,6 +10,7 @@ import {
   AnalyticUnitType, AnlyticUnitConfig,
   PatternConfig, ThresholdConfig, AnomalyConfig
 } from "@/types/analytic_units";
+import { AnomalyHSR } from "@/types";
 const ANALYTICS_API_URL = API_URL + "analytics/";
@@ -57,7 +58,8 @@ export function getStatusGenerator(): AsyncIterableIterator<string> {
   return getGenerator<string>(100, getStatus);
 }
-export async function getHSRAnomaly(from: number, to: number): Promise<[number, number, [number, number]][]> {
+export async function getHSRAnomaly(from: number, to: number): Promise<AnomalyHSR> {
   if(from >= to) {
     throw new Error("`from` can`t be less than `to`");
   }
@@ -65,7 +67,7 @@ export async function getHSRAnomaly(from: number, to: number): Promise<[number,
   const uri = ANALYTICS_API_URL + `hsr/?from=${from}&to=${to}`;
   const res = await axios.get(uri);
-  const values = res["data"]["ConfidenceTimeSerie"];
-  return values as [number, number, [number, number]][];
+  const values = res["data"]["AnomalyHSR"];
+  return values as AnomalyHSR;
 }

6  client/src/types/index.ts

@@ -1 +1,7 @@
 export type TimeRange = { from: number, to: number };
+
+export type AnomalyHSR = {
+  seasonality: number,
+  timestamp: number,
+  ts: [number, number, [number, number]][]
+};

2  server/src/services/analytic_service/analytic_service.rs

@@ -196,7 +196,7 @@ impl AnalyticService {
     // TODO: maybe make `consume_response` async
     fn consume_response(&mut self, res: types::ResponseType) {
         match res {
-            // TODO: handle when learning panic
+            // TODO: handle when learning panics
            ResponseType::LearningStarted => {
                self.analytic_unit_learning_status = LearningStatus::Learning
            }

79  server/src/services/analytic_service/analytic_unit/anomaly_analytic_unit.rs

@@ -1,6 +1,4 @@
-use crate::services::{
-    analytic_service::types::HSR, metric_service::MetricService, segments_service::SegmentsService,
-};
+use crate::services::{analytic_service::types::{AnomalyHSRConfig, HSR}, metric_service::MetricService, segments_service::SegmentsService};
 use super::types::{AnalyticUnit, AnalyticUnitConfig, AnomalyConfig, LearningResult};
@@ -31,13 +29,15 @@ fn get_value_with_offset(ts: &Vec<(u64, f64)>, offset: u64) -> Option<(u64, f64)
 struct SARIMA {
     pub ts: Vec<(u64, f64)>,
     pub seasonality: u64,
+    pub confidence: f64,
 }
 impl SARIMA {
-    pub fn new(seasonality: u64) -> SARIMA {
+    pub fn new(seasonality: u64, confidence: f64) -> SARIMA {
         return SARIMA {
             ts: Vec::new(),
             seasonality,
+            confidence
         };
     }
@@ -59,7 +59,7 @@ impl SARIMA {
         if to - from != SEASONALITY_ITERATIONS * self.seasonality {
             return Err(anyhow::format_err!("timeserie to learn from should be {} * sasonality", SEASONALITY_ITERATIONS));
         }
         for k in 0..iter_steps {
             let mut vts = Vec::new();
             for si in 0..SEASONALITY_ITERATIONS {
@@ -75,9 +75,24 @@ impl SARIMA {
         return Ok(());
     }
-    pub fn predict(&self, timestamp: u64, value: f64) -> (f64, f64, f64) {
-        // TODO: basic implement based on existing ts
-        return (0.0, 0.0, 0.0);
+    pub fn predict(&self, mut timestamp: u64) -> (f64, (f64, f64)) {
+        let from = self.ts[0].0;
+        if timestamp < from {
+            let len = from - timestamp;
+            timestamp += self.seasonality * (len / self.seasonality);
+            if len % self.seasonality != 0 {
+                timestamp += self.seasonality;
+            }
+        }
+        let len_from = timestamp - from;
+        // TODO: take avg if timestamp in between
+        let index_diff = (len_from / DETECTION_STEP) % self.ts.len() as u64;
+        let p = self.ts[index_diff as usize].1;
+        return (p, (p + self.confidence, p - self.confidence));
     }
     pub fn push_point() {
@@ -101,39 +116,41 @@ impl AnomalyAnalyticUnit {
     }
     fn get_hsr_from_metric_result(&self, mr: &MetricResult) -> anyhow::Result<HSR> {
+        if self.sarima.is_none() {
+            return Err(anyhow::format_err!("model is not ready"));
+        }
         // TODO: get it from model
         if mr.data.keys().len() == 0 {
-            return Ok(HSR::ConfidenceTimeSerie(Vec::new()));
+            return Ok(HSR::AnomalyHSR(AnomalyHSRConfig {
+                seasonality: self.config.seasonality,
+                timestamp: self.sarima.as_ref().unwrap().ts.last().unwrap().0,
+                ts: Vec::new()
+            }));
         }
         let k = mr.data.keys().nth(0).unwrap();
         let ts = mr.data[k].clone();
         if ts.len() == 0 {
-            return Ok(HSR::ConfidenceTimeSerie(Vec::new()));
+            return Ok(HSR::AnomalyHSR(AnomalyHSRConfig {
+                seasonality: self.config.seasonality,
+                timestamp: self.sarima.as_ref().unwrap().ts.last().unwrap().0,
+                ts: Vec::new()
+            }));
         }
         let mut sts = Vec::new();
-        sts.push((
-            ts[0].0,
-            ts[0].1,
-            ((
-                ts[0].1 + self.config.confidence,
-                ts[0].1 - self.config.confidence,
-            )),
-        ));
-        for t in 1..ts.len() {
-            let alpha = self.config.alpha;
-            let stv = alpha * ts[t].1 + (1.0 - alpha) * sts[t - 1].1;
-            sts.push((
-                ts[t].0,
-                stv,
-                (stv + self.config.confidence, stv - self.config.confidence),
-            ));
+        let sarima = self.sarima.as_ref().unwrap();
+        for vt in ts {
+            let x = sarima.predict(vt.0);
+            sts.push((vt.0, x.0, (x.1.0, x.1.1)));
         }
-        Ok(HSR::ConfidenceTimeSerie(sts))
+        return Ok(HSR::AnomalyHSR(AnomalyHSRConfig {
+            seasonality: self.config.seasonality,
+            timestamp: self.sarima.as_ref().unwrap().ts.last().unwrap().0,
+            ts: sts
+        }));
     }
 }
@@ -147,7 +164,7 @@ impl AnalyticUnit for AnomalyAnalyticUnit {
         }
     }
     async fn learn(&mut self, ms: MetricService, _ss: SegmentsService) -> LearningResult {
-        let mut sarima = SARIMA::new(self.config.seasonality);
+        let mut sarima = SARIMA::new(self.config.seasonality, self.config.confidence);
         let utc: DateTime<Utc> = Utc::now();
         let to = utc.timestamp() as u64;
@@ -199,10 +216,10 @@ impl AnalyticUnit for AnomalyAnalyticUnit {
         let confidence_time_serie = self.get_hsr_from_metric_result(&mr)?;
-        if let HSR::ConfidenceTimeSerie(hsr) = confidence_time_serie {
+        if let HSR::AnomalyHSR(hsr) = confidence_time_serie {
             let mut from = None;
-            for ((t, _, (u, l)), (t1, rv)) in hsr.iter().zip(ts.iter()) {
+            for ((t, _, (u, l)), (t1, rv)) in hsr.ts.iter().zip(ts.iter()) {
                 if *t != *t1 {
                     return Err(anyhow::format_err!("incompatible hsr/ts"));
                 }
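A note on the rewritten SARIMA::predict above: instead of returning zeros it now looks up the learned seasonal value for a query timestamp and wraps it with a +/- confidence band. Timestamps earlier than the start of the learned series are first shifted forward by whole seasonality periods; the remaining offset is then turned into an index in DETECTION_STEP increments, modulo the series length. The standalone sketch below is not part of the commit and assumes DETECTION_STEP = 10 just to make the arithmetic concrete.

// Standalone sketch, not from this commit: the index arithmetic performed by the new
// SARIMA::predict. DETECTION_STEP = 10 is an assumed value for the example; the real
// constant is defined elsewhere in anomaly_analytic_unit.rs.
const DETECTION_STEP: u64 = 10;

fn predict_index(ts_start: u64, ts_len: usize, seasonality: u64, mut timestamp: u64) -> usize {
    // A query before the learned window is shifted forward by whole seasons so that
    // it lands at (or just past) ts_start while keeping the same phase of the season.
    if timestamp < ts_start {
        let len = ts_start - timestamp;
        timestamp += seasonality * (len / seasonality);
        if len % seasonality != 0 {
            timestamp += seasonality;
        }
    }
    // Offset from the start of the learned series, wrapped onto its length.
    let len_from = timestamp - ts_start;
    ((len_from / DETECTION_STEP) % ts_len as u64) as usize
}

fn main() {
    // Learned series: starts at t = 1000, 60 points, 10 s step, seasonality 600 s.
    // A query at t = 250 is 750 s before the start, so it is shifted forward by
    // ceil(750 / 600) = 2 seasons to t = 1450, which maps to index (450 / 10) % 60 = 45.
    assert_eq!(predict_index(1000, 60, 600, 250), 45);
    // A query past the start wraps around the season directly: t = 1700 -> (700 / 10) % 60 = 10.
    assert_eq!(predict_index(1000, 60, 600, 1700), 10);
    println!("alignment examples hold");
}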

8  server/src/services/analytic_service/types.rs

@@ -63,12 +63,18 @@ pub struct DetectionTask {
     pub to: u64,
 }
+#[derive(Debug, Serialize)]
+pub struct AnomalyHSRConfig {
+    pub timestamp: u64,
+    pub seasonality: u64,
+    pub ts: Vec<(u64, f64, (f64, f64))>
+}
 // HSR Stands for Hastic Signal Representation,
 // varies for different analytic units
 #[derive(Debug, Serialize)]
 pub enum HSR {
     TimeSerie(Vec<(u64, f64)>),
-    ConfidenceTimeSerie(Vec<(u64, f64, (f64, f64))>),
+    AnomalyHSR(AnomalyHSRConfig),
 }
 #[derive(Debug)]
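One consequence of the types.rs change above: HSR derives Serialize, so under serde's default externally tagged enum representation the new AnomalyHSR variant should serialize as an object keyed by the variant name, which is the shape the client reads as res["data"]["AnomalyHSR"] in getHSRAnomaly. A minimal sketch of that expected payload, assuming serde_json renders the HTTP response:

// Minimal sketch, not part of this commit: expected JSON shape of the new
// HSR::AnomalyHSR variant under serde's default externally tagged representation.
// Assumes serde_json is the serializer used for the /analytics/hsr response.
use serde::Serialize;

#[derive(Debug, Serialize)]
pub struct AnomalyHSRConfig {
    pub timestamp: u64,
    pub seasonality: u64,
    pub ts: Vec<(u64, f64, (f64, f64))>,
}

#[derive(Debug, Serialize)]
pub enum HSR {
    TimeSerie(Vec<(u64, f64)>),
    AnomalyHSR(AnomalyHSRConfig),
}

fn main() {
    let hsr = HSR::AnomalyHSR(AnomalyHSRConfig {
        timestamp: 1_700_000_000,
        seasonality: 3600,
        ts: vec![(1_700_000_000, 1.0, (1.5, 0.5))],
    });
    // Prints:
    // {"AnomalyHSR":{"timestamp":1700000000,"seasonality":3600,"ts":[[1700000000,1.0,[1.5,0.5]]]}}
    println!("{}", serde_json::to_string(&hsr).unwrap());
}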
