
add support for threshold line

amper43 committed 5 years ago
branch: pull/1/head
commit f4a5f20313
3 changed files:
  1. analytics/analytics/analytic_types/detector_typing.py (8)
  2. analytics/analytics/detectors/threshold_detector.py (24)
  3. server/src/controllers/analytics_controller.ts (99)

analytics/analytics/analytic_types/detector_typing.py (8 changed lines)

@@ -50,3 +50,11 @@ class AnomalyProcessingResult():
     ):
         self.lower_bound = lower_bound
         self.upper_bound = upper_bound
+
+@utils.meta.JSONClass
+class ThresholdProcessingResult():
+    def __init__(
+        self,
+        threshold: Optional[TimeSeries] = None,
+    ):
+        self.threshold = threshold
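
For orientation, a minimal sketch of the new result type in isolation, assuming (as the process_data implementation below suggests) that TimeSeries is a list of (timestamp, value) pairs:

from typing import List, Optional, Tuple

# Assumed shape of the analytics TimeSeries alias: epoch-millisecond
# timestamps paired with values; redefined here to keep the sketch
# self-contained.
TimeSeries = List[Tuple[int, float]]

class ThresholdProcessingResult:
    # Stripped-down stand-in for the decorated class added in this
    # commit; the real one is serialized by @utils.meta.JSONClass.
    def __init__(self, threshold: Optional[TimeSeries] = None):
        self.threshold = threshold

# A constant threshold of 42.0 over two points (illustrative values).
result = ThresholdProcessingResult([(1552435200000, 42.0), (1552435260000, 42.0)])
print(result.threshold)  # [(1552435200000, 42.0), (1552435260000, 42.0)]

On the server side this field surfaces as result.payload.threshold in the controller change below.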

analytics/analytics/detectors/threshold_detector.py (24 changed lines)

@@ -6,9 +6,9 @@ import numpy as np
 
 from typing import Optional, List
 
 from analytic_types import ModelCache
-from analytic_types.detector_typing import DetectionResult
+from analytic_types.detector_typing import DetectionResult, ThresholdProcessingResult
 from analytic_types.segment import Segment
-from detectors import Detector
+from detectors import ProcessingDetector
 from time import time
 import utils
@@ -16,7 +16,7 @@ import utils
 
 logger = log.getLogger('THRESHOLD_DETECTOR')
 
 
-class ThresholdDetector(Detector):
+class ThresholdDetector(ProcessingDetector):
 
     WINDOW_SIZE = 3
@@ -89,3 +89,21 @@ class ThresholdDetector(Detector):
             result.cache = detection.cache
         result.segments = utils.merge_intersecting_segments(result.segments, time_step)
         return result
+
+    def process_data(self, dataframe: pd.DataFrame, cache: ModelCache) -> ThresholdProcessingResult:
+        data = dataframe['value']
+        value = cache['value']
+        data.values[:] = value
+        timestamps = utils.convert_series_to_timestamp_list(dataframe.timestamp)
+        result_series = list(zip(timestamps, data.values.tolist()))
+        return ThresholdProcessingResult(result_series)
+
+    def concat_processing_results(self, processing_results: List[ThresholdProcessingResult]) -> Optional[ThresholdProcessingResult]:
+        if len(processing_results) == 0:
+            return None
+
+        united_result = ThresholdProcessingResult([])
+        for result in processing_results:
+            united_result.threshold.extend(result.threshold)
+
+        return united_result
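
A runnable sketch of what the new process_data produces, with the cache lookup and the timestamp conversion inlined. The epoch-millisecond unit is an assumption about utils.convert_series_to_timestamp_list, and process_data_sketch is a hypothetical stand-in, not the project's API:

import pandas as pd

# Every point of the queried window is mapped to the cached threshold
# value, producing the flat line the UI draws over the series.
def process_data_sketch(dataframe: pd.DataFrame, cache: dict) -> list:
    value = cache['value']
    # Assumed behavior of utils.convert_series_to_timestamp_list:
    # pandas timestamps -> epoch milliseconds.
    timestamps = [int(ts.timestamp() * 1000) for ts in dataframe['timestamp']]
    return [(ts, value) for ts in timestamps]

df = pd.DataFrame({
    'timestamp': pd.to_datetime(['2019-03-13 00:00', '2019-03-13 00:01']),
    'value': [10.5, 99.9],
})
print(process_data_sketch(df, {'value': 42}))
# [(1552435200000, 42), (1552435260000, 42)]

concat_processing_results then simply concatenates these per-chunk lines into one series, returning None when there is nothing to merge.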

server/src/controllers/analytics_controller.ts (99 changed lines)

@@ -631,47 +631,78 @@ export async function getHSR(
   try {
     const grafanaUrl = getGrafanaUrl(analyticUnit.grafanaUrl);
     const data = await queryByMetric(analyticUnit.metric, grafanaUrl, from, to, HASTIC_API_KEY);
 
-    if(analyticUnit.detectorType !== AnalyticUnit.DetectorType.ANOMALY) {
-      return { hsr: data };
+    let resultSeries = {
+      hsr: data
     }
 
-    let cache = await AnalyticUnitCache.findById(analyticUnit.id);
-    if(
-      cache === null ||
-      cache.data.alpha !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).alpha ||
-      cache.data.confidence !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).confidence
-    ) {
-      await runLearning(analyticUnit.id, from, to);
-      cache = await AnalyticUnitCache.findById(analyticUnit.id);
-    }
-    cache = cache.data;
-
-    const analyticUnitType = analyticUnit.type;
-    const detector = analyticUnit.detectorType;
-    const payload = {
-      data: data.values,
-      analyticUnitType,
-      detector,
-      cache
-    };
-
-    const processingTask = new AnalyticsTask(analyticUnit.id, AnalyticsTaskType.PROCESS, payload);
-    const result = await runTask(processingTask);
-    if(result.status !== AnalyticUnit.AnalyticUnitStatus.SUCCESS) {
-      throw new Error(`Data processing error: ${result.error}`);
-    }
-
-    let resultSeries = {
-      hsr: data
-    }
-
-    if(result.payload.lowerBound !== undefined) {
-      resultSeries['lowerBound'] = { values: result.payload.lowerBound, columns: data.columns };
-    }
-
-    if(result.payload.upperBound !== undefined) {
-      resultSeries['upperBound'] = { values: result.payload.upperBound, columns: data.columns };
+    if(analyticUnit.detectorType === AnalyticUnit.DetectorType.THRESHOLD) {
+      let cache = await AnalyticUnitCache.findById(analyticUnit.id);
+      if(
+        cache === null ||
+        cache.data.alpha !== (analyticUnit as AnalyticUnit.ThresholdAnalyticUnit).value ||
+        cache.data.confidence !== (analyticUnit as AnalyticUnit.ThresholdAnalyticUnit).condition
+      ) {
+        await runLearning(analyticUnit.id, from, to);
+        cache = await AnalyticUnitCache.findById(analyticUnit.id);
+      }
+      cache = cache.data;
+
+      const analyticUnitType = analyticUnit.type;
+      const detector = analyticUnit.detectorType;
+      const payload = {
+        data: data.values,
+        analyticUnitType,
+        detector,
+        cache
+      };
+
+      const processingTask = new AnalyticsTask(analyticUnit.id, AnalyticsTaskType.PROCESS, payload);
+      const result = await runTask(processingTask);
+      if(result.status !== AnalyticUnit.AnalyticUnitStatus.SUCCESS) {
+        throw new Error(`Data processing error: ${result.error}`);
+      }
+
+      if(result.payload.threshold !== undefined) {
+        resultSeries['threshold'] = { values: result.payload.threshold, columns: data.columns };
+      }
+    }
+
+    if(analyticUnit.detectorType === AnalyticUnit.DetectorType.ANOMALY) {
+      let cache = await AnalyticUnitCache.findById(analyticUnit.id);
+      if(
+        cache === null ||
+        cache.data.alpha !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).alpha ||
+        cache.data.confidence !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).confidence
+      ) {
+        await runLearning(analyticUnit.id, from, to);
+        cache = await AnalyticUnitCache.findById(analyticUnit.id);
+      }
+      cache = cache.data;
+
+      const analyticUnitType = analyticUnit.type;
+      const detector = analyticUnit.detectorType;
+      const payload = {
+        data: data.values,
+        analyticUnitType,
+        detector,
+        cache
+      };
+
+      const processingTask = new AnalyticsTask(analyticUnit.id, AnalyticsTaskType.PROCESS, payload);
+      const result = await runTask(processingTask);
+      if(result.status !== AnalyticUnit.AnalyticUnitStatus.SUCCESS) {
+        throw new Error(`Data processing error: ${result.error}`);
+      }
+
+      if(result.payload.lowerBound !== undefined) {
+        resultSeries['lowerBound'] = { values: result.payload.lowerBound, columns: data.columns };
+      }
+
+      if(result.payload.upperBound !== undefined) {
+        resultSeries['upperBound'] = { values: result.payload.upperBound, columns: data.columns };
+      }
     }
 
     return resultSeries;
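
After this change, getHSR for a THRESHOLD unit returns the raw series plus the rendered line instead of bailing out early. A hedged sketch of the resulting payload shape, written as a Python literal; all values are made up, and the exact column layout is an assumption based on queryByMetric returning an object with values and columns:

# Illustrative getHSR response for a THRESHOLD analytic unit: `hsr`
# is the raw metric from queryByMetric, and `threshold` wraps the
# ThresholdProcessingResult series with the metric's columns, exactly
# as resultSeries is assembled above.
example_response = {
    'hsr': {
        'columns': ['timestamp', 'value'],
        'values': [[1552435200000, 10.5], [1552435260000, 99.9]],
    },
    'threshold': {
        'columns': ['timestamp', 'value'],
        'values': [[1552435200000, 42], [1552435260000, 42]],
    },
}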
