
Revert "add support of threshold line"

This reverts commit f4a5f20313.
amper43 committed 5 years ago (commit 4fb96302b1)
3 changed files:
  1. analytics/analytics/analytic_types/detector_typing.py (8 changes)
  2. analytics/analytics/detectors/threshold_detector.py (24 changes)
  3. server/src/controllers/analytics_controller.ts (99 changes)

analytics/analytics/analytic_types/detector_typing.py (8 changes)

@@ -50,11 +50,3 @@ class AnomalyProcessingResult():
     ):
         self.lower_bound = lower_bound
         self.upper_bound = upper_bound
-
-@utils.meta.JSONClass
-class ThresholdProcessingResult():
-    def __init__(
-        self,
-        threshold: Optional[TimeSeries] = None,
-    ):
-        self.threshold = threshold
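
Note: the removed ThresholdProcessingResult mirrored AnomalyProcessingResult above: a @utils.meta.JSONClass container holding a single threshold series. A rough sketch of the pattern, assuming the decorator's job is to add camelCase JSON serialization (json_class below is a hypothetical stand-in, not the project's implementation):

    from typing import List, Optional, Tuple

    TimeSeries = List[Tuple[int, float]]  # assumption: [(unix_ms_timestamp, value), ...]

    def json_class(cls):
        # Hypothetical stand-in for utils.meta.JSONClass: attach a to_json()
        # that emits the instance's fields with camelCase keys.
        def to_json(self):
            def camel(name):
                head, *tail = name.split('_')
                return head + ''.join(part.title() for part in tail)
            return {camel(k): v for k, v in self.__dict__.items()}
        cls.to_json = to_json
        return cls

    @json_class
    class ThresholdProcessingResult:
        def __init__(self, threshold: Optional[TimeSeries] = None):
            self.threshold = threshold

    print(ThresholdProcessingResult([(1591678800000, 5.0)]).to_json())
    # {'threshold': [(1591678800000, 5.0)]}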

analytics/analytics/detectors/threshold_detector.py (24 changes)

@@ -6,9 +6,9 @@ import numpy as np

 from typing import Optional, List
 from analytic_types import ModelCache
-from analytic_types.detector_typing import DetectionResult, ThresholdProcessingResult
+from analytic_types.detector_typing import DetectionResult
 from analytic_types.segment import Segment
-from detectors import ProcessingDetector
+from detectors import Detector
 from time import time

 import utils
@@ -16,7 +16,7 @@ import utils

 logger = log.getLogger('THRESHOLD_DETECTOR')

-class ThresholdDetector(ProcessingDetector):
+class ThresholdDetector(Detector):

     WINDOW_SIZE = 3
@@ -89,21 +89,3 @@ class ThresholdDetector(ProcessingDetector):
         result.cache = detection.cache
         result.segments = utils.merge_intersecting_segments(result.segments, time_step)
         return result
-
-    def process_data(self, dataframe: pd.DataFrame, cache: ModelCache) -> ThresholdProcessingResult:
-        data = dataframe['value']
-        value = cache['value']
-        data.values[:] = value
-        timestamps = utils.convert_series_to_timestamp_list(dataframe.timestamp)
-        result_series = list(zip(timestamps, data.values.tolist()))
-        return ThresholdProcessingResult(result_series)
-
-    def concat_processing_results(self, processing_results: List[ThresholdProcessingResult]) -> Optional[ThresholdProcessingResult]:
-        if len(processing_results) == 0:
-            return None
-        united_result = ThresholdProcessingResult([])
-        for result in processing_results:
-            united_result.threshold.extend(result.threshold)
-        return united_result
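
Note: the removed process_data turned the cached threshold value into a flat line, one point per input timestamp, so the server could hand the UI a series to draw; concat_processing_results simply concatenated these per-chunk series into one list. A minimal runnable sketch of the process_data logic, assuming timestamps are serialized as unix milliseconds (to_timestamp_list below is a hypothetical stand-in for utils.convert_series_to_timestamp_list):

    from typing import List, Tuple

    import pandas as pd

    def to_timestamp_list(timestamps: pd.Series) -> List[int]:
        # Hypothetical stand-in: datetime64[ns] -> unix milliseconds.
        return (timestamps.astype('int64') // 1_000_000).tolist()

    def process_data(dataframe: pd.DataFrame, cache: dict) -> List[Tuple[int, float]]:
        # Overwrite every value with the cached threshold, yielding a
        # horizontal line across the whole detection window. (The removed
        # code wrote into dataframe['value'] directly; a copy is used here.)
        data = dataframe['value'].copy()
        data.values[:] = cache['value']
        timestamps = to_timestamp_list(dataframe['timestamp'])
        return list(zip(timestamps, data.values.tolist()))

    frame = pd.DataFrame({
        'timestamp': pd.to_datetime(['2020-06-09 05:00', '2020-06-09 05:01']),
        'value': [1.2, 3.4],
    })
    print(process_data(frame, {'value': 5.0}))
    # [(1591678800000, 5.0), (1591678860000, 5.0)]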

server/src/controllers/analytics_controller.ts (99 changes)

@@ -631,78 +631,47 @@ export async function getHSR(
   try {
     const grafanaUrl = getGrafanaUrl(analyticUnit.grafanaUrl);
     const data = await queryByMetric(analyticUnit.metric, grafanaUrl, from, to, HASTIC_API_KEY);
-
-    let resultSeries = {
-      hsr: data
-    }
-
-    if(analyticUnit.detectorType === AnalyticUnit.DetectorType.THRESHOLD) {
-      let cache = await AnalyticUnitCache.findById(analyticUnit.id);
-      if(
-        cache === null ||
-        cache.data.alpha !== (analyticUnit as AnalyticUnit.ThresholdAnalyticUnit).value ||
-        cache.data.confidence !== (analyticUnit as AnalyticUnit.ThresholdAnalyticUnit).condition
-      ) {
-        await runLearning(analyticUnit.id, from, to);
-        cache = await AnalyticUnitCache.findById(analyticUnit.id);
-      }
-      cache = cache.data;
-
-      const analyticUnitType = analyticUnit.type;
-      const detector = analyticUnit.detectorType;
-      const payload = {
-        data: data.values,
-        analyticUnitType,
-        detector,
-        cache
-      };
-
-      const processingTask = new AnalyticsTask(analyticUnit.id, AnalyticsTaskType.PROCESS, payload);
-      const result = await runTask(processingTask);
-      if(result.status !== AnalyticUnit.AnalyticUnitStatus.SUCCESS) {
-        throw new Error(`Data processing error: ${result.error}`);
-      }
-
-      if(result.payload.threshold !== undefined) {
-        resultSeries['threshold'] = { values: result.payload.threshold, columns: data.columns };
-      }
-    }
-
-    if(analyticUnit.detectorType === AnalyticUnit.DetectorType.ANOMALY) {
-      let cache = await AnalyticUnitCache.findById(analyticUnit.id);
-      if(
-        cache === null ||
-        cache.data.alpha !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).alpha ||
-        cache.data.confidence !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).confidence
-      ) {
-        await runLearning(analyticUnit.id, from, to);
-        cache = await AnalyticUnitCache.findById(analyticUnit.id);
-      }
-      cache = cache.data;
+    if(analyticUnit.detectorType !== AnalyticUnit.DetectorType.ANOMALY) {
+      return { hsr: data };
+    }
+
+    let cache = await AnalyticUnitCache.findById(analyticUnit.id);
+    if(
+      cache === null ||
+      cache.data.alpha !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).alpha ||
+      cache.data.confidence !== (analyticUnit as AnalyticUnit.AnomalyAnalyticUnit).confidence
+    ) {
+      await runLearning(analyticUnit.id, from, to);
+      cache = await AnalyticUnitCache.findById(analyticUnit.id);
+    }
+    cache = cache.data;

-      const analyticUnitType = analyticUnit.type;
-      const detector = analyticUnit.detectorType;
-      const payload = {
-        data: data.values,
-        analyticUnitType,
-        detector,
-        cache
-      };
+    const analyticUnitType = analyticUnit.type;
+    const detector = analyticUnit.detectorType;
+    const payload = {
+      data: data.values,
+      analyticUnitType,
+      detector,
+      cache
+    };

-      const processingTask = new AnalyticsTask(analyticUnit.id, AnalyticsTaskType.PROCESS, payload);
-      const result = await runTask(processingTask);
-      if(result.status !== AnalyticUnit.AnalyticUnitStatus.SUCCESS) {
-        throw new Error(`Data processing error: ${result.error}`);
-      }
+    const processingTask = new AnalyticsTask(analyticUnit.id, AnalyticsTaskType.PROCESS, payload);
+    const result = await runTask(processingTask);
+    if(result.status !== AnalyticUnit.AnalyticUnitStatus.SUCCESS) {
+      throw new Error(`Data processing error: ${result.error}`);
+    }

-      if(result.payload.lowerBound !== undefined) {
-        resultSeries['lowerBound'] = { values: result.payload.lowerBound, columns: data.columns };
-      }
+    let resultSeries = {
+      hsr: data
+    }
+
+    if(result.payload.lowerBound !== undefined) {
+      resultSeries['lowerBound'] = { values: result.payload.lowerBound, columns: data.columns };
+    }

-      if(result.payload.upperBound !== undefined) {
-        resultSeries['upperBound'] = { values: result.payload.upperBound, columns: data.columns };
-      }
-    }
+    if(result.payload.upperBound !== undefined) {
+      resultSeries['upperBound'] = { values: result.payload.upperBound, columns: data.columns };
+    }

     return resultSeries;
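
Note: with the revert applied, getHSR short-circuits to { hsr: data } for every detector type except ANOMALY, so the response no longer carries a threshold series, and the threshold-specific cache check against value/condition is gone. The restored anomaly path keeps the same cache-invalidation rule: if the cached alpha or confidence no longer matches the unit's current settings, learning is re-run before the PROCESS task is dispatched, and only the lowerBound/upperBound series are attached to the result.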
