|
|
@@ -6,7 +6,7 @@ import math
 from typing import Optional, Union, List, Tuple
 
 from analytic_types import AnalyticUnitId, ModelCache
-from analytic_types.detector_typing import DetectionResult, AnomalyProcessingResult
+from analytic_types.detector_typing import DetectionResult, ProcessingResult
 from analytic_types.data_bucket import DataBucket
 from analytic_types.segment import Segment
 from detectors import Detector, ProcessingDetector
@@ -173,7 +173,7 @@ class AnomalyDetector(ProcessingDetector):
         return result
 
     # TODO: remove duplication with detect()
-    def process_data(self, dataframe: pd.DataFrame, cache: ModelCache) -> AnomalyProcessingResult:
+    def process_data(self, dataframe: pd.DataFrame, cache: ModelCache) -> ProcessingResult:
         segments = self.get_value_from_cache(cache, 'segments')
         alpha = self.get_value_from_cache(cache, 'alpha', required = True)
         confidence = self.get_value_from_cache(cache, 'confidence', required = True)
@@ -212,11 +212,11 @@ class AnomalyDetector(ProcessingDetector):
         upper_bound_timeseries = list(zip(timestamps, upper_bound.values.tolist()))
 
         if enable_bounds == Bound.ALL:
-            return AnomalyProcessingResult(lower_bound_timeseries, upper_bound_timeseries)
+            return ProcessingResult(lower_bound_timeseries, upper_bound_timeseries)
         elif enable_bounds == Bound.UPPER:
-            return AnomalyProcessingResult(upper_bound = upper_bound_timeseries)
+            return ProcessingResult(upper_bound = upper_bound_timeseries)
         elif enable_bounds == Bound.LOWER:
-            return AnomalyProcessingResult(lower_bound = lower_bound_timeseries)
+            return ProcessingResult(lower_bound = lower_bound_timeseries)
 
     def add_season_to_data(self, data: pd.Series, segment: pd.Series, offset: int, seasonality: int, bound_type: Bound) -> pd.Series:
         #data - smoothed data to which seasonality will be added
@@ -239,22 +239,6 @@ class AnomalyDetector(ProcessingDetector):
 
         return data[:len_smoothed_data]
 
-    def concat_processing_results(self, processing_results: List[AnomalyProcessingResult]) -> Optional[AnomalyProcessingResult]:
-        if len(processing_results) == 0:
-            return None
-
-        united_result = AnomalyProcessingResult()
-        for result in processing_results:
-            if result.lower_bound is not None:
-                if united_result.lower_bound is None: united_result.lower_bound = []
-                united_result.lower_bound.extend(result.lower_bound)
-
-            if result.upper_bound is not None:
-                if united_result.upper_bound is None: united_result.upper_bound = []
-                united_result.upper_bound.extend(result.upper_bound)
-
-        return united_result
-
     def get_bounds_for_segment(self, segment: pd.Series) -> Tuple[pd.Series, pd.Series]:
         '''
         segment is divided by the median to determine its top and bottom parts
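
Taken together, these hunks rename AnomalyProcessingResult to ProcessingResult and delete the AnomalyDetector-specific concat_processing_results override. A shared concatenation helper on the base ProcessingDetector would make that override redundant; the sketch below shows one minimal way such a helper could look, assuming a ProcessingResult with optional lower_bound/upper_bound fields as in the removed code. The base class is not part of this diff, so the placement, dataclass form, and signature here are assumptions rather than the project's actual implementation; the only behaviour taken from the diff is the None-preserving merge logic.

# Hypothetical sketch, not part of this diff: a generic merge of per-chunk
# ProcessingResult objects, equivalent to the removed override above.
from dataclasses import dataclass
from typing import List, Optional, Tuple

TimeSeries = List[Tuple[int, float]]

@dataclass
class ProcessingResult:
    lower_bound: Optional[TimeSeries] = None
    upper_bound: Optional[TimeSeries] = None

def concat_processing_results(results: List[ProcessingResult]) -> Optional[ProcessingResult]:
    if len(results) == 0:
        return None

    united = ProcessingResult()
    for result in results:
        if result.lower_bound is not None:
            # keep a bound absent (None) unless at least one chunk produced it
            united.lower_bound = (united.lower_bound or []) + result.lower_bound
        if result.upper_bound is not None:
            united.upper_bound = (united.upper_bound or []) + result.upper_bound
    return united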
|
|
|