
Segment class #636 (#637)

Alexandr Velikiy committed 5 years ago (committed by rozetko)
commit 6b398d6ee9
21 changed files (lines changed / path):

  21  analytics/analytics/analytic_types/detector_typing.py
  18  analytics/analytics/analytic_types/segment.py
   6  analytics/analytics/analytic_unit_worker.py
  10  analytics/analytics/detectors/anomaly_detector.py
   6  analytics/analytics/detectors/detector.py
   6  analytics/analytics/detectors/pattern_detector.py
  13  analytics/analytics/detectors/threshold_detector.py
   2  analytics/analytics/models/__init__.py
  11  analytics/analytics/models/custom_model.py
  10  analytics/analytics/models/drop_model.py
  10  analytics/analytics/models/general_model.py
  10  analytics/analytics/models/jump_model.py
  68  analytics/analytics/models/model.py
  10  analytics/analytics/models/peak_model.py
  10  analytics/analytics/models/trough_model.py
   1  analytics/analytics/utils/__init__.py
  31  analytics/analytics/utils/common.py
   9  analytics/analytics/utils/segments.py
   4  analytics/tests/test_detectors.py
 117  analytics/tests/test_utils.py
   4  server/src/services/alert_service.ts

analytics/analytics/analytic_types/detector_typing.py (21 lines changed)

@@ -1,16 +1,29 @@
import utils.meta
from analytic_types import ModelCache
from analytic_types.segment import Segment
from typing import List, Optional
@utils.meta.JSONClass
class DetectionResult:
def __init__(
self,
cache: ModelCache = ModelCache(),
segments: list = [],
cache: Optional[ModelCache] = None,
segments: Optional[List[Segment]] = None,
last_detection_time: int = None
):
if cache is None:
cache = {}
if segments is None:
segments = []
self.cache = cache
self.segments = segments
self.last_detection_time = last_detection_time
# TODO: use @utils.meta.JSONClass (now it can't serialize list of objects)
def to_json(self):
return {
'cache': self.cache,
'segments': list(map(lambda segment: segment.to_json(), self.segments)),
'lastDetectionTime': self.last_detection_time
}
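
The mutable default arguments (ModelCache(), []) are replaced with None sentinels, and serialization now delegates to each Segment. A minimal sketch of the resulting behavior, with made-up values:

from analytic_types.detector_typing import DetectionResult
from analytic_types.segment import Segment

# hypothetical cache contents and timestamps
result = DetectionResult(
    cache={'windowSize': 10},
    segments=[Segment(1523889000000, 1523889000005)],
    last_detection_time=1523889000005
)
result.to_json()
# {'cache': {'windowSize': 10},
#  'segments': [{'from': 1523889000000, 'to': 1523889000005}],
#  'lastDetectionTime': 1523889000005}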

analytics/analytics/analytic_types/segment.py (18 lines changed)

@@ -0,0 +1,18 @@
from typing import Optional
class Segment:
'''
Used for segment manipulation instead of { 'from': ..., 'to': ... } dict
'''
def __init__(self, from_timestamp: int, to_timestamp: int):
if to_timestamp < from_timestamp:
raise ValueError(f'Can`t create segment with to < from: {to_timestamp} < {from_timestamp}')
self.from_timestamp = from_timestamp
self.to_timestamp = to_timestamp
def to_json(self):
return {
'from': self.from_timestamp,
'to': self.to_timestamp
}
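
For illustration, how the new class behaves (arbitrary timestamps):

from analytic_types.segment import Segment

segment = Segment(1523889000000, 1523889000010)
segment.to_json()  # {'from': 1523889000000, 'to': 1523889000010}

# reversed bounds are rejected at construction time:
Segment(10, 5)     # ValueError: Can`t create segment with to < from: 5 < 10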

analytics/analytics/analytic_unit_worker.py (6 lines changed)

@@ -59,7 +59,7 @@ class AnalyticUnitWorker:
chunk_dataframe = prepare_data(chunk)
detected = self._detector.detect(chunk_dataframe, cache)
self.__append_detection_result(detection_result, detected)
detection_result.segments = self._detector.get_intersections(detection_result.segments)
detection_result.segments = self._detector.merge_segments(detection_result.segments)
return detection_result.to_json()
def cancel(self):
@@ -77,7 +77,7 @@ class AnalyticUnitWorker:
detected = self._detector.consume_data(chunk_dataframe, cache)
self.__append_detection_result(detection_result, detected)
detection_result.segments = self._detector.get_intersections(detection_result.segments)
detection_result.segments = self._detector.merge_segments(detection_result.segments)
if detection_result.last_detection_time is None:
return None
@@ -85,7 +85,7 @@ class AnalyticUnitWorker:
return detection_result.to_json()
# TODO: move result concatenation to Detectors
def __append_detection_result(self, detection_result: DetectionResult, new_chunk: dict):
def __append_detection_result(self, detection_result: DetectionResult, new_chunk: DetectionResult):
if new_chunk is not None:
detection_result.cache = new_chunk.cache
detection_result.last_detection_time = new_chunk.last_detection_time
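
Taken together, the worker's chunked detection now accumulates typed segments and collapses overlaps once at the end. A rough sketch of the detection loop (chunks, detector and cache are stand-ins for the worker's own state):

detection_result = DetectionResult()
for chunk in chunks:  # hypothetical chunk iterator over the dataframe
    chunk_dataframe = prepare_data(chunk)
    detected = detector.detect(chunk_dataframe, cache)
    if detected is not None:
        # __append_detection_result: the newest cache and detection time win,
        # while segments accumulate across chunks
        detection_result.cache = detected.cache
        detection_result.last_detection_time = detected.last_detection_time
        detection_result.segments.extend(detected.segments)
# adjacent chunks can yield overlapping segments; collapse them once at the end
detection_result.segments = detector.merge_segments(detection_result.segments)
payload = detection_result.to_json()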

analytics/analytics/detectors/anomaly_detector.py (10 lines changed)

@@ -5,6 +5,7 @@ from typing import Optional, Union, List, Tuple
from analytic_types import AnalyticUnitId, ModelCache
from analytic_types.detector_typing import DetectionResult
from analytic_types.data_bucket import DataBucket
from analytic_types.segment import Segment
from detectors import Detector
import utils
@@ -40,9 +41,10 @@ class AnomalyDetector(Detector):
for idx, val in enumerate(data.values):
if val > upper_bound.values[idx] or val < lower_bound.values[idx]:
anomaly_indexes.append(data.index[idx])
# TODO: use Segment in utils
segments = utils.close_filtering(anomaly_indexes, 1)
segments = utils.get_start_and_end_of_segments(segments)
segments = [(
segments = [Segment(
utils.convert_pd_timestamp_to_ms(dataframe['timestamp'][segment[0]]),
utils.convert_pd_timestamp_to_ms(dataframe['timestamp'][segment[1]]),
) for segment in segments]
@@ -67,8 +69,6 @@ class AnomalyDetector(Detector):
break
return level
def get_intersections(self, segments: List[dict]) -> List[dict]:
segments = [[segment['from'], segment['to']] for segment in segments]
segments = utils.merge_intersecting_intervals(segments)
segments = [{'from': segment[0], 'to': segment[1]} for segment in segments]
def merge_segments(self, segments: List[Segment]) -> List[Segment]:
segments = utils.merge_intersecting_segments(segments)
return segments

analytics/analytics/detectors/detector.py (6 lines changed)

@@ -4,6 +4,7 @@ from typing import Optional, Union, List
from analytic_types import ModelCache
from analytic_types.detector_typing import DetectionResult
from analytic_types.segment import Segment
class Detector(ABC):
@@ -27,6 +28,5 @@ class Detector(ABC):
def get_window_size(self, cache: Optional[ModelCache]) -> int:
pass
@abstractmethod
def get_intersections(self, segments: List[dict]) -> List[dict]:
pass
def merge_segments(self, segments: List[Segment]) -> List[Segment]:
return segments

analytics/analytics/detectors/pattern_detector.py (6 lines changed)

@@ -12,6 +12,7 @@ from analytic_types.data_bucket import DataBucket
from utils import convert_pd_timestamp_to_ms
from analytic_types import AnalyticUnitId, ModelCache
from analytic_types.detector_typing import DetectionResult
from analytic_types.segment import Segment
logger = logging.getLogger('PATTERN_DETECTOR')
@@ -78,7 +79,7 @@ class PatternDetector(Detector):
detected = self.model.detect(dataframe, self.analytic_unit_id)
segments = [{ 'from': segment[0], 'to': segment[1] } for segment in detected['segments']]
segments = [Segment(segment[0], segment[1]) for segment in detected['segments']]
new_cache = detected['cache'].to_json()
last_dataframe_time = dataframe.iloc[-1]['timestamp']
last_detection_time = convert_pd_timestamp_to_ms(last_dataframe_time)
@@ -123,7 +124,6 @@ class PatternDetector(Detector):
def get_window_size(self, cache: Optional[ModelCache]) -> int:
if cache is None: return self.DEFAULT_WINDOW_SIZE
# TODO: windowSize -> window_size
return cache.get('windowSize', self.DEFAULT_WINDOW_SIZE)
def get_intersections(self, segments: List[dict]) -> List[dict]:
return segments

analytics/analytics/detectors/threshold_detector.py (13 lines changed)

@@ -6,6 +6,7 @@ from typing import Optional, List
from analytic_types import ModelCache
from analytic_types.detector_typing import DetectionResult
from analytic_types.segment import Segment
from detectors import Detector
from time import time
from utils import convert_sec_to_ms, convert_pd_timestamp_to_ms
@@ -40,17 +41,16 @@ class ThresholdDetector(Detector):
segments = []
for index, row in dataframe.iterrows():
current_value = row['value']
current_timestamp = convert_pd_timestamp_to_ms(row['timestamp'])
segment = { 'from': current_timestamp, 'to': current_timestamp }
segment = Segment(current_timestamp, current_timestamp)
# TODO: merge segments
if pd.isnull(row['value']):
if pd.isnull(current_value):
if condition == 'NO_DATA':
segment['params'] = { value: None }
segments.append(segment)
continue
current_value = row['value']
segment['params'] = { value: row['value'] }
if condition == '>':
if current_value > value:
segments.append(segment)
@@ -78,6 +78,3 @@ class ThresholdDetector(Detector):
def get_window_size(self, cache: Optional[ModelCache]) -> int:
return self.WINDOW_SIZE
def get_intersections(self, segments: List[dict]) -> List[dict]:
return segments
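
With Segment in place, each matching row yields a zero-length segment (from == to). A simplified standalone sketch of the '>' branch, assuming a dataframe with 'timestamp' and 'value' columns:

import pandas as pd
from analytic_types.segment import Segment
from utils import convert_pd_timestamp_to_ms

def detect_over_threshold(dataframe: pd.DataFrame, value: float) -> list:
    # hypothetical helper mirroring the '>' condition above
    segments = []
    for _, row in dataframe.iterrows():
        if pd.isnull(row['value']):
            continue  # NO_DATA handling omitted in this sketch
        if row['value'] > value:
            current_timestamp = convert_pd_timestamp_to_ms(row['timestamp'])
            segments.append(Segment(current_timestamp, current_timestamp))
    return segments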

analytics/analytics/models/__init__.py (2 lines changed)

@@ -1,4 +1,4 @@
from models.model import Model, ModelState
from models.model import Model, ModelState, AnalyticSegment
from models.drop_model import DropModel, DropModelState
from models.peak_model import PeakModel, PeakModelState
from models.jump_model import JumpModel, JumpModelState

analytics/analytics/models/custom_model.py (11 lines changed)

@@ -1,10 +1,17 @@
from models import Model
from models import Model, AnalyticSegment
import utils
import pandas as pd
from typing import List
class CustomModel(Model):
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
pass
def do_detect(self, dataframe: pd.DataFrame) -> list:

analytics/analytics/models/drop_model.py (10 lines changed)

@@ -1,4 +1,4 @@
from models import Model, ModelState
from models import Model, ModelState, AnalyticSegment
import scipy.signal
from scipy.fftpack import fft
@@ -43,7 +43,13 @@ class DropModel(Model):
def get_state(self, cache: Optional[dict] = None) -> DropModelState:
return DropModelState.from_json(cache)
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: List[dict], deleted_segments: List[dict], learning_info: dict) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state.window_size

analytics/analytics/models/general_model.py (10 lines changed)

@@ -1,5 +1,5 @@
from analytic_types import AnalyticUnitId
from models import Model, ModelState
from models import Model, ModelState, AnalyticSegment
from typing import Union, List, Generator
import utils
import utils.meta
@@ -43,7 +43,13 @@ class GeneralModel(Model):
def get_state(self, cache: Optional[dict] = None) -> GeneralModelState:
return GeneralModelState.from_json(cache)
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: List[dict], deleted_segments: List[dict], learning_info: dict) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
data = utils.cut_dataframe(dataframe)
data = data['value']
last_pattern_center = self.state.pattern_center

analytics/analytics/models/jump_model.py (10 lines changed)

@@ -1,4 +1,4 @@
from models import Model, ModelState
from models import Model, ModelState, AnalyticSegment
import utils
import utils.meta
@@ -44,7 +44,13 @@ class JumpModel(Model):
def get_state(self, cache: Optional[dict] = None) -> JumpModelState:
return JumpModelState.from_json(cache)
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: List[dict], deleted_segments: List[dict], learning_info: dict) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state.window_size

analytics/analytics/models/model.py (68 lines changed)

@@ -6,30 +6,50 @@ from typing import Optional, List, Tuple
import pandas as pd
import math
import logging
from analytic_types import AnalyticUnitId
from analytic_types import AnalyticUnitId, ModelCache
from analytic_types.segment import Segment
import utils.meta
class Segment(AttrDict):
class AnalyticSegment(Segment):
'''
Segment with specific analytics fields used by models:
- `labeled` / `deleted` flags
- `from` / `to` / `center` indices
- `length`
- `data`
- etc
'''
def __init__(self, dataframe: pd.DataFrame, segment_map: dict, center_finder = None):
self.update(segment_map)
self.start = utils.timestamp_to_index(dataframe, pd.to_datetime(self['from'], unit='ms'))
self.end = utils.timestamp_to_index(dataframe, pd.to_datetime(self['to'], unit='ms'))
self.length = abs(self.end - self.start)
def __init__(
self,
from_timestamp: int,
to_timestamp: int,
labeled: bool,
deleted: bool,
dataframe: pd.DataFrame,
center_finder = None
):
super().__init__(from_timestamp, to_timestamp)
self.labeled = labeled
self.deleted = deleted
self.from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(self.from_timestamp, unit='ms'))
self.to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(self.to_timestamp, unit='ms'))
self.length = abs(self.to_index - self.from_index)
self.__percent_of_nans = 0
if callable(center_finder):
self.center_index = center_finder(dataframe, self.start, self.end)
self.center_index = center_finder(dataframe, self.from_index, self.to_index)
self.pattern_timestamp = dataframe['timestamp'][self.center_index]
else:
self.center_index = self.start + math.ceil(self.length / 2)
self.center_index = self.from_index + math.ceil(self.length / 2)
self.pattern_timestamp = dataframe['timestamp'][self.center_index]
assert len(dataframe['value']) >= self.end + 1, \
'segment {}-{} out of dataframe length={}'.format(self.start, self.end+1, len(dataframe['value']))
assert len(dataframe['value']) >= self.to_index + 1, \
'segment {}-{} out of dataframe length={}'.format(self.from_index, self.to_index + 1, len(dataframe['value']))
self.data = dataframe['value'][self.start: self.end + 1]
self.data = dataframe['value'][self.from_index: self.to_index + 1]
@property
def percent_of_nans(self):
@@ -71,7 +91,13 @@ class Model(ABC):
DEL_CONV_ERROR = 0.02
@abstractmethod
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: List[dict], deleted_segments: List[dict], learning_info: dict) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
pass
@abstractmethod
@@ -87,7 +113,7 @@ class Model(ABC):
pass
@abstractmethod
def get_state(self, cache: Optional[dict] = None) -> ModelState:
def get_state(self, cache: Optional[ModelCache] = None) -> ModelState:
pass
def fit(self, dataframe: pd.DataFrame, segments: List[dict], id: AnalyticUnitId) -> ModelState:
@@ -98,9 +124,16 @@ class Model(ABC):
deleted = []
for segment_map in segments:
if segment_map['labeled'] or segment_map['deleted']:
segment = Segment(dataframe, segment_map, self.find_segment_center)
segment = AnalyticSegment(
segment_map['from'],
segment_map['to'],
segment_map['labeled'],
segment_map['deleted'],
dataframe,
self.find_segment_center
)
if segment.percent_of_nans > 0.1 or len(segment.data) == 0:
logging.debug(f'segment {segment.start}-{segment.end} skip because of invalid data')
logging.debug(f'segment {segment.from_index}-{segment.to_index} skip because of invalid data')
continue
if segment.percent_of_nans > 0:
segment.convert_nan_to_zero()
@@ -113,6 +146,7 @@ class Model(ABC):
if self.state.window_size == 0:
self.state.window_size = math.ceil(max_length / 2) if max_length else 0
model, model_type = self.get_model_type()
# TODO: learning_info: dict -> class
learning_info = self.get_parameters_from_segments(dataframe, labeled, deleted, model, model_type)
self.do_fit(dataframe, labeled, deleted, learning_info)
logging.debug('fit complete successful with self.state: {} for analytic unit: {}'.format(self.state, id))
@@ -169,7 +203,7 @@ class Model(ABC):
learning_info['pattern_height'].append(utils.find_confidence(aligned_segment)[1])
learning_info['patterns_value'].append(aligned_segment.values.max())
if model == 'jump' or model == 'drop':
pattern_height, pattern_length = utils.find_parameters(segment.data, segment.start, model)
pattern_height, pattern_length = utils.find_parameters(segment.data, segment.from_index, model)
learning_info['pattern_height'].append(pattern_height)
learning_info['pattern_width'].append(pattern_length)
learning_info['patterns_value'].append(aligned_segment.values[self.state.window_size])
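
End to end, fit() now turns each labeled/deleted segment_map dict into an AnalyticSegment whose timestamp bounds are resolved to dataframe indices. A sketch with a hypothetical dataframe, assuming timestamp_to_index maps exact timestamps to their row index:

import pandas as pd
from models import AnalyticSegment

# hypothetical five-point series; raw timestamps are epoch milliseconds
timestamps = [1523889000000 + i * 1000 for i in range(5)]
dataframe = pd.DataFrame({
    'timestamp': pd.to_datetime(timestamps, unit='ms'),
    'value': [0.0, 1.0, 5.0, 1.0, 0.0]
})

segment = AnalyticSegment(
    from_timestamp=timestamps[1],
    to_timestamp=timestamps[3],
    labeled=True,
    deleted=False,
    dataframe=dataframe,
    center_finder=None  # falls back to the midpoint of the segment
)
segment.from_index, segment.to_index  # (1, 3)
segment.length                        # 2, i.e. abs(to_index - from_index)
list(segment.data)                    # [1.0, 5.0, 1.0]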

analytics/analytics/models/peak_model.py (10 lines changed)

@@ -1,4 +1,4 @@
from models import Model, ModelState
from models import Model, ModelState, AnalyticSegment
import scipy.signal
from scipy.fftpack import fft
@@ -45,7 +45,13 @@ class PeakModel(Model):
def get_state(self, cache: Optional[dict] = None) -> PeakModelState:
return PeakModelState.from_json(cache)
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: List[dict], deleted_segments: List[dict], learning_info: dict) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state.window_size

analytics/analytics/models/trough_model.py (10 lines changed)

@@ -1,4 +1,4 @@
from models import Model, ModelState
from models import Model, ModelState, AnalyticSegment
import scipy.signal
from scipy.fftpack import fft
@@ -45,7 +45,13 @@ class TroughModel(Model):
def get_state(self, cache: Optional[dict] = None) -> TroughModelState:
return TroughModelState.from_json(cache)
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: List[dict], deleted_segments: List[dict], learning_info: dict) -> None:
def do_fit(
self,
dataframe: pd.DataFrame,
labeled_segments: List[AnalyticSegment],
deleted_segments: List[AnalyticSegment],
learning_info: dict
) -> None:
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state.window_size

analytics/analytics/utils/__init__.py (1 line changed)

@@ -1,4 +1,3 @@
from utils.common import *
from utils.segments import *
from utils.time import *
from utils.dataframe import *

analytics/analytics/utils/common.py (31 lines changed)

@@ -11,6 +11,7 @@ import utils
import logging
from itertools import islice
from collections import deque
from analytic_types.segment import Segment
SHIFT_FACTOR = 0.05
CONFIDENCE_FACTOR = 0.5
@@ -127,24 +128,22 @@ def close_filtering(pattern_list: List[int], win_size: int) -> List[Tuple[int, i
s.append([pattern_list[i]])
return s
def merge_intersecting_intervals(intervals: List[Tuple[int, int]]) -> List[Tuple[int, int]]:
def merge_intersecting_segments(segments: List[Segment]) -> List[Segment]:
'''
At the entrance - list of intervals with start and end.
Find intersecting intervals in this list and merge it.
Find intersecting segments in segments list and merge it.
'''
if len(intervals) < 2:
return intervals
intervals = sorted(intervals)
last_couple = intervals[0]
for i in range(1,len(intervals)):
if intervals[i][0] <= last_couple[1]:
intervals[i][0] = min(last_couple[0], intervals[i][0])
intervals[i][1] = max(last_couple[1], intervals[i][1])
intervals[i-1] = []
last_couple = intervals[i]
intervals = [x for x in intervals if x != []]
return intervals
if len(segments) < 2:
return segments
segments = sorted(segments, key = lambda segment: segment.from_timestamp)
previous_segment = segments[0]
for i in range(1, len(segments)):
if segments[i].from_timestamp <= previous_segment.to_timestamp:
segments[i].from_timestamp = min(previous_segment.from_timestamp, segments[i].from_timestamp)
segments[i].to_timestamp = max(previous_segment.to_timestamp, segments[i].to_timestamp)
segments[i - 1] = None
previous_segment = segments[i]
segments = [x for x in segments if x is not None]
return segments
def get_start_and_end_of_segments(segments: List[List[int]]) -> List[Tuple[int, int]]:
'''
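
A worked run of the rewritten merge (same data as one of the new test cases below):

import utils
from analytic_types.segment import Segment

merged = utils.merge_intersecting_segments([
    Segment(10, 20), Segment(13, 23), Segment(25, 30), Segment(35, 40)
])
# [10, 20] and [13, 23] overlap and collapse; the rest stay separate
[(s.from_timestamp, s.to_timestamp) for s in merged]
# [(10, 23), (25, 30), (35, 40)]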

analytics/analytics/utils/segments.py (9 lines changed)

@@ -1,9 +0,0 @@
import pandas as pd
from utils.common import timestamp_to_index
def parse_segment(segment: dict, dataframe: pd.DataFrame):
start = timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
end = timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
data = dataframe['value'][start: end + 1]
return {'from': start, 'to': end, 'data': data}

analytics/tests/test_detectors.py (4 lines changed)

@@ -44,5 +44,5 @@ class TestAnomalyDetector(unittest.TestCase):
}
detector = anomaly_detector.AnomalyDetector()
detect_result = detector.detect(dataframe, cache)
result = [(1523889000005.0, 1523889000005.0)]
self.assertEqual(result, detect_result.segments)
result = [{ 'from': 1523889000005.0, 'to': 1523889000005.0 }]
self.assertEqual(result, detect_result.to_json()['segments'])

analytics/tests/test_utils.py (117 lines changed)

@@ -1,3 +1,5 @@
from analytic_types.segment import Segment
import utils
import unittest
import numpy as np
@@ -291,9 +293,8 @@ class TestUtils(unittest.TestCase):
segments = [[1, 2, 3, 4], [5, 6, 7], [8], [], [12, 12]]
result = [[1, 4], [5, 7], [8, 8], [12, 12]]
utils_result = utils.get_start_and_end_of_segments(segments)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
for got, expected in zip(utils_result, result):
self.assertEqual(got, expected)
def test_get_start_and_end_of_segments_empty(self):
segments = []
@@ -301,73 +302,51 @@
utils_result = utils.get_start_and_end_of_segments(segments)
self.assertEqual(result, utils_result)
def test_merge_intersecting_intervals(self):
index = [[10, 20], [30, 40]]
result = [[10, 20], [30, 40]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
def test_merge_intersecting_intervals_1(self):
index = [[10, 20], [13, 23], [15, 17], [20, 40]]
result = [[10, 40]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
def test_merge_intersecting_intervals_empty(self):
index = []
result = []
utils_result = utils.merge_intersecting_intervals(index)
self.assertEqual(result, utils_result)
def test_merge_intersecting_intervals_one(self):
index = [[10, 20]]
result = [[10, 20]]
utils_result = utils.merge_intersecting_intervals(index)
self.assertEqual(result, utils_result)
def test_merge_intersecting_intervals_2(self):
index = [[10, 20], [13, 23], [25, 30], [35, 40]]
result = [[10, 23], [25, 30], [35, 40]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
def test_merge_intersecting_intervals_3(self):
index = [[10, 50], [5, 40], [15, 25], [6, 50]]
result = [[5, 50]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
def test_merge_intersecting_intervals_4(self):
index = [[5, 10], [10, 20], [25, 50]]
result = [[5, 20], [25, 50]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
def test_merge_intersecting_intervals_5(self):
index = [[20, 40], [10, 15], [50, 60]]
result = [[10, 15], [20, 40], [50, 60]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
def test_merge_intersecting_segments(self):
test_cases = [
{
'index': [Segment(10, 20), Segment(30, 40)],
'result': [[10, 20], [30, 40]]
},
{
'index': [Segment(10, 20), Segment(13, 23), Segment(15, 17), Segment(20, 40)],
'result': [[10, 40]]
},
{
'index': [],
'result': []
},
{
'index': [Segment(10, 20)],
'result': [[10, 20]]
},
{
'index': [Segment(10, 20), Segment(13, 23), Segment(25, 30), Segment(35, 40)],
'result': [[10, 23], [25, 30], [35, 40]]
},
{
'index': [Segment(10, 50), Segment(5, 40), Segment(15, 25), Segment(6, 50)],
'result': [[5, 50]]
},
{
'index': [Segment(5, 10), Segment(10, 20), Segment(25, 50)],
'result': [[5, 20], [25, 50]]
},
{
'index': [Segment(20, 40), Segment(10, 15), Segment(50, 60)],
'result': [[10, 15], [20, 40], [50, 60]]
},
{
'index': [Segment(20, 40), Segment(10, 20), Segment(50, 60)],
'result': [[10, 40], [50, 60]]
},
]
def test_merge_intersecting_intervals_6(self):
index = [[20, 40], [10, 20], [50, 60]]
result = [[10, 40], [50, 60]]
utils_result = utils.merge_intersecting_intervals(index)
for idx, val in enumerate(utils_result):
self.assertEqual(result[idx][0], val[0])
self.assertEqual(result[idx][1], val[1])
for case in test_cases:
utils_result = utils.merge_intersecting_segments(case['index'])
for got, expected in zip(utils_result, case['result']):
self.assertEqual(got.from_timestamp, expected[0])
self.assertEqual(got.to_timestamp, expected[1])
if __name__ == '__main__':
unittest.main()

server/src/services/alert_service.ts (4 lines changed)

@@ -26,9 +26,7 @@ export class Alert {
from: segment.from,
to: segment.to
};
if(segment.params) {
alert.params = segment.params;
}
return alert;
}
}
