Browse Source

Change the pattern filtering logic #366 (#423)

pull/1/head
Alexandr Velikiy 5 years ago committed by rozetko
parent
commit
81c3d79238
  1. 3
      analytics/analytics/models/drop_model.py
  2. 3
      analytics/analytics/models/jump_model.py
  3. 12
      analytics/analytics/models/model.py
  4. 30
      analytics/analytics/models/peak_model.py
  5. 20
      analytics/analytics/models/trough_model.py

3
analytics/analytics/models/drop_model.py

@@ -47,6 +47,7 @@ class DropModel(Model):
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
height_list = learning_info['patterns_value']
del_conv_list = []
delete_pattern_timestamp = []
@@ -58,7 +59,7 @@ class DropModel(Model):
del_conv_drop = scipy.signal.fftconvolve(deleted_drop, self.state['pattern_model'])
if len(del_conv_drop): del_conv_list.append(max(del_conv_drop))
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list, height_list)
self.state['DROP_HEIGHT'] = int(min(learning_info['pattern_height'], default = 1))
self.state['DROP_LENGTH'] = int(max(learning_info['pattern_width'], default = 1))

3
analytics/analytics/models/jump_model.py

@@ -48,6 +48,7 @@ class JumpModel(Model):
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
height_list = learning_info['patterns_value']
del_conv_list = []
delete_pattern_timestamp = []
@@ -59,7 +60,7 @@ class JumpModel(Model):
del_conv_jump = scipy.signal.fftconvolve(deleted_jump, self.state['pattern_model'])
if len(del_conv_jump): del_conv_list.append(max(del_conv_jump))
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list, height_list)
self.state['JUMP_HEIGHT'] = float(min(learning_info['pattern_height'], default = 1))
self.state['JUMP_LENGTH'] = int(max(learning_info['pattern_width'], default = 1))

12
analytics/analytics/models/model.py

@@ -42,6 +42,10 @@ class Segment(AttrDict):
class Model(ABC):
HEIGHT_ERROR = 0.1
CONV_ERROR = 0.2
DEL_CONV_ERROR = 0.02
@abstractmethod
def do_fit(self, dataframe: pd.DataFrame, segments: list, cache: Optional[ModelCache], learning_info: dict) -> None:
pass
@@ -97,11 +101,12 @@ class Model(ABC):
'cache': self.state
}
def _update_fiting_result(self, state: dict, confidences: list, convolve_list: list, del_conv_list: list) -> None:
def _update_fiting_result(self, state: dict, confidences: list, convolve_list: list, del_conv_list: list, height_list: list) -> None:
if type(state) is dict:
state['confidence'] = float(min(confidences, default = 1.5))
state['convolve_min'], state['convolve_max'] = utils.get_min_max(convolve_list, state['WINDOW_SIZE'])
state['conv_del_min'], state['conv_del_max'] = utils.get_min_max(del_conv_list, state['WINDOW_SIZE'])
state['conv_del_min'], state['conv_del_max'] = utils.get_min_max(del_conv_list, 0)
state['height_min'], state['height_max'] = utils.get_min_max(height_list, 0)
else:
raise ValueError('got non-dict as state for update fiting result: {}'.format(state))
@@ -113,6 +118,7 @@ class Model(ABC):
'pattern_height': [],
'pattern_timestamp': [],
'segment_center_list': [],
'patterns_value': [],
}
data = dataframe['value']
for segment in labeled:
@@ -127,9 +133,11 @@ class Model(ABC):
if model == 'peak' or model == 'trough':
learning_info['pattern_height'].append(utils.find_confidence(aligned_segment)[1])
learning_info['pattern_width'].append(utils.find_width(aligned_segment, model_type))
learning_info['patterns_value'].append(aligned_segment.values.max())
if model == 'jump' or model == 'drop':
pattern_height, pattern_length = utils.find_parameters(segment.data, segment.start, model)
learning_info['pattern_height'].append(pattern_height)
learning_info['pattern_width'].append(pattern_length)
learning_info['patterns_value'].append(aligned_segment.values[self.state['WINDOW_SIZE']])
return learning_info

30
analytics/analytics/models/peak_model.py

@@ -20,11 +20,13 @@ class PeakModel(Model):
'pattern_center': [],
'pattern_model': [],
'confidence': 1.5,
'convolve_max': 570000,
'convolve_min': 530000,
'convolve_max': 0,
'convolve_min': 0,
'WINDOW_SIZE': 0,
'conv_del_min': 54000,
'conv_del_max': 55000,
'conv_del_min': 0,
'conv_del_max': 0,
'height_max': 0,
'height_min': 0,
}
def get_model_type(self) -> (str, bool):
@@ -46,6 +48,7 @@ class PeakModel(Model):
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
height_list = learning_info['patterns_value']
del_conv_list = []
delete_pattern_width = []
@@ -61,7 +64,7 @@ class PeakModel(Model):
delete_pattern_height.append(utils.find_confidence(deleted)[1])
delete_pattern_width.append(utils.find_width(deleted, True))
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list, height_list)
def do_detect(self, dataframe: pd.DataFrame):
data = utils.cut_dataframe(dataframe)
@@ -86,9 +89,15 @@ class PeakModel(Model):
close_patterns = utils.close_filtering(segments, variance_error)
segments = utils.best_pattern(close_patterns, data, 'max')
if len(segments) == 0 or len(self.state.get('pattern_center', [])) == 0:
if len(segments) == 0 or len(self.state.get('pattern_model', [])) == 0:
return []
pattern_data = self.state['pattern_model']
up_height = self.state['height_max'] * (1 + self.HEIGHT_ERROR)
low_height = self.state['height_min'] * (1 - self.HEIGHT_ERROR)
up_conv = self.state['convolve_max'] * (1 + 1.5 * self.CONV_ERROR)
low_conv = self.state['convolve_min'] * (1 - self.CONV_ERROR)
up_del_conv = self.state['conv_del_max'] * (1 + self.DEL_CONV_ERROR)
low_del_conv = self.state['conv_del_min'] * (1 - self.DEL_CONV_ERROR)
for segment in segments:
if segment > self.state['WINDOW_SIZE']:
convol_data = utils.get_interval(data, segment, self.state['WINDOW_SIZE'])
@@ -102,9 +111,14 @@ class PeakModel(Model):
convol_data = utils.nan_to_zero(convol_data, nan_list)
pattern_data = utils.nan_to_zero(pattern_data, nan_list)
conv = scipy.signal.fftconvolve(convol_data, pattern_data)
if max(conv) > self.state['convolve_max'] * 1.2 or max(conv) < self.state['convolve_min'] * 0.9:
pattern_height = convol_data.values[self.state['WINDOW_SIZE']]
if pattern_height > up_height or pattern_height < low_height:
delete_list.append(segment)
elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
continue
if max(conv) > up_conv or max(conv) < low_conv:
delete_list.append(segment)
continue
if max(conv) < up_del_conv and max(conv) > low_del_conv:
delete_list.append(segment)
else:
delete_list.append(segment)

20
analytics/analytics/models/trough_model.py

@@ -25,6 +25,8 @@ class TroughModel(Model):
'WINDOW_SIZE': 0,
'conv_del_min': 54000,
'conv_del_max': 55000,
'height_max': 0,
'height_min': 0,
}
def get_model_type(self) -> (str, bool):
@@ -46,6 +48,7 @@ class TroughModel(Model):
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
height_list = learning_info['patterns_value']
del_conv_list = []
delete_pattern_width = []
@@ -61,7 +64,7 @@ class TroughModel(Model):
delete_pattern_height.append(utils.find_confidence(deleted)[1])
delete_pattern_width.append(utils.find_width(deleted, False))
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list, height_list)
def do_detect(self, dataframe: pd.DataFrame):
data = utils.cut_dataframe(dataframe)
@@ -89,6 +92,12 @@ class TroughModel(Model):
segments = []
return segments
pattern_data = self.state['pattern_model']
up_height = self.state['height_max'] * (1 + self.HEIGHT_ERROR)
low_height = self.state['height_min'] * (1 - self.HEIGHT_ERROR)
up_conv = self.state['convolve_max'] * (1 + 1.5 * self.CONV_ERROR)
low_conv = self.state['convolve_min'] * (1 - self.CONV_ERROR)
up_del_conv = self.state['conv_del_max'] * (1 + self.DEL_CONV_ERROR)
low_del_conv = self.state['conv_del_min'] * (1 - self.DEL_CONV_ERROR)
for segment in segments:
if segment > self.state['WINDOW_SIZE']:
convol_data = utils.get_interval(data, segment, self.state['WINDOW_SIZE'])
@@ -102,9 +111,14 @@ class TroughModel(Model):
convol_data = utils.nan_to_zero(convol_data, nan_list)
pattern_data = utils.nan_to_zero(pattern_data, nan_list)
conv = scipy.signal.fftconvolve(convol_data, pattern_data)
if max(conv) > self.state['convolve_max'] * 1.1 or max(conv) < self.state['convolve_min'] * 0.9:
pattern_height = convol_data.values.max()
if pattern_height > up_height or pattern_height < low_height:
delete_list.append(segment)
elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
continue
if max(conv) > up_conv or max(conv) < low_conv:
delete_list.append(segment)
continue
if max(conv) < up_del_conv and max(conv) > low_del_conv:
delete_list.append(segment)
else:
delete_list.append(segment)

Loading…
Cancel
Save