Browse Source

Update state during learning #406 (#408)

pull/1/head
Alexandr Velikiy 6 years ago committed by rozetko
parent
commit
7ab0db6ba9
  1. 21
      analytics/analytics/models/drop_model.py
  2. 21
      analytics/analytics/models/general_model.py
  3. 21
      analytics/analytics/models/jump_model.py
  4. 12
      analytics/analytics/models/model.py
  5. 21
      analytics/analytics/models/peak_model.py
  6. 21
      analytics/analytics/models/trough_model.py
  7. 9
      analytics/analytics/utils/common.py
  8. 54
      analytics/tests/test_dataset.py

21
analytics/analytics/models/drop_model.py

@ -15,14 +15,14 @@ class DropModel(Model):
super()
self.segments = []
self.state = {
'idrops': [],
'model_drop': [],
'pattern_center': [],
'pattern_model': [],
'confidence': 1.5,
'convolve_max': 200,
'convolve_min': 200,
'DROP_HEIGHT': 1,
'DROP_LENGTH': 1,
'WINDOW_SIZE': 240,
'WINDOW_SIZE': 0,
'conv_del_min': 54000,
'conv_del_max': 55000,
}
@ -42,10 +42,11 @@ class DropModel(Model):
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state['WINDOW_SIZE']
self.state['idrops'] = learning_info['segment_center_list']
self.state['model_drop'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['idrops'], self.state['model_drop'], data, window_size)
correlation_list = utils.get_correlation(self.state['idrops'], self.state['model_drop'], data, window_size)
last_pattern_center = self.state.get('pattern_center', [])
self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
del_conv_list = []
delete_pattern_timestamp = []
@ -54,7 +55,7 @@ class DropModel(Model):
delete_pattern_timestamp.append(segment.pattern_timestamp)
deleted_drop = utils.get_interval(data, segment_cent_index, window_size)
deleted_drop = utils.subtract_min_without_nan(deleted_drop)
del_conv_drop = scipy.signal.fftconvolve(deleted_drop, self.state['model_drop'])
del_conv_drop = scipy.signal.fftconvolve(deleted_drop, self.state['pattern_model'])
if len(del_conv_drop): del_conv_list.append(max(del_conv_drop))
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
@ -73,10 +74,10 @@ class DropModel(Model):
variance_error = self.state['WINDOW_SIZE']
close_patterns = utils.close_filtering(segments, variance_error)
segments = utils.best_pattern(close_patterns, data, 'min')
if len(segments) == 0 or len(self.state['idrops']) == 0 :
if len(segments) == 0 or len(self.state.get('pattern_center', [])) == 0:
segments = []
return segments
pattern_data = self.state['model_drop']
pattern_data = self.state['pattern_model']
for segment in segments:
if segment > self.state['WINDOW_SIZE'] and segment < (len(data) - self.state['WINDOW_SIZE']):
convol_data = utils.get_interval(data, segment, self.state['WINDOW_SIZE'])

21
analytics/analytics/models/general_model.py

@ -16,11 +16,11 @@ class GeneralModel(Model):
def __init__(self):
super()
self.state = {
'ipats': [],
'model_gen': [],
'pattern_center': [],
'pattern_model': [],
'convolve_max': 240,
'convolve_min': 200,
'WINDOW_SIZE': 240,
'WINDOW_SIZE': 0,
'conv_del_min': 100,
'conv_del_max': 120,
}
@ -40,10 +40,11 @@ class GeneralModel(Model):
def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
data = utils.cut_dataframe(dataframe)
data = data['value']
self.state['ipats'] = learning_info['segment_center_list']
self.state['model_gen'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['ipats'], self.state['model_gen'], data, self.state['WINDOW_SIZE'])
correlation_list = utils.get_correlation(self.state['ipats'], self.state['model_gen'], data, self.state['WINDOW_SIZE'])
last_pattern_center = self.state.get('pattern_center', [])
self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, self.state['WINDOW_SIZE'])
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, self.state['WINDOW_SIZE'])
del_conv_list = []
delete_pattern_timestamp = []
@ -52,7 +53,7 @@ class GeneralModel(Model):
delete_pattern_timestamp.append(segment.pattern_timestamp)
deleted_pat = utils.get_interval(data, del_mid_index, self.state['WINDOW_SIZE'])
deleted_pat = utils.subtract_min_without_nan(deleted_pat)
del_conv_pat = scipy.signal.fftconvolve(deleted_pat, self.state['model_gen'])
del_conv_pat = scipy.signal.fftconvolve(deleted_pat, self.state['pattern_model'])
if len(del_conv_pat): del_conv_list.append(max(del_conv_pat))
self.state['convolve_min'], self.state['convolve_max'] = utils.get_min_max(convolve_list, self.state['WINDOW_SIZE'] / 3)
@ -61,7 +62,7 @@ class GeneralModel(Model):
def do_detect(self, dataframe: pd.DataFrame) -> list:
data = utils.cut_dataframe(dataframe)
data = data['value']
pat_data = self.state['model_gen']
pat_data = self.state['pattern_model']
if pat_data.count(0) == len(pat_data):
raise ValueError('Labeled patterns must not be empty')
@ -77,7 +78,7 @@ class GeneralModel(Model):
return set(item + self.state['WINDOW_SIZE'] for item in filtered)
def __filter_detection(self, segments: list, data: list):
if len(segments) == 0 or len(self.state['ipats']) == 0:
if len(segments) == 0 or len(self.state.get('pattern_center', [])) == 0:
return []
delete_list = []
for val in segments:

21
analytics/analytics/models/jump_model.py

@ -16,14 +16,14 @@ class JumpModel(Model):
super()
self.segments = []
self.state = {
'ijumps': [],
'model_jump': [],
'pattern_center': [],
'pattern_model': [],
'confidence': 1.5,
'convolve_max': 230,
'convolve_min': 230,
'JUMP_HEIGHT': 1,
'JUMP_LENGTH': 1,
'WINDOW_SIZE': 240,
'WINDOW_SIZE': 0,
'conv_del_min': 54000,
'conv_del_max': 55000,
}
@ -43,10 +43,11 @@ class JumpModel(Model):
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state['WINDOW_SIZE']
self.state['ijumps'] = learning_info['segment_center_list']
self.state['model_jump'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['ijumps'], self.state['model_jump'], data, window_size)
correlation_list = utils.get_correlation(self.state['ijumps'], self.state['model_jump'], data, window_size)
last_pattern_center = self.state.get('pattern_center', [])
self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
del_conv_list = []
delete_pattern_timestamp = []
@ -55,7 +56,7 @@ class JumpModel(Model):
delete_pattern_timestamp.append(segment.pattern_timestamp)
deleted_jump = utils.get_interval(data, segment_cent_index, window_size)
deleted_jump = utils.subtract_min_without_nan(deleted_jump)
del_conv_jump = scipy.signal.fftconvolve(deleted_jump, self.state['model_jump'])
del_conv_jump = scipy.signal.fftconvolve(deleted_jump, self.state['pattern_model'])
if len(del_conv_jump): del_conv_list.append(max(del_conv_jump))
self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
@ -75,10 +76,10 @@ class JumpModel(Model):
close_patterns = utils.close_filtering(segments, variance_error)
segments = utils.best_pattern(close_patterns, data, 'max')
if len(segments) == 0 or len(self.state['ijumps']) == 0 :
if len(segments) == 0 or len(self.state.get('pattern_center', [])) == 0:
segments = []
return segments
pattern_data = self.state['model_jump']
pattern_data = self.state['pattern_model']
upper_bound = self.state['convolve_max'] * 1.2
lower_bound = self.state['convolve_min'] * 0.8
delete_up_bound = self.state['conv_del_max'] * 1.02

12
analytics/analytics/models/model.py

@ -59,7 +59,8 @@ class Model(ABC):
pass
def fit(self, dataframe: pd.DataFrame, segments: list, cache: Optional[ModelCache]) -> ModelCache:
if type(cache) is ModelCache:
data = dataframe['value']
if type(cache) is ModelCache and cache:
self.state = cache
max_length = 0
labeled = []
@ -74,10 +75,15 @@ class Model(ABC):
max_length = max(segment.length, max_length)
if segment.labeled: labeled.append(segment)
if segment.deleted: deleted.append(segment)
self.state['WINDOW_SIZE'] = math.ceil(max_length / 2) if max_length else 0
if self.state.get('WINDOW_SIZE') == 0:
self.state['WINDOW_SIZE'] = math.ceil(max_length / 2) if max_length else 0
model, model_type = self.get_model_type()
learning_info = self.get_parameters_from_segments(dataframe, labeled, deleted, model, model_type)
if self.state.get('pattern_center') and self.state.get('pattern_model'):
for center in self.state['pattern_center']:
aligned_segment = utils.get_interval(data, center, self.state['WINDOW_SIZE'])
aligned_segment = utils.subtract_min_without_nan(aligned_segment)
learning_info['patterns_list'].append(aligned_segment)
self.do_fit(dataframe, labeled, deleted, learning_info)
return self.state

21
analytics/analytics/models/peak_model.py

@ -17,12 +17,12 @@ class PeakModel(Model):
super()
self.segments = []
self.state = {
'ipeaks': [],
'model_peak': [],
'pattern_center': [],
'pattern_model': [],
'confidence': 1.5,
'convolve_max': 570000,
'convolve_min': 530000,
'WINDOW_SIZE': 240,
'WINDOW_SIZE': 0,
'conv_del_min': 54000,
'conv_del_max': 55000,
}
@ -41,10 +41,11 @@ class PeakModel(Model):
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state['WINDOW_SIZE']
self.state['ipeaks'] = learning_info['segment_center_list']
self.state['model_peak'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['ipeaks'], self.state['model_peak'], data, window_size)
correlation_list = utils.get_correlation(self.state['ipeaks'], self.state['model_peak'], data, window_size)
last_pattern_center = self.state.get('pattern_center', [])
self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
del_conv_list = []
delete_pattern_width = []
@ -55,7 +56,7 @@ class PeakModel(Model):
delete_pattern_timestamp.append(segment.pattern_timestamp)
deleted = utils.get_interval(data, del_max_index, window_size)
deleted = utils.subtract_min_without_nan(deleted)
del_conv = scipy.signal.fftconvolve(deleted, self.state['model_peak'])
del_conv = scipy.signal.fftconvolve(deleted, self.state['pattern_model'])
if len(del_conv): del_conv_list.append(max(del_conv))
delete_pattern_height.append(utils.find_confidence(deleted)[1])
delete_pattern_width.append(utils.find_width(deleted, True))
@ -85,9 +86,9 @@ class PeakModel(Model):
close_patterns = utils.close_filtering(segments, variance_error)
segments = utils.best_pattern(close_patterns, data, 'max')
if len(segments) == 0 or len(self.state['ipeaks']) == 0:
if len(segments) == 0 or len(self.state.get('pattern_center', [])) == 0:
return []
pattern_data = self.state['model_peak']
pattern_data = self.state['pattern_model']
for segment in segments:
if segment > self.state['WINDOW_SIZE']:
convol_data = utils.get_interval(data, segment, self.state['WINDOW_SIZE'])

21
analytics/analytics/models/trough_model.py

@ -17,12 +17,12 @@ class TroughModel(Model):
super()
self.segments = []
self.state = {
'itroughs': [],
'model_trough': [],
'pattern_center': [],
'pattern_model': [],
'confidence': 1.5,
'convolve_max': 570000,
'convolve_min': 530000,
'WINDOW_SIZE': 240,
'WINDOW_SIZE': 0,
'conv_del_min': 54000,
'conv_del_max': 55000,
}
@ -41,10 +41,11 @@ class TroughModel(Model):
data = utils.cut_dataframe(dataframe)
data = data['value']
window_size = self.state['WINDOW_SIZE']
self.state['itroughs'] = learning_info['segment_center_list']
self.state['model_trough'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['itroughs'], self.state['model_trough'], data, window_size)
correlation_list = utils.get_correlation(self.state['itroughs'], self.state['model_trough'], data, window_size)
last_pattern_center = self.state.get('pattern_center', [])
self.state['pattern_center'] = list(set(last_pattern_center + learning_info['segment_center_list']))
self.state['pattern_model'] = utils.get_av_model(learning_info['patterns_list'])
convolve_list = utils.get_convolve(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
correlation_list = utils.get_correlation(self.state['pattern_center'], self.state['pattern_model'], data, window_size)
del_conv_list = []
delete_pattern_width = []
@ -55,7 +56,7 @@ class TroughModel(Model):
delete_pattern_timestamp.append(segment.pattern_timestamp)
deleted = utils.get_interval(data, del_min_index, window_size)
deleted = utils.subtract_min_without_nan(deleted)
del_conv = scipy.signal.fftconvolve(deleted, self.state['model_trough'])
del_conv = scipy.signal.fftconvolve(deleted, self.state['pattern_model'])
if len(del_conv): del_conv_list.append(max(del_conv))
delete_pattern_height.append(utils.find_confidence(deleted)[1])
delete_pattern_width.append(utils.find_width(deleted, False))
@ -84,10 +85,10 @@ class TroughModel(Model):
variance_error = self.state['WINDOW_SIZE']
close_patterns = utils.close_filtering(segments, variance_error)
segments = utils.best_pattern(close_patterns, data, 'min')
if len(segments) == 0 or len(self.state['itroughs']) == 0 :
if len(segments) == 0 or len(self.state.get('pattern_center', [])) == 0:
segments = []
return segments
pattern_data = self.state['model_trough']
pattern_data = self.state['pattern_model']
for segment in segments:
if segment > self.state['WINDOW_SIZE']:
convol_data = utils.get_interval(data, segment, self.state['WINDOW_SIZE'])

9
analytics/analytics/utils/common.py

@ -84,14 +84,15 @@ def get_av_model(patterns_list):
return []
x = len(patterns_list[0])
if len(patterns_list) > 1 and len(patterns_list[1]) != x:
raise NameError(
'All elements of patterns_list should have same length')
raise ValueError('All elements of patterns_list should have same length')
model_pat = []
for i in range(x):
av_val = []
for j in patterns_list:
av_val.append(j.values[i])
for val in patterns_list:
if type(val) == pd.Series:
val = val.values
av_val.append(val[i])
model_pat.append(ar_mean(av_val))
return model_pat

54
analytics/tests/test_dataset.py

@ -180,6 +180,60 @@ class TestDataset(unittest.TestCase):
max_pattern_index = max(model.do_detect(dataframe))
self.assertLessEqual(max_pattern_index, result)
def test_peak_model_for_cache(self):
    """Fitting PeakModel with a prefilled cache must merge the cached
    pattern centers with the one newly labeled segment (2 + 1 = 3).

    Fix: the cached average pattern is stored under 'pattern_model' —
    the key the model reads after the state-field rename. The old key
    'model_peak' is never read and would leave the cached pattern unused.
    """
    cache = {
        'pattern_center': [1, 6],
        'pattern_model': [1, 4, 0],
        'confidence': 2,
        'convolve_max': 8,
        'convolve_min': 7,
        'WINDOW_SIZE': 1,
        'conv_del_min': 0,
        'conv_del_max': 0,
    }
    data_val = [2.0, 5.0, 1.0, 1.0, 1.0, 2.0, 5.0, 1.0, 1.0, 2.0, 3.0, 7.0, 1.0, 1.0, 1.0]
    dataframe = create_dataframe(data_val)
    segments = [{'_id': 'Esl7uetLhx4lCqHa', 'analyticUnitId': 'opnICRJwOmwBELK8', 'from': 1523889000010, 'to': 1523889000012, 'labeled': True, 'deleted': False}]
    model = models.PeakModel()
    result = model.fit(dataframe, segments, cache)
    self.assertEqual(len(result['pattern_center']), 3)
def test_trough_model_for_cache(self):
    """Fitting TroughModel with a prefilled cache must merge the cached
    pattern centers with the one newly labeled segment (2 + 1 = 3)."""
    cached_state = {
        'pattern_center': [2, 6],
        'pattern_model': [5, 0.5, 4],
        'confidence': 2,
        'convolve_max': 8,
        'convolve_min': 7,
        'WINDOW_SIZE': 1,
        'conv_del_min': 0,
        'conv_del_max': 0,
    }
    values = [5.0, 5.0, 1.0, 4.0, 5.0, 5.0, 0.0, 4.0, 5.0, 5.0, 6.0, 1.0, 5.0, 5.0, 5.0]
    frame = create_dataframe(values)
    labeled_segments = [{
        '_id': 'Esl7uetLhx4lCqHa',
        'analyticUnitId': 'opnICRJwOmwBELK8',
        'from': 1523889000010,
        'to': 1523889000012,
        'labeled': True,
        'deleted': False,
    }]
    trough_model = models.TroughModel()
    # fit() returns the updated state; centers from the cache and the
    # new segment are deduplicated into a single list.
    new_state = trough_model.fit(frame, labeled_segments, cached_state)
    self.assertEqual(len(new_state['pattern_center']), 3)
def test_jump_model_for_cache(self):
    """Fitting JumpModel with a prefilled cache must merge the cached
    pattern centers with the one newly labeled segment (2 + 1 = 3)."""
    cached_state = {
        'pattern_center': [2, 6],
        'pattern_model': [5, 0.5, 4],
        'confidence': 2,
        'convolve_max': 8,
        'convolve_min': 7,
        'WINDOW_SIZE': 1,
        'conv_del_min': 0,
        'conv_del_max': 0,
    }
    values = [1.0, 1.0, 1.0, 4.0, 4.0, 0.0, 0.0, 5.0, 5.0, 0.0, 0.0, 4.0, 4.0, 4.0, 4.0]
    frame = create_dataframe(values)
    labeled_segments = [{
        '_id': 'Esl7uetLhx4lCqHa',
        'analyticUnitId': 'opnICRJwOmwBELK8',
        'from': 1523889000010,
        'to': 1523889000012,
        'labeled': True,
        'deleted': False,
    }]
    jump_model = models.JumpModel()
    # fit() returns the updated state; centers from the cache and the
    # new segment are deduplicated into a single list.
    new_state = jump_model.fit(frame, labeled_segments, cached_state)
    self.assertEqual(len(new_state['pattern_center']), 3)
# Allow running this test module directly (e.g. `python test_dataset.py`).
if __name__ == '__main__':
    unittest.main()

Loading…
Cancel
Save