
Models with filtering #186 (#218)

pull/1/head
Alexandr Velikiy 6 years ago committed by Evgeny Smyshlyaev
commit dd139e1feb
  1. analytics/models/drop_model.py (20 changed lines)
  2. analytics/models/jump_model.py (18 changed lines)
  3. analytics/models/peak_model.py (16 changed lines)
  4. analytics/models/trough_model.py (14 changed lines)
  5. analytics/utils/common.py (37 changed lines)

analytics/models/drop_model.py (20 changed lines)

@@ -154,17 +154,9 @@ class DropModel(Model):
     def __filter_prediction(self, segments: list, data: list):
         delete_list = []
-        variance_error = int(0.004 * len(data))
-        if variance_error > self.state['WINDOW_SIZE']:
-            variance_error = self.state['WINDOW_SIZE']
-        for i in range(1, len(segments)):
-            if segments[i] < segments[i - 1] + variance_error:
-                delete_list.append(segments[i])
-        # for item in delete_list:
-        #     segments.remove(item)
-        delete_list = []
+        variance_error = self.state['WINDOW_SIZE']
+        close_patterns = utils.close_filtering(segments, variance_error)
+        segments = utils.best_pat(close_patterns, data, 'min')
         if len(segments) == 0 or len(self.idrops) == 0 :
             segments = []
             return segments
@@ -186,8 +178,8 @@ class DropModel(Model):
                 delete_list.append(segment)
             else:
                 delete_list.append(segment)
-        # TODO: implement filtering
-        # for item in delete_list:
-        #     segments.remove(item)
+        for item in delete_list:
+            segments.remove(item)
         return set(segments)
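The same replacement appears in all four models: instead of dropping any candidate index that falls within a fixed variance_error of the previous one, the new code groups candidate indices that lie within WINDOW_SIZE of each other and keeps only the extremum of each group ('min' for drops and troughs, 'max' for jumps and peaks). A minimal standalone sketch of that idea, with illustrative names rather than the project's utils helpers:

def group_close(indices, window):
    # collect runs of candidate indices that sit within `window` of the previous one
    groups = [[indices[0]]]
    for idx in indices[1:]:
        if idx - window <= groups[-1][-1]:
            groups[-1].append(idx)
        else:
            groups.append([idx])
    return groups

def pick_extremum(groups, data, mode):
    # keep one index per group: the smallest value for drops/troughs, the largest for jumps/peaks
    pick = min if mode == 'min' else max
    return [pick(group, key=lambda i: data[i]) for group in groups]

data = [10, 9, 2, 3, 9, 10, 8, 1, 9]
candidates = [1, 2, 3, 6, 7]
print(pick_extremum(group_close(candidates, window=2), data, 'min'))  # [2, 7]

Keeping the extremum rather than the first index of a cluster means a run of near-duplicate detections is reported at its most pronounced sample.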

analytics/models/jump_model.py (18 changed lines)

@@ -154,20 +154,13 @@ class JumpModel(Model):
     def __filter_prediction(self, segments, data):
         delete_list = []
-        variance_error = int(0.004 * len(data))
-        if variance_error > self.state['WINDOW_SIZE']:
-            variance_error = self.state['WINDOW_SIZE']
-        for i in range(1, len(segments)):
-            if segments[i] < segments[i - 1] + variance_error:
-                delete_list.append(segments[i])
-        for item in delete_list:
-            segments.remove(item)
+        variance_error = self.state['WINDOW_SIZE']
+        close_patterns = utils.close_filtering(segments, variance_error)
+        segments = utils.best_pat(close_patterns, data, 'max')
         if len(segments) == 0 or len(self.ijumps) == 0 :
             segments = []
             return segments
-        delete_list = []
         pattern_data = self.model_jump
         upper_bound = self.state['convolve_max'] * 1.2
         lower_bound = self.state['convolve_min'] * 0.8
@@ -186,11 +179,8 @@ class JumpModel(Model):
                 delete_list.append(segment)
             else:
                 delete_list.append(segment)
         for item in delete_list:
             segments.remove(item)
-        # TODO: implement filtering
-        #for ijump in self.ijumps:
-            #segments.append(ijump)
         return set(segments)

analytics/models/peak_model.py (16 changed lines)

@@ -114,16 +114,10 @@ class PeakModel(Model):
     def __filter_prediction(self, segments: list, data: list) -> list:
         delete_list = []
-        variance_error = int(0.004 * len(data))
-        if variance_error > self.state['WINDOW_SIZE']:
-            variance_error = self.state['WINDOW_SIZE']
-        for i in range(1, len(segments)):
-            if segments[i] < segments[i - 1] + variance_error:
-                delete_list.append(segments[i])
-        for item in delete_list:
-            segments.remove(item)
-        delete_list = []
+        variance_error = self.state['WINDOW_SIZE']
+        close_patterns = utils.close_filtering(segments, variance_error)
+        segments = utils.best_pat(close_patterns, data, 'max')
         if len(segments) == 0 or len(self.ipeaks) == 0:
             return []
         pattern_data = self.model_peak
@@ -135,11 +129,9 @@ class PeakModel(Model):
             if max(conv) > self.state['convolve_max'] * 1.05 or max(conv) < self.state['convolve_min'] * 0.95:
                 delete_list.append(segment)
             elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
-                print("this must be deleted: {0}, index: {1}".format(max(conv), segment))
                 delete_list.append(segment)
             else:
                 delete_list.append(segment)
-        # TODO: implement filtering
         for item in delete_list:
             segments.remove(item)

analytics/models/trough_model.py (14 changed lines)

@@ -115,16 +115,9 @@ class TroughModel(Model):
     def __filter_prediction(self, segments: list, data: list) -> list:
         delete_list = []
-        variance_error = int(0.004 * len(data))
-        if variance_error > self.state['WINDOW_SIZE']:
-            variance_error = self.state['WINDOW_SIZE']
-        for i in range(1, len(segments)):
-            if segments[i] < segments[i - 1] + variance_error:
-                delete_list.append(segments[i])
-        for item in delete_list:
-            segments.remove(item)
-        delete_list = []
+        variance_error = self.state['WINDOW_SIZE']
+        close_patterns = utils.close_filtering(segments, variance_error)
+        segments = utils.best_pat(close_patterns, data, 'min')
         if len(segments) == 0 or len(self.itroughs) == 0 :
             segments = []
             return segments
@@ -140,7 +133,6 @@ class TroughModel(Model):
                 delete_list.append(segment)
             else:
                 delete_list.append(segment)
-        # TODO: implement filtering
         for item in delete_list:
             segments.remove(item)

analytics/utils/common.py (37 changed lines)

@@ -1,6 +1,7 @@
 import numpy as np
 import pandas as pd

 def exponential_smoothing(series, alpha):
     result = [series[0]]
     for n in range(1, len(series)):
@@ -211,7 +212,11 @@ def ar_mean(numbers):
 def get_av_model(patterns_list):
     x = len(patterns_list[0])
-    if len(patterns_list[1]) != x:
+    if len(patterns_list) > 1 and len(patterns_list[1]) != x:
         raise NameError('All elements of patterns_list should have same length')
     model_pat = []
     for i in range(x):
@@ -220,3 +225,35 @@ def get_av_model(patterns_list):
             av_val.append(j.values[i])
         model_pat.append(ar_mean(av_val))
     return model_pat
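The added length check above guards get_av_model against a single-pattern list: the old condition indexed patterns_list[1] unconditionally and raised IndexError when only one learned pattern was passed in. A small standalone illustration of the guard (check_same_length is an illustrative name, not a function from the project):

def check_same_length(patterns_list):
    # mirrors the new guard: only compare lengths when there is a second pattern to compare
    x = len(patterns_list[0])
    if len(patterns_list) > 1 and len(patterns_list[1]) != x:
        raise NameError('All elements of patterns_list should have same length')
    return x

print(check_same_length([[1, 2, 3]]))             # 3 - no IndexError for a single pattern
print(check_same_length([[1, 2, 3], [4, 5, 6]]))  # 3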
+
+def close_filtering(pat_list, win_size):
+    s = [[pat_list[0]]]
+    k = 0
+    for i in range(1, len(pat_list)):
+        if pat_list[i] - win_size <= s[k][-1]:
+            s[k].append(pat_list[i])
+        else:
+            k += 1
+            s.append([pat_list[i]])
+    return s
+
+def best_pat(pat_list, data, dir):
+    new_pat_list = []
+    for val in pat_list:
+        max_val = data[val[0]]
+        min_val = data[val[0]]
+        ind = 0
+        for i in val:
+            if dir == 'max':
+                if data[i] > max_val:
+                    max_val = data[i]
+                    ind = i
+            else:
+                if data[i] < min_val:
+                    min_val = data[i]
+                    ind = i
+        new_pat_list.append(ind)
+    return new_pat_list
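Together these are the helpers the model diffs above call through utils: close_filtering groups candidate indices that sit within win_size of each other, and best_pat collapses each group to the index of its largest ('max') or smallest ('min') value. A quick usage sketch, assuming the two functions exactly as added above are in scope; the sample data is made up:

# data is any indexable series of values; candidates are indices flagged by a model
data = [0.0] * 40
data[10], data[12], data[30], data[33] = 1.0, 5.0, 2.0, 7.0
candidates = [10, 12, 30, 33]

groups = close_filtering(candidates, 5)   # [[10, 12], [30, 33]]
print(best_pat(groups, data, 'max'))      # [12, 33] - the strongest index of each group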
