diff --git a/analytics/models/trough_model.py b/analytics/models/trough_model.py
index c75c7a8..54748f0 100644
--- a/analytics/models/trough_model.py
+++ b/analytics/models/trough_model.py
@@ -8,6 +8,8 @@ import utils
 import numpy as np
 import pandas as pd
 
+SMOOTHING_COEFF = 2400
+EXP_SMOOTHING_FACTOR = 0.01
 
 
 class TroughModel(Model):
@@ -15,6 +17,7 @@ class TroughModel(Model):
         super()
         self.segments = []
         self.itroughs = []
+        self.model_trough = []
         self.state = {
             'confidence': 1.5,
             'convolve_max': 570000,
@@ -26,6 +29,7 @@ class TroughModel(Model):
         data = dataframe['value']
         confidences = []
         convolve_list = []
+        patterns_list = []
         for segment in segments:
             if segment['labeled']:
                 segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
@@ -39,14 +43,18 @@ class TroughModel(Model):
                 confidences.append(0.2 * (segment_max - segment_min))
                 segment_min_index = segment_data.idxmin()
                 self.itroughs.append(segment_min_index)
-                labeled_trough = data[segment_min_index - self.state['WINDOW_SIZE'] : segment_min_index + self.state['WINDOW_SIZE']]
+                labeled_trough = data[segment_min_index - self.state['WINDOW_SIZE'] : segment_min_index + self.state['WINDOW_SIZE'] + 1]
                 labeled_trough = labeled_trough - min(labeled_trough)
-                auto_convolve = scipy.signal.fftconvolve(labeled_trough, labeled_trough)
-                first_trough = data[self.itroughs[0] - self.state['WINDOW_SIZE']: self.itroughs[0] + self.state['WINDOW_SIZE']]
-                first_trough = first_trough - min(first_trough)
-                convolve_trough = scipy.signal.fftconvolve(labeled_trough, first_trough)
-                convolve_list.append(max(auto_convolve))
-                convolve_list.append(max(convolve_trough))
+                patterns_list.append(labeled_trough)
+
+        self.model_trough = utils.get_av_model(patterns_list)
+        for n in range(len(self.itroughs)):
+            labeled_trough = data[self.itroughs[n] - self.state['WINDOW_SIZE']: self.itroughs[n] + self.state['WINDOW_SIZE'] + 1]
+            labeled_trough = labeled_trough - min(labeled_trough)
+            auto_convolve = scipy.signal.fftconvolve(labeled_trough, labeled_trough)
+            convolve_trough = scipy.signal.fftconvolve(labeled_trough, self.model_trough)
+            convolve_list.append(max(auto_convolve))
+            convolve_list.append(max(convolve_trough))
 
         if len(confidences) > 0:
             self.state['confidence'] = float(min(confidences))
@@ -65,11 +73,11 @@ class TroughModel(Model):
 
     def do_predict(self, dataframe: pd.DataFrame):
         data = dataframe['value']
-        window_size = 24
+        window_size = int(len(data) / SMOOTHING_COEFF)  # TODO: test window size on flat data
         all_mins = argrelextrema(np.array(data), np.less)[0]
 
         extrema_list = []
-        for i in utils.exponential_smoothing(data - self.state['confidence'], 0.02):
+        for i in utils.exponential_smoothing(data - self.state['confidence'], EXP_SMOOTHING_FACTOR):
             extrema_list.append(i)
 
         segments = []
@@ -82,8 +90,8 @@ class TroughModel(Model):
     def __filter_prediction(self, segments: list, data: list) -> list:
         delete_list = []
         variance_error = int(0.004 * len(data))
-        if variance_error > 50:
-            variance_error = 50
+        if variance_error > self.state['WINDOW_SIZE']:
+            variance_error = self.state['WINDOW_SIZE']
         for i in range(1, len(segments)):
             if segments[i] < segments[i - 1] + variance_error:
                 delete_list.append(segments[i])
@@ -94,14 +102,13 @@ class TroughModel(Model):
         if len(segments) == 0 or len(self.itroughs) == 0 :
             segments = []
             return segments
-        pattern_data = data[self.itroughs[0] - self.state['WINDOW_SIZE'] : self.itroughs[0] + self.state['WINDOW_SIZE']]
-        pattern_data = pattern_data - min(pattern_data)
+        pattern_data = self.model_trough
         for segment in segments:
             if segment > self.state['WINDOW_SIZE']:
-                convol_data = data[segment - self.state['WINDOW_SIZE'] : segment + self.state['WINDOW_SIZE']]
+                convol_data = data[segment - self.state['WINDOW_SIZE'] : segment + self.state['WINDOW_SIZE'] + 1]
                 convol_data = convol_data - min(convol_data)
-                conv = scipy.signal.fftconvolve(pattern_data, convol_data)
-                if max(conv) > self.state['convolve_max'] * 1.05 or max(conv) < self.state['convolve_min'] * 0.95:
+                conv = scipy.signal.fftconvolve(convol_data, pattern_data)
+                if max(conv) > self.state['convolve_max'] * 1.1 or max(conv) < self.state['convolve_min'] * 0.9:
                     delete_list.append(segment)
             else:
                 delete_list.append(segment)
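Note on the change: the fit step now collects every labeled trough window into `patterns_list`, builds an averaged trough shape with `utils.get_av_model`, and both the fit step and `__filter_prediction` convolve candidate windows against that average (`self.model_trough`) instead of against the first labeled trough only; the acceptance band is widened from ±5% to ±10% at the same time. Below is a minimal sketch of that idea, assuming `get_av_model` is a simple element-wise mean over equal-length windows; the names `get_av_model_sketch` and `matches_model` are illustrative and not part of the repository.

```python
import numpy as np
import scipy.signal

def get_av_model_sketch(patterns_list):
    # Assumption: each pattern is a window of 2 * WINDOW_SIZE + 1 values around
    # a labeled trough, already shifted so its minimum is zero. The averaged
    # shape is simply the element-wise mean of those windows.
    if len(patterns_list) == 0:
        return np.array([])
    return np.mean([np.asarray(p, dtype=float) for p in patterns_list], axis=0)

def matches_model(convol_data, model_trough, convolve_min, convolve_max):
    # Mirror of the filtering rule above: keep a candidate window only if the
    # peak of its FFT convolution with the averaged trough falls inside the
    # widened [convolve_min * 0.9, convolve_max * 1.1] band.
    convol_data = np.asarray(convol_data, dtype=float)
    convol_data = convol_data - convol_data.min()
    conv = scipy.signal.fftconvolve(convol_data, model_trough)
    return convolve_min * 0.9 <= conv.max() <= convolve_max * 1.1
```

Averaging over all labeled troughs makes the reference pattern less sensitive to a single noisy label, which is presumably why the acceptance band can be relaxed from 1.05/0.95 to 1.1/0.9.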