Browse Source

Fix timestamps return in all models (convert from ns to ms)

pull/1/head
rozetko 6 years ago
parent
commit
48999a934d
  1. 4
      analytics/models/drop_model.py
  2. 3
      analytics/models/general_model.py
  3. 5
      analytics/models/jump_model.py
  4. 3
      analytics/models/peak_model.py
  5. 5
      analytics/models/trough_model.py

4
analytics/models/drop_model.py

@@ -101,8 +101,10 @@ class DropModel(Model):
     def do_predict(self, dataframe: pd.DataFrame):
         data = dataframe['value']
         possible_drops = utils.find_drop(data, self.state['DROP_HEIGHT'], self.state['DROP_LENGTH'] + 1)
         filtered = self.__filter_prediction(possible_drops, data)
-        return [(dataframe['timestamp'][x - 1].value, dataframe['timestamp'][x + 1].value) for x in filtered]
+        # TODO: convert from ns to ms more proper way (not dividing by 10^6)
+        return [(dataframe['timestamp'][x - 1].value / 1000000, dataframe['timestamp'][x + 1].value / 1000000) for x in filtered]

     def __filter_prediction(self, segments: list, data: list):
         delete_list = []

3
analytics/models/general_model.py

@@ -79,7 +79,8 @@ class GeneralModel(Model):
         all_conv_peaks = utils.peak_finder(self.all_conv, WINDOW_SIZE * 2)
         filtered = self.__filter_prediction(all_conv_peaks, data)
-        return [(dataframe['timestamp'][x - 1].value, dataframe['timestamp'][x + 1].value) for x in filtered]
+        # TODO: convert from ns to ms more proper way (not dividing by 10^6)
+        return [(dataframe['timestamp'][x - 1].value / 1000000, dataframe['timestamp'][x + 1].value / 1000000) for x in filtered]

     def __filter_prediction(self, segments: list, data: list):
         if len(segments) == 0 or len(self.ipats) == 0:

5
analytics/models/jump_model.py

@@ -105,9 +105,10 @@ class JumpModel(Model):
     def do_predict(self, dataframe: pd.DataFrame):
         data = dataframe['value']
         possible_jumps = utils.find_jump(data, self.state['JUMP_HEIGHT'], self.state['JUMP_LENGTH'] + 1)
         filtered = self.__filter_prediction(possible_jumps, data)
-        return [(dataframe['timestamp'][x - 1].value, dataframe['timestamp'][x + 1].value) for x in filtered]
+        # TODO: convert from ns to ms more proper way (not dividing by 10^6)
+        return [(dataframe['timestamp'][x - 1].value / 1000000, dataframe['timestamp'][x + 1].value / 1000000) for x in filtered]

     def __filter_prediction(self, segments, data):
         delete_list = []

3
analytics/models/peak_model.py

@@ -82,7 +82,8 @@ class PeakModel(Model):
             segments.append(i)
         filtered = self.__filter_prediction(segments, data)
-        return [(dataframe['timestamp'][x - 1].value, dataframe['timestamp'][x + 1].value) for x in filtered]
+        # TODO: convert from ns to ms more proper way (not dividing by 10^6)
+        return [(dataframe['timestamp'][x - 1].value / 1000000, dataframe['timestamp'][x + 1].value / 1000000) for x in filtered]

     def __filter_prediction(self, segments: list, all_max_flatten_data: list):
         delete_list = []

5
analytics/models/trough_model.py

@@ -79,9 +79,10 @@ class TroughModel(Model):
         for i in all_mins:
             if all_max_flatten_data[i] < extrema_list[i]:
                 segments.append(i)
-        test = dataframe['timestamp'][1].value
         filtered = self.__filter_prediction(segments, data)
-        return [(dataframe['timestamp'][x - 1].value, dataframe['timestamp'][x + 1].value) for x in filtered]
+        # TODO: convert from ns to ms more proper way (not dividing by 10^6)
+        return [(dataframe['timestamp'][x - 1].value / 1000000, dataframe['timestamp'][x + 1].value / 1000000) for x in filtered]

     def __filter_prediction(self, segments: list, all_max_flatten_data: list):
         delete_list = []

Loading…
Cancel
Save