Remove duplicate code from models #390 (#391)

Just refactoring.
Alexandr Velikiy, 5 years ago (committed by Evgeny Smyshlyaev)
Commit 963eaa6276
Changed files:

1. analytics/analytics/models/drop_model.py (43 lines changed)
2. analytics/analytics/models/general_model.py (22 lines changed)
3. analytics/analytics/models/jump_model.py (43 lines changed)
4. analytics/analytics/models/model.py (41 lines changed)
5. analytics/analytics/models/peak_model.py (38 lines changed)
6. analytics/analytics/models/trough_model.py (38 lines changed)
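
The refactoring follows a template-method shape: the base Model.fit() now asks the subclass for its model name and type, gathers the shared per-segment statistics once in get_parameters_from_segments(), and hands the result to each subclass's do_fit(). A minimal, self-contained sketch of that control flow (method bodies are simplified stand-ins, not the project's actual code):

    from abc import ABC, abstractmethod

    class Model(ABC):
        @abstractmethod
        def get_model_type(self) -> tuple:  # e.g. ('drop', False)
            pass

        @abstractmethod
        def do_fit(self, dataframe, labeled, deleted, learning_info: dict) -> None:
            pass

        def fit(self, dataframe, labeled, deleted) -> dict:
            # Shared statistics are computed once, in one place...
            model, model_type = self.get_model_type()
            learning_info = self.get_parameters_from_segments(dataframe, labeled, deleted, model, model_type)
            # ...and every subclass consumes them instead of recomputing.
            self.do_fit(dataframe, labeled, deleted, learning_info)
            return self.state

        def get_parameters_from_segments(self, dataframe, labeled, deleted, model, model_type) -> dict:
            # Simplified: the real method also collects confidences, widths,
            # heights, timestamps and min-subtracted pattern windows.
            return {'segment_center_list': [s.center_index for s in labeled]}

    class DropModel(Model):
        def __init__(self):
            self.state = {}

        def get_model_type(self) -> tuple:
            return ('drop', False)

        def do_fit(self, dataframe, labeled, deleted, learning_info: dict) -> None:
            self.idrops = learning_info['segment_center_list']

    print(DropModel().fit(dataframe=None, labeled=[], deleted=[]))  # -> {}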

analytics/analytics/models/drop_model.py (43 lines changed)

@@ -27,52 +27,39 @@ class DropModel(Model):
             'conv_del_max': 55000,
         }

+    def get_model_type(self) -> (str, bool):
+        model = 'drop'
+        type_model = False
+        return (model, type_model)
+
     def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
         data = dataframe['value']
         segment = data[start: end]
         segment_center_index = utils.find_pattern_center(segment, start, 'drop')
         return segment_center_index

-    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list) -> None:
+    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
-        confidences = []
-        convolve_list = []
-        correlation_list = []
-        drop_height_list = []
-        drop_length_list = []
-        patterns_list = []
-        pattern_timestamp = []
-        for segment in labeled_segments:
-            confidence = utils.find_confidence(segment.data)[0]
-            confidences.append(confidence)
-            segment_cent_index = segment.center_index
-            drop_height, drop_length = utils.find_parameters(segment.data, segment.start, 'drop')
-            drop_height_list.append(drop_height)
-            drop_length_list.append(drop_length)
-            self.idrops.append(segment_cent_index)
-            pattern_timestamp.append(segment.pattern_timestamp)
-            labeled_drop = utils.get_interval(data, segment_cent_index, self.state['WINDOW_SIZE'])
-            labeled_drop = utils.subtract_min_without_nan(labeled_drop)
-            patterns_list.append(labeled_drop)
-        self.model_drop = utils.get_av_model(patterns_list)
-        convolve_list = utils.get_convolve(self.idrops, self.model_drop, data, self.state['WINDOW_SIZE'])
-        correlation_list = utils.get_correlation(self.idrops, self.model_drop, data, self.state['WINDOW_SIZE'])
+        window_size = self.state['WINDOW_SIZE']
+        self.idrops = learning_info['segment_center_list']
+        self.model_drop = utils.get_av_model(learning_info['patterns_list'])
+        convolve_list = utils.get_convolve(self.idrops, self.model_drop, data, window_size)
+        correlation_list = utils.get_correlation(self.idrops, self.model_drop, data, window_size)
         del_conv_list = []
         delete_pattern_timestamp = []
         for segment in deleted_segments:
             segment_cent_index = segment.center_index
             delete_pattern_timestamp.append(segment.pattern_timestamp)
-            deleted_drop = utils.get_interval(data, segment_cent_index, self.state['WINDOW_SIZE'])
+            deleted_drop = utils.get_interval(data, segment_cent_index, window_size)
             deleted_drop = utils.subtract_min_without_nan(deleted_drop)
             del_conv_drop = scipy.signal.fftconvolve(deleted_drop, self.model_drop)
             if len(del_conv_drop): del_conv_list.append(max(del_conv_drop))
-        self._update_fiting_result(self.state, confidences, convolve_list, del_conv_list)
-        self.state['DROP_HEIGHT'] = int(min(drop_height_list, default = 1))
-        self.state['DROP_LENGTH'] = int(max(drop_length_list, default = 1))
+        self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
+        self.state['DROP_HEIGHT'] = int(min(learning_info['pattern_height'], default = 1))
+        self.state['DROP_LENGTH'] = int(max(learning_info['pattern_width'], default = 1))

     def do_detect(self, dataframe: pd.DataFrame) -> list:
         data = utils.cut_dataframe(dataframe)

analytics/analytics/models/general_model.py (22 lines changed)

@@ -27,28 +27,22 @@ class GeneralModel(Model):
         }
         self.all_conv = []

+    def get_model_type(self) -> (str, bool):
+        model = 'general'
+        type_model = True
+        return (model, type_model)
+
     def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
         data = dataframe['value']
         segment = data[start: end]
         center_ind = start + math.ceil((end - start) / 2)
         return center_ind

-    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list) -> None:
+    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
-        convolve_list = []
-        correlation_list = []
-        patterns_list = []
-        pattern_timestamp = []
-        for segment in labeled_segments:
-            center_ind = segment.center_index
-            self.ipats.append(center_ind)
-            pattern_timestamp.append(segment.pattern_timestamp)
-            segment_data = utils.get_interval(data, center_ind, self.state['WINDOW_SIZE'])
-            segment_data = utils.subtract_min_without_nan(segment_data)
-            patterns_list.append(segment_data)
-        self.model_gen = utils.get_av_model(patterns_list)
+        self.ipats = learning_info['segment_center_list']
+        self.model_gen = utils.get_av_model(learning_info['patterns_list'])
         convolve_list = utils.get_convolve(self.ipats, self.model_gen, data, self.state['WINDOW_SIZE'])
         correlation_list = utils.get_correlation(self.ipats, self.model_gen, data, self.state['WINDOW_SIZE'])

analytics/analytics/models/jump_model.py (43 lines changed)

@@ -28,52 +28,39 @@ class JumpModel(Model):
             'conv_del_max': 55000,
         }

+    def get_model_type(self) -> (str, bool):
+        model = 'jump'
+        type_model = True
+        return (model, type_model)
+
     def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
         data = dataframe['value']
         segment = data[start: end]
         segment_center_index = utils.find_pattern_center(segment, start, 'jump')
         return segment_center_index

-    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list) -> None:
+    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
-        confidences = []
-        convolve_list = []
-        correlation_list = []
-        jump_height_list = []
-        jump_length_list = []
-        patterns_list = []
-        pattern_timestamp = []
-        for segment in labeled_segments:
-            confidence = utils.find_confidence(segment.data)[0]
-            confidences.append(confidence)
-            segment_cent_index = segment.center_index
-            jump_height, jump_length = utils.find_parameters(segment.data, segment.start, 'jump')
-            jump_height_list.append(jump_height)
-            jump_length_list.append(jump_length)
-            self.ijumps.append(segment_cent_index)
-            pattern_timestamp.append(segment.pattern_timestamp)
-            labeled_jump = utils.get_interval(data, segment_cent_index, self.state['WINDOW_SIZE'])
-            labeled_jump = utils.subtract_min_without_nan(labeled_jump)
-            patterns_list.append(labeled_jump)
-        self.model_jump = utils.get_av_model(patterns_list)
-        convolve_list = utils.get_convolve(self.ijumps, self.model_jump, data, self.state['WINDOW_SIZE'])
-        correlation_list = utils.get_correlation(self.ijumps, self.model_jump, data, self.state['WINDOW_SIZE'])
+        window_size = self.state['WINDOW_SIZE']
+        self.ijumps = learning_info['segment_center_list']
+        self.model_jump = utils.get_av_model(learning_info['patterns_list'])
+        convolve_list = utils.get_convolve(self.ijumps, self.model_jump, data, window_size)
+        correlation_list = utils.get_correlation(self.ijumps, self.model_jump, data, window_size)
         del_conv_list = []
         delete_pattern_timestamp = []
         for segment in deleted_segments:
             segment_cent_index = segment.center_index
             delete_pattern_timestamp.append(segment.pattern_timestamp)
-            deleted_jump = utils.get_interval(data, segment_cent_index, self.state['WINDOW_SIZE'])
+            deleted_jump = utils.get_interval(data, segment_cent_index, window_size)
             deleted_jump = utils.subtract_min_without_nan(deleted_jump)
             del_conv_jump = scipy.signal.fftconvolve(deleted_jump, self.model_jump)
             if len(del_conv_jump): del_conv_list.append(max(del_conv_jump))
-        self._update_fiting_result(self.state, confidences, convolve_list, del_conv_list)
-        self.state['JUMP_HEIGHT'] = float(min(jump_height_list, default = 1))
-        self.state['JUMP_LENGTH'] = int(max(jump_length_list, default = 1))
+        self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)
+        self.state['JUMP_HEIGHT'] = float(min(learning_info['pattern_height'], default = 1))
+        self.state['JUMP_LENGTH'] = int(max(learning_info['pattern_width'], default = 1))

     def do_detect(self, dataframe: pd.DataFrame) -> list:
         data = utils.cut_dataframe(dataframe)
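
Note the asymmetric reductions above: the height threshold takes the minimum over the labeled segments while the length takes the maximum. A toy illustration with invented values (the list names mirror the learning_info keys):

    # Invented example values, one entry per labeled segment.
    pattern_height = [5.2, 4.8, 6.1]
    pattern_width = [3, 5, 4]
    state = {}
    state['JUMP_HEIGHT'] = float(min(pattern_height, default = 1))  # 4.8: accept even the smallest labeled jump
    state['JUMP_LENGTH'] = int(max(pattern_width, default = 1))     # 5: allow the widest labeled jump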

analytics/analytics/models/model.py (41 lines changed)

@@ -43,7 +43,7 @@ class Segment(AttrDict):
 class Model(ABC):

     @abstractmethod
-    def do_fit(self, dataframe: pd.DataFrame, segments: list, cache: Optional[ModelCache]) -> None:
+    def do_fit(self, dataframe: pd.DataFrame, segments: list, cache: Optional[ModelCache], learning_info: dict) -> None:
         pass

     @abstractmethod
@@ -54,10 +54,13 @@ class Model(ABC):
     def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
         pass

+    @abstractmethod
+    def get_model_type(self) -> (str, bool):
+        pass
+
     def fit(self, dataframe: pd.DataFrame, segments: list, cache: Optional[ModelCache]) -> ModelCache:
         if type(cache) is ModelCache:
             self.state = cache

         max_length = 0
         labeled = []
         deleted = []
@@ -72,9 +75,10 @@ class Model(ABC):
             if segment.labeled: labeled.append(segment)
             if segment.deleted: deleted.append(segment)

         self.state['WINDOW_SIZE'] = math.ceil(max_length / 2) if max_length else 0
-        self.do_fit(dataframe, labeled, deleted)
+        model, model_type = self.get_model_type()
+        learning_info = self.get_parameters_from_segments(dataframe, labeled, deleted, model, model_type)
+        self.do_fit(dataframe, labeled, deleted, learning_info)
         return self.state

     def detect(self, dataframe: pd.DataFrame, cache: Optional[ModelCache]) -> dict:
@@ -100,3 +104,32 @@ class Model(ABC):
             state['conv_del_min'], state['conv_del_max'] = utils.get_min_max(del_conv_list, state['WINDOW_SIZE'])
         else:
             raise ValueError('got non-dict as state for update fiting result: {}'.format(state))
+
+    def get_parameters_from_segments(self, dataframe: pd.DataFrame, labeled: list, deleted: list, model: str, model_type: bool) -> dict:
+        learning_info = {
+            'confidence': [],
+            'patterns_list': [],
+            'pattern_width': [],
+            'pattern_height': [],
+            'pattern_timestamp': [],
+            'segment_center_list': [],
+        }
+        data = dataframe['value']
+        for segment in labeled:
+            confidence = utils.find_confidence(segment.data)[0]
+            learning_info['confidence'].append(confidence)
+            segment_center = segment.center_index
+            learning_info['segment_center_list'].append(segment_center)
+            learning_info['pattern_timestamp'].append(segment.pattern_timestamp)
+            aligned_segment = utils.get_interval(data, segment_center, self.state['WINDOW_SIZE'])
+            aligned_segment = utils.subtract_min_without_nan(aligned_segment)
+            learning_info['patterns_list'].append(aligned_segment)
+            if model == 'peak' or model == 'trough':
+                learning_info['pattern_height'].append(utils.find_confidence(aligned_segment)[1])
+                learning_info['pattern_width'].append(utils.find_width(aligned_segment, model_type))
+            if model == 'jump' or model == 'drop':
+                pattern_height, pattern_length = utils.find_parameters(segment.data, segment.start, model)
+                learning_info['pattern_height'].append(pattern_height)
+                learning_info['pattern_width'].append(pattern_length)
+        return learning_info
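
For reference, this is the shape of the dict each subclass's do_fit() now receives. The keys come from get_parameters_from_segments() above; the values below are invented for illustration:

    # Invented example values; keys follow get_parameters_from_segments above.
    learning_info = {
        'confidence': [1.2, 0.9],              # find_confidence(segment.data)[0] per labeled segment
        'patterns_list': [[0.0, 3.5, 0.1], [0.0, 4.1, 0.2]],  # min-subtracted windows around segment centers
        'pattern_width': [4, 6],               # find_width for peak/trough, find_parameters for jump/drop
        'pattern_height': [10.0, 12.5],
        'pattern_timestamp': [1523889000000, 1523889060000],
        'segment_center_list': [105, 342],     # consumed as self.idrops / self.ijumps / self.ipeaks / ...
    }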

analytics/analytics/models/peak_model.py (38 lines changed)

@@ -27,36 +27,24 @@ class PeakModel(Model):
             'conv_del_max': 55000,
         }

+    def get_model_type(self) -> (str, bool):
+        model = 'peak'
+        type_model = True
+        return (model, type_model)
+
     def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
         data = dataframe['value']
         segment = data[start: end]
         return segment.idxmax()

-    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list) -> None:
+    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
-        confidences = []
-        convolve_list = []
-        correlation_list = []
-        patterns_list = []
-        pattern_width = []
-        pattern_height = []
-        pattern_timestamp = []
-        for segment in labeled_segments:
-            confidence = utils.find_confidence(segment.data)[0]
-            confidences.append(confidence)
-            segment_max_index = segment.center_index
-            self.ipeaks.append(segment_max_index)
-            pattern_timestamp.append(segment.pattern_timestamp)
-            labeled = utils.get_interval(data, segment_max_index, self.state['WINDOW_SIZE'])
-            labeled = utils.subtract_min_without_nan(labeled)
-            patterns_list.append(labeled)
-            pattern_height.append(utils.find_confidence(labeled)[1])
-            pattern_width.append(utils.find_width(labeled, True))
-        self.model = utils.get_av_model(patterns_list)
-        convolve_list = utils.get_convolve(self.ipeaks, self.model, data, self.state['WINDOW_SIZE'])
-        correlation_list = utils.get_correlation(self.ipeaks, self.model, data, self.state['WINDOW_SIZE'])
+        window_size = self.state['WINDOW_SIZE']
+        self.ipeaks = learning_info['segment_center_list']
+        self.model = utils.get_av_model(learning_info['patterns_list'])
+        convolve_list = utils.get_convolve(self.ipeaks, self.model, data, window_size)
+        correlation_list = utils.get_correlation(self.ipeaks, self.model, data, window_size)

         del_conv_list = []
         delete_pattern_width = []
@@ -65,14 +53,14 @@ class PeakModel(Model):
         for segment in deleted_segments:
             del_max_index = segment.center_index
             delete_pattern_timestamp.append(segment.pattern_timestamp)
-            deleted = utils.get_interval(data, del_max_index, self.state['WINDOW_SIZE'])
+            deleted = utils.get_interval(data, del_max_index, window_size)
             deleted = utils.subtract_min_without_nan(deleted)
             del_conv = scipy.signal.fftconvolve(deleted, self.model)
             if len(del_conv): del_conv_list.append(max(del_conv))
             delete_pattern_height.append(utils.find_confidence(deleted)[1])
             delete_pattern_width.append(utils.find_width(deleted, True))
-        self._update_fiting_result(self.state, confidences, convolve_list, del_conv_list)
+        self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)

     def do_detect(self, dataframe: pd.DataFrame):
         data = utils.cut_dataframe(dataframe)
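
Across the five get_model_type() implementations in this commit, the boolean flag resolves as shown below; reading it as "the pattern points upward" is an interpretation, not something the code states:

    # Mapping collected from the get_model_type() implementations in this diff.
    MODEL_TYPES = {
        'drop': False,     # downward step
        'general': True,
        'jump': True,      # upward step
        'peak': True,      # local maximum
        'trough': False,   # local minimum
    }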

analytics/analytics/models/trough_model.py (38 lines changed)

@@ -27,36 +27,24 @@ class TroughModel(Model):
             'conv_del_max': 55000,
         }

+    def get_model_type(self) -> (str, bool):
+        model = 'trough'
+        type_model = False
+        return (model, type_model)
+
     def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
         data = dataframe['value']
         segment = data[start: end]
         return segment.idxmin()

-    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list) -> None:
+    def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
-        confidences = []
-        convolve_list = []
-        correlation_list = []
-        patterns_list = []
-        pattern_width = []
-        pattern_height = []
-        pattern_timestamp = []
-        for segment in labeled_segments:
-            confidence = utils.find_confidence(segment.data)[0]
-            confidences.append(confidence)
-            segment_min_index = segment.center_index
-            self.itroughs.append(segment_min_index)
-            pattern_timestamp.append(segment.pattern_timestamp)
-            labeled = utils.get_interval(data, segment_min_index, self.state['WINDOW_SIZE'])
-            labeled = utils.subtract_min_without_nan(labeled)
-            patterns_list.append(labeled)
-            pattern_height.append(utils.find_confidence(labeled)[1])
-            pattern_width.append(utils.find_width(labeled, False))
-        self.model = utils.get_av_model(patterns_list)
-        convolve_list = utils.get_convolve(self.itroughs, self.model, data, self.state['WINDOW_SIZE'])
-        correlation_list = utils.get_correlation(self.itroughs, self.model, data, self.state['WINDOW_SIZE'])
+        window_size = self.state['WINDOW_SIZE']
+        self.itroughs = learning_info['segment_center_list']
+        self.model = utils.get_av_model(learning_info['patterns_list'])
+        convolve_list = utils.get_convolve(self.itroughs, self.model, data, window_size)
+        correlation_list = utils.get_correlation(self.itroughs, self.model, data, window_size)

         del_conv_list = []
         delete_pattern_width = []
@@ -65,14 +53,14 @@ class TroughModel(Model):
         for segment in deleted_segments:
             del_min_index = segment.center_index
             delete_pattern_timestamp.append(segment.pattern_timestamp)
-            deleted = utils.get_interval(data, del_min_index, self.state['WINDOW_SIZE'])
+            deleted = utils.get_interval(data, del_min_index, window_size)
             deleted = utils.subtract_min_without_nan(deleted)
             del_conv = scipy.signal.fftconvolve(deleted, self.model)
             if len(del_conv): del_conv_list.append(max(del_conv))
             delete_pattern_height.append(utils.find_confidence(deleted)[1])
             delete_pattern_width.append(utils.find_width(deleted, False))
-        self._update_fiting_result(self.state, confidences, convolve_list, del_conv_list)
+        self._update_fiting_result(self.state, learning_info['confidence'], convolve_list, del_conv_list)

     def do_detect(self, dataframe: pd.DataFrame):
         data = utils.cut_dataframe(dataframe)
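
Putting it together, a hedged end-to-end usage sketch. Only the fit()/detect() signatures visible in this diff are assumed; loading the dataframe and building the segment list are elided:

    # Hypothetical call sequence; everything except the method signatures
    # (fit(dataframe, segments, cache) and detect(dataframe, cache)) is assumed.
    model = TroughModel()
    cache = model.fit(dataframe, segments, cache=None)  # fills learning_info via get_parameters_from_segments()
    result = model.detect(dataframe, cache)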
