
Fix #231: .count(np.NaN) -> .isnull().sum()

commit 9986642659 (pull/1/head)
rozetko authored 6 years ago
 analytics/analytics/models/drop_model.py    | 4 ++--
 analytics/analytics/models/general_model.py | 2 +-
 analytics/analytics/models/jump_model.py    | 4 ++--
 analytics/analytics/models/peak_model.py    | 6 +++---
 analytics/analytics/models/trough_model.py  | 6 +++---
 5 files changed, 11 insertions(+), 11 deletions(-)

analytics/analytics/models/drop_model.py

@@ -39,7 +39,7 @@ class DropModel(Model):
                 segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
                 segment_to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
                 segment_data = data[segment_from_index: segment_to_index + 1]
-                percent_of_nans = segment_data.count(np.NaN) / len(segment_data)
+                percent_of_nans = segment_data.isnull().sum() / len(segment_data)
                 if percent_of_nans > 0 or len(segment_data) == 0:
                     continue
                 segment_min = min(segment_data)

@@ -164,7 +164,7 @@ class DropModel(Model):
        for segment in segments:
            if segment > self.state['WINDOW_SIZE'] and segment < (len(data) - self.state['WINDOW_SIZE']):
                convol_data = data[segment - self.state['WINDOW_SIZE'] : segment + self.state['WINDOW_SIZE'] + 1]
-                percent_of_nans = convol_data.count(np.NaN) / len(convol_data)
+                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue

analytics/analytics/models/general_model.py

@@ -37,7 +37,7 @@ class GeneralModel(Model):
                 segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
                 segment_to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
                 segment_data = data[segment_from_index: segment_to_index + 1]
-                percent_of_nans = segment_data.count(np.NaN) / len(segment_data)
+                percent_of_nans = segment_data.isnull().sum() / len(segment_data)
                 if percent_of_nans > 0 or len(segment_data) == 0:
                     continue
                 x = segment_from_index + math.ceil((segment_to_index - segment_from_index) / 2)

analytics/analytics/models/jump_model.py

@@ -39,7 +39,7 @@ class JumpModel(Model):
        for segment in segments:
            if segment['labeled']:
                segment_from_index, segment_to_index, segment_data = parse_segment(segment, dataframe)
-                percent_of_nans = segment_data.count(np.NaN) / len(segment_data)
+                percent_of_nans = segment_data.isnull().sum() / len(segment_data)
                if percent_of_nans > 0 or len(segment_data) == 0:
                    continue
                segment_min = min(segment_data)

@@ -170,7 +170,7 @@ class JumpModel(Model):
        for segment in segments:
            if segment > self.state['WINDOW_SIZE'] and segment < (len(data) - self.state['WINDOW_SIZE']):
                convol_data = data[segment - self.state['WINDOW_SIZE'] : segment + self.state['WINDOW_SIZE'] + 1]
-                percent_of_nans = convol_data.count(np.NaN) / len(convol_data)
+                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue

analytics/analytics/models/peak_model.py

@@ -37,7 +37,7 @@ class PeakModel(Model):
                 segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
                 segment_to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
                 segment_data = data[segment_from_index: segment_to_index + 1]
-                percent_of_nans = segment_data.count(np.NaN) / len(segment_data)
+                percent_of_nans = segment_data.isnull().sum() / len(segment_data)
                 if percent_of_nans > 0 or len(segment_data) == 0:
                     continue
                 segment_min = min(segment_data)

@@ -48,7 +48,7 @@ class PeakModel(Model):
                 labeled_peak = data[segment_max_index - self.state['WINDOW_SIZE']: segment_max_index + self.state['WINDOW_SIZE'] + 1]
                 labeled_peak = labeled_peak - min(labeled_peak)
                 patterns_list.append(labeled_peak)
        self.model_peak = utils.get_av_model(patterns_list)
        for n in range(len(segments)): #labeled segments
            labeled_peak = data[self.ipeaks[n] - self.state['WINDOW_SIZE']: self.ipeaks[n] + self.state['WINDOW_SIZE'] + 1]

@@ -126,7 +126,7 @@ class PeakModel(Model):
            if segment > self.state['WINDOW_SIZE']:
                convol_data = data[segment - self.state['WINDOW_SIZE']: segment + self.state['WINDOW_SIZE'] + 1]
                convol_data = convol_data - min(convol_data)
-                percent_of_nans = convol_data.count(np.NaN) / len(convol_data)
+                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue

analytics/analytics/models/trough_model.py

@@ -37,7 +37,7 @@ class TroughModel(Model):
                 segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
                 segment_to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
                 segment_data = data[segment_from_index: segment_to_index + 1]
-                percent_of_nans = segment_data.count(np.NaN) / len(segment_data)
+                percent_of_nans = segment_data.isnull().sum() / len(segment_data)
                 if percent_of_nans > 0 or len(segment_data) == 0:
                     continue
                 segment_min = min(segment_data)

@@ -64,7 +64,7 @@ class TroughModel(Model):
                 segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
                 segment_to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
                 segment_data = data[segment_from_index: segment_to_index + 1]
-                percent_of_nans = segment_data.count(np.NaN) / len(segment_data)
+                percent_of_nans = segment_data.isnull().sum() / len(segment_data)
                 if percent_of_nans > 0 or len(segment_data) == 0:
                     continue
                 del_min_index = segment_data.idxmin()

@@ -127,7 +127,7 @@ class TroughModel(Model):
            if segment > self.state['WINDOW_SIZE']:
                convol_data = data[segment - self.state['WINDOW_SIZE'] : segment + self.state['WINDOW_SIZE'] + 1]
                convol_data = convol_data - min(convol_data)
-                percent_of_nans = convol_data.count(np.NaN) / len(convol_data)
+                percent_of_nans = convol_data.isnull().sum() / len(convol_data)
                if percent_of_nans > 0.5:
                    delete_list.append(segment)
                    continue
