151 fix error on second learning (#5)

rozetko committed 6 years ago (via GitHub)
commit f27f5561dd
Changed files:

1. README.md (5 changes)
2. analytics/anomaly_model.py (10 changes)
3. analytics/data_preprocessor.py (3 changes)
4. analytics/data_provider.py (4 changes)

README.md (5 changes)

````diff
@@ -66,3 +66,8 @@ export HASTIC_PORT=<port_you_want_to_run_server_on>
 cd ./hastic-server/server
 npm start
 ```
+
+#### Known bugs & issues
+
+- If you add labeled segments while learning - it fails
+- Dataset doesn't get updated after 1st learning
````
analytics/anomaly_model.py (10 changes)

```diff
@@ -64,14 +64,12 @@ class AnomalyModel:
         if len(anomalies) > 0:
             confidence = 0.0
             min_time, max_time = self.anomalies_box(anomalies)
-            start_index = dataframe[dataframe['timestamp'] >= min_time].index[0]
-            stop_index = dataframe[dataframe['timestamp'] > max_time].index[0]
-            start_index, stop_index = self.preprocessor.expand_indexes(start_index, stop_index)
-            dataframe = dataframe[start_index:stop_index]
+            dataframe = dataframe[dataframe['timestamp'] <= max_time]
+            dataframe = dataframe[dataframe['timestamp'] >= min_time]
             train_augmented = self.preprocessor.get_augmented_data(
-                start_index,
-                stop_index,
+                dataframe.index[0],
+                dataframe.index[-1],
                 anomalies
             )
```
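The removed lines derived positional start/stop indexes, widened them with `expand_indexes`, and sliced the frame positionally; the replacement filters by the anomaly window's timestamp bounds and reads the limits off the surviving index, which stays correct even when the underlying dataset has grown since the first learning. A minimal sketch of the new logic (the DataFrame contents and times are hypothetical, not from the repo):

```python
import pandas as pd

# Hypothetical data standing in for the server's metric stream.
dataframe = pd.DataFrame({
    'timestamp': pd.to_datetime([
        '2018-01-01 00:00', '2018-01-01 00:05',
        '2018-01-01 00:10', '2018-01-01 00:15',
    ]),
    'value': [1.0, 5.0, 5.5, 1.2],
})

# Bounds as returned by something like anomalies_box().
min_time = pd.Timestamp('2018-01-01 00:05')
max_time = pd.Timestamp('2018-01-01 00:10')

# Boolean filtering keeps the original index labels, so index[0] and
# index[-1] are the window's true bounds within the full frame, which is
# what the patched code now passes to get_augmented_data().
window = dataframe[dataframe['timestamp'] <= max_time]
window = window[window['timestamp'] >= min_time]
print(window.index[0], window.index[-1])  # -> 1 2
```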

analytics/data_preprocessor.py (3 changes)

```diff
@@ -83,8 +83,7 @@ class data_preprocessor:
             anomaly_index = current_index
         rows = dataframe[anomaly_index]
-        indexes = np.floor_divide(rows.index, self.frame_size)
-        # indexes = np.unique(rows.index)
+        indexes = np.unique(rows.index)
         return indexes

     def inverse_transform_anomalies(self, prediction):
```
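The old line converted anomalous row labels into frame numbers via `np.floor_divide(..., self.frame_size)` and could emit duplicates (the deduplicating `np.unique` call sat commented out beside it); the new line returns the deduplicated row labels directly. A small contrast with made-up values:

```python
import numpy as np

rows_index = np.array([3, 3, 4, 8, 9])  # hypothetical anomalous row labels
frame_size = 4                          # stand-in for self.frame_size

old = np.floor_divide(rows_index, frame_size)  # frame numbers, repeats kept: [0 0 1 2 2]
new = np.unique(rows_index)                    # unique row labels:           [3 4 8 9]
print(old, new)
```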

analytics/data_provider.py (4 changes)

```diff
@@ -28,7 +28,7 @@ class DataProvider:
             if after_time is None or after_time <= last_chunk_time:
                 chunk = self.__load_chunk(chunk_index)
                 if after_time is not None:
-                    chunk = chunk[chunk['timestamp'] > after_time]
+                    chunk = chunk[chunk['timestamp'] >= after_time]
                 result = pd.concat([result, chunk])
         return result
@@ -36,7 +36,7 @@ class DataProvider:
         for chunk_index, last_chunk_time in self.chunk_last_times.items():
             if after_time < last_chunk_time:
                 chunk = self.__load_chunk(chunk_index)
-                chunk = chunk[chunk['timestamp'] > after_time]
+                chunk = chunk[chunk['timestamp'] >= after_time]
                 return chunk.index[0]
         return self.size()
```
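Both hunks relax a strict `>` to `>=`, so the row whose timestamp exactly equals `after_time` is no longer dropped when data is re-read for a second learning; with the strict comparison the boundary sample silently vanished, consistent with the "dataset doesn't get updated" bug noted in the README. A toy illustration (values are hypothetical):

```python
import pandas as pd

chunk = pd.DataFrame({'timestamp': [10, 20, 30], 'value': [0.1, 0.2, 0.3]})
after_time = 20  # hypothetical resume point of the second learning

exclusive = chunk[chunk['timestamp'] > after_time]   # old: drops the t=20 row
inclusive = chunk[chunk['timestamp'] >= after_time]  # new: keeps the t=20 row
print(list(exclusive['timestamp']))  # [30]
print(list(inclusive['timestamp']))  # [20, 30]
```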
