
Add ModelState child classes #576 (#579)

Branch: pull/1/head
Author: Alexandr Velikiy, 6 years ago (committed by rozetko)
Commit: 7f15bf3996
  1. analytics/analytics/models/__init__.py (2 changed lines)
  2. analytics/analytics/models/drop_model.py (35 changed lines)
  3. analytics/analytics/models/general_model.py (20 changed lines)
  4. analytics/analytics/models/jump_model.py (34 changed lines)
  5. analytics/analytics/models/model.py (4 changed lines)
  6. analytics/analytics/models/peak_model.py (36 changed lines)
  7. analytics/analytics/models/trough_model.py (36 changed lines)

analytics/analytics/models/__init__.py (2 changed lines)

@@ -1,4 +1,4 @@
-from models.model import Model, ModelCache
+from models.model import Model, ModelCache, ModelState
 from models.drop_model import DropModel
 from models.peak_model import PeakModel
 from models.jump_model import JumpModel

analytics/analytics/models/drop_model.py (35 changed lines)

@@ -1,14 +1,42 @@
-from models import Model
+from models import Model, ModelState
 
 import scipy.signal
 from scipy.fftpack import fft
 from scipy.signal import argrelextrema
 from scipy.stats import gaussian_kde
 from typing import Optional
 
 import utils
 import numpy as np
 import pandas as pd
 
+
+class DropModelState(ModelState):
+
+    def __init__(
+        self,
+        confidence: float = 0,
+        drop_height: float = 0,
+        drop_length: float = 0,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
+        self.confidence = confidence
+        self.drop_height = drop_height
+        self.drop_length = drop_length
+
+    def to_json(self) -> dict:
+        json = super().to_json()
+        json.update({
+            'confidence': self.confidence,
+            'drop_height': self.drop_height,
+            'drop_length': self.drop_length,
+        })
+        return json
+
+    @staticmethod
+    def from_json(json: Optional[dict] = None):
+        if json is None:
+            json = {}
+        return DropModelState(**json)
+
 
 class DropModel(Model):
 
     def __init__(self):

@@ -38,6 +66,9 @@ class DropModel(Model):
         segment_center_index = utils.find_pattern_center(segment, start, 'drop')
         return segment_center_index
 
+    def get_cache(self, cache: Optional[dict] = None) -> DropModelState:
+        return DropModelState.from_json(cache)
+
     def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict, id: str) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
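
For reference, a minimal sketch of how the new state class is expected to round-trip through its JSON form. The values are illustrative, and the extra keys contributed by the base ModelState.to_json() are not shown here:

# Hypothetical round-trip of the new DropModelState (illustrative values).
from models.drop_model import DropModelState

state = DropModelState(confidence=0.9, drop_height=12.5, drop_length=3)

cache = state.to_json()
# cache contains at least:
# {'confidence': 0.9, 'drop_height': 12.5, 'drop_length': 3, plus base ModelState fields}

restored = DropModelState.from_json(cache)
assert restored.drop_height == 12.5

# With no stored cache, from_json(None) falls back to the default field values:
empty = DropModelState.from_json(None)
assert empty.confidence == 0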

analytics/analytics/models/general_model.py (20 changed lines)

@@ -1,4 +1,4 @@
-from models import Model
+from models import Model, ModelState
 from typing import Union, List, Generator
 import utils
 import numpy as np

@@ -7,6 +7,7 @@ import scipy.signal
 from scipy.fftpack import fft
 from scipy.signal import argrelextrema
 from scipy.stats.stats import pearsonr
+from typing import Optional
 import math
 from scipy.stats import gaussian_kde
 from scipy.stats import norm

@@ -16,6 +17,20 @@ from analytic_types import AnalyticUnitId
 
 PEARSON_FACTOR = 0.7
 
+
+class GeneralModelState(ModelState):
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+    def to_json(self) -> dict:
+        return super().to_json()
+
+    @staticmethod
+    def from_json(json: Optional[dict] = None):
+        if json is None:
+            json = {}
+        return GeneralModelState(**json)
+
 
 class GeneralModel(Model):
 
     def __init__(self):

@@ -41,6 +56,9 @@ class GeneralModel(Model):
         center_ind = start + math.ceil((end - start) / 2)
         return center_ind
 
+    def get_cache(self, cache: Optional[dict] = None) -> GeneralModelState:
+        return GeneralModelState.from_json(cache)
+
     def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict, id: AnalyticUnitId) -> None:
         logging.debug('Start method do_fit for analytic unit: {}'.format(id))
         data = utils.cut_dataframe(dataframe)

analytics/analytics/models/jump_model.py (34 changed lines)

@@ -1,14 +1,43 @@
-from models import Model
+from models import Model, ModelState
 
 import utils
 import numpy as np
 import pandas as pd
 import scipy.signal
 from scipy.fftpack import fft
 from typing import Optional
 import math
 from scipy.signal import argrelextrema
 from scipy.stats import gaussian_kde
 
+
+class JumpModelState(ModelState):
+
+    def __init__(
+        self,
+        confidence: float = 0,
+        jump_height: float = 0,
+        jump_length: float = 0,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
+        self.confidence = confidence
+        self.jump_height = jump_height
+        self.jump_length = jump_length
+
+    def to_json(self) -> dict:
+        json = super().to_json()
+        json.update({
+            'confidence': self.confidence,
+            'jump_height': self.jump_height,
+            'jump_length': self.jump_length,
+        })
+        return json
+
+    @staticmethod
+    def from_json(json: Optional[dict] = None):
+        if json is None:
+            json = {}
+        return JumpModelState(**json)
+
 
 class JumpModel(Model):

@@ -39,6 +68,9 @@ class JumpModel(Model):
         segment_center_index = utils.find_pattern_center(segment, start, 'jump')
         return segment_center_index
 
+    def get_cache(self, cache: Optional[dict] = None) -> JumpModelState:
+        return JumpModelState.from_json(cache)
+
     def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict, id: str) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']

analytics/analytics/models/model.py (4 changed lines)

@@ -101,6 +101,10 @@ class Model(ABC):
     def get_model_type(self) -> (str, bool):
         pass
 
+    @abstractmethod
+    def get_cache(self, cache: Optional[dict] = None) -> ModelState:
+        pass
+
     def fit(self, dataframe: pd.DataFrame, segments: list, id: AnalyticUnitId, cache: Optional[ModelCache]) -> ModelCache:
         logging.debug('Start method fit for analytic unit {}'.format(id))
         data = dataframe['value']
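
The new abstract get_cache hook gives every concrete model a single place to turn the raw Optional[dict] cache into its typed state object. A minimal sketch of how a caller could use it; the restore_state function and the cache contents below are hypothetical and not part of this change:

from typing import Optional

from models import DropModel

def restore_state(raw_cache: Optional[dict]):
    # Hypothetical caller: the model decides how to interpret its own cache.
    model = DropModel()
    return model.get_cache(raw_cache)  # DropModelState; defaults when raw_cache is None

state = restore_state({'confidence': 0.8, 'drop_height': 5.0, 'drop_length': 2})
print(state.drop_height)  # 5.0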

analytics/analytics/models/peak_model.py (36 changed lines)

@@ -1,9 +1,9 @@
-from models import Model
+from models import Model, ModelState
 
 import scipy.signal
 from scipy.fftpack import fft
 from scipy.signal import argrelextrema
 from typing import Optional, List
 import utils
 import numpy as np
 import pandas as pd

@@ -11,6 +11,35 @@ import pandas as pd
 
 SMOOTHING_COEFF = 2400
 EXP_SMOOTHING_FACTOR = 0.01
 
+
+class PeakModelState(ModelState):
+
+    def __init__(
+        self,
+        confidence: float = 0,
+        height_max: float = 0,
+        height_min: float = 0,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
+        self.confidence = confidence
+        self.height_max = height_max
+        self.height_min = height_min
+
+    def to_json(self) -> dict:
+        json = super().to_json()
+        json.update({
+            'confidence': self.confidence,
+            'height_max': self.height_max,
+            'height_min': self.height_min,
+        })
+        return json
+
+    @staticmethod
+    def from_json(json: Optional[dict] = None):
+        if json is None:
+            json = {}
+        return PeakModelState(**json)
+
 
 class PeakModel(Model):
 
     def __init__(self):

@@ -39,6 +68,9 @@ class PeakModel(Model):
         segment = data[start: end]
         return segment.idxmax()
 
+    def get_cache(self, cache: Optional[dict] = None) -> PeakModelState:
+        return PeakModelState.from_json(cache)
+
     def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict, id: str) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']

analytics/analytics/models/trough_model.py (36 changed lines)

@@ -1,9 +1,9 @@
-from models import Model
+from models import Model, ModelState
 
 import scipy.signal
 from scipy.fftpack import fft
 from scipy.signal import argrelextrema
 from typing import Optional
 import utils
 import numpy as np
 import pandas as pd

@@ -11,6 +11,35 @@ import pandas as pd
 
 SMOOTHING_COEFF = 2400
 EXP_SMOOTHING_FACTOR = 0.01
 
+
+class TroughModelState(ModelState):
+
+    def __init__(
+        self,
+        confidence: float = 0,
+        height_max: float = 0,
+        height_min: float = 0,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
+        self.confidence = confidence
+        self.height_max = height_max
+        self.height_min = height_min
+
+    def to_json(self) -> dict:
+        json = super().to_json()
+        json.update({
+            'confidence': self.confidence,
+            'height_max': self.height_max,
+            'height_min': self.height_min,
+        })
+        return json
+
+    @staticmethod
+    def from_json(json: Optional[dict] = None):
+        if json is None:
+            json = {}
+        return TroughModelState(**json)
+
 
 class TroughModel(Model):
 
     def __init__(self):

@@ -39,6 +68,9 @@ class TroughModel(Model):
         segment = data[start: end]
         return segment.idxmin()
 
+    def get_cache(self, cache: Optional[dict] = None) -> TroughModelState:
+        return TroughModelState.from_json(cache)
+
     def do_fit(self, dataframe: pd.DataFrame, labeled_segments: list, deleted_segments: list, learning_info: dict, id: str) -> None:
         data = utils.cut_dataframe(dataframe)
         data = data['value']
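
All five new state classes share the same contract: from_json(None) produces a state with default field values, and to_json/from_json invert each other, assuming the base ModelState fields serialize the same way. A hedged sanity-check sketch (the test function and its placement are assumptions, not part of this commit):

# Hypothetical check of the shared serialization contract.
from models.drop_model import DropModelState
from models.general_model import GeneralModelState
from models.jump_model import JumpModelState
from models.peak_model import PeakModelState
from models.trough_model import TroughModelState

STATE_CLASSES = [
    DropModelState, GeneralModelState, JumpModelState,
    PeakModelState, TroughModelState,
]

def test_state_round_trip():
    for cls in STATE_CLASSES:
        default_state = cls.from_json(None)               # no stored cache yet
        restored = cls.from_json(default_state.to_json())
        assert restored.to_json() == default_state.to_json()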
