
jsonclass usage in models + fixes in meta (#583)

* jsonclass usage in models + fixes in meta

* remove some empty lines
Alexey Velikiy, 5 years ago, committed by GitHub
commit 30108ae643
  1. 21 analytics/analytics/models/drop_model.py
  2. 24 analytics/analytics/models/general_model.py
  3. 18 analytics/analytics/models/jump_model.py
  4. 33 analytics/analytics/models/model.py
  5. 17 analytics/analytics/models/peak_model.py
  6. 17 analytics/analytics/models/trough_model.py
  7. 28 analytics/analytics/services/server_service.py
  8. 26 analytics/analytics/utils/meta.py

21 analytics/analytics/models/drop_model.py

@@ -6,9 +6,12 @@ from scipy.signal import argrelextrema
from scipy.stats import gaussian_kde
from typing import Optional
import utils
import utils.meta
import numpy as np
import pandas as pd
@utils.meta.JSONClass
class DropModelState(ModelState):
def __init__(
@@ -23,20 +26,6 @@ class DropModelState(ModelState):
self.drop_height = drop_height
self.drop_length = drop_length
def to_json(self) -> dict:
json = super().to_json()
json.update({
'confidence': self.confidence,
'drop_height': self.drop_height,
'drop_length': self.drop_length,
})
return json
@staticmethod
def from_json(json: Optional[dict] = None):
if json is None:
json = {}
return DropModelState(**json)
class DropModel(Model):
def __init__(self):
@@ -54,12 +43,12 @@ class DropModel(Model):
'conv_del_min': 54000,
'conv_del_max': 55000,
}
def get_model_type(self) -> (str, bool):
model = 'drop'
type_model = False
return (model, type_model)
def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
data = dataframe['value']
segment = data[start: end]
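With the decorator applied (see the utils/meta.py diff at the bottom of this commit), the hand-written to_json/from_json removed above become unnecessary: the generated to_json serializes every non-None, non-private instance attribute and converts key names to camelCase. A rough usage sketch, assuming the constructor keyword arguments match the attribute names shown in the diff:

# Sketch only; key names follow the underscore_to_camel conversion in utils/meta.py.
state = DropModelState(confidence=0.9, drop_height=5.0, drop_length=3)
state.to_json()
# -> {'patternCenter': [], 'patternModel': [], ..., 'confidence': 0.9,
#     'dropHeight': 5.0, 'dropLength': 3}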

24 analytics/analytics/models/general_model.py

@@ -1,40 +1,34 @@
from analytic_types import AnalyticUnitId
from models import Model, ModelState
from typing import Union, List, Generator
import utils
import utils.meta
import numpy as np
import pandas as pd
import scipy.signal
from scipy.fftpack import fft
from scipy.signal import argrelextrema
from scipy.stats.stats import pearsonr
from typing import Optional
import math
from scipy.stats import gaussian_kde
from scipy.stats import norm
import logging
from analytic_types import AnalyticUnitId
from typing import Optional
import math
PEARSON_FACTOR = 0.7
class GeneralModelState(ModelState):
@utils.meta.JSONClass
class GeneralModelState(ModelState):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def to_json(self) -> dict:
return super().to_json()
@staticmethod
def from_json(json: Optional[dict] = None):
if json is None:
json = {}
return GeneralModelState(**json)
class GeneralModel(Model):
def __init__(self):
super()
self.state = {
'pattern_center': [],
'pattern_model': [],
@@ -44,12 +38,12 @@ class GeneralModel(Model):
'conv_del_min': 0,
'conv_del_max': 0,
}
def get_model_type(self) -> (str, bool):
model = 'general'
type_model = True
return (model, type_model)
def find_segment_center(self, dataframe: pd.DataFrame, start: int, end: int) -> int:
data = dataframe['value']
segment = data[start: end]
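GeneralModelState adds no fields of its own, so after decoration it needs no serialization code at all; the None handling of the removed from_json override is now covered by the Optional handling added in utils/meta.py (below). A sketch, assuming from_json is attached to the class the same way to_json is:

# Sketch only: both calls go through the decorator-generated methods.
state = GeneralModelState.from_json(None)             # behaves like GeneralModelState()
same = GeneralModelState.from_json(state.to_json())   # round trip through camelCase keys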

18 analytics/analytics/models/jump_model.py

@@ -1,6 +1,7 @@
from models import Model, ModelState
import utils
import utils.meta
import numpy as np
import pandas as pd
import scipy.signal
@@ -10,8 +11,9 @@ import math
from scipy.signal import argrelextrema
from scipy.stats import gaussian_kde
class JumpModelState(ModelState):
@utils.meta.JSONClass
class JumpModelState(ModelState):
def __init__(
self,
confidence: float = 0,
@@ -24,20 +26,6 @@ class JumpModelState(ModelState):
self.jump_height = jump_height
self.jump_length = jump_length
def to_json(self) -> dict:
json = super().to_json()
json.update({
'confidence': self.confidence,
'jump_height': self.jump_height,
'jump_length': self.jump_length,
})
return json
@staticmethod
def from_json(json: Optional[dict] = None):
if json is None:
json = {}
return JumpModelState(**json)
class JumpModel(Model):

33 analytics/analytics/models/model.py

@@ -8,17 +8,18 @@ import math
import logging
from analytic_types import AnalyticUnitId
import utils.meta
ModelCache = dict
class Segment(AttrDict):
__percent_of_nans = 0
def __init__(self, dataframe: pd.DataFrame, segment_map: dict, center_finder = None):
self.update(segment_map)
self.start = utils.timestamp_to_index(dataframe, pd.to_datetime(self['from'], unit='ms'))
self.end = utils.timestamp_to_index(dataframe, pd.to_datetime(self['to'], unit='ms'))
self.length = abs(self.end - self.start)
self.__percent_of_nans = 0
if callable(center_finder):
self.center_index = center_finder(dataframe, self.start, self.end)
@@ -26,7 +27,7 @@ class Segment(AttrDict):
else:
self.center_index = self.start + math.ceil(self.length / 2)
self.pattern_timestamp = dataframe['timestamp'][self.center_index]
assert len(dataframe['value']) >= self.end + 1, \
'segment {}-{} out of dataframe length={}'.format(self.start, self.end+1, len(dataframe['value']))
@@ -42,10 +43,12 @@ class Segment(AttrDict):
nan_list = utils.find_nan_indexes(self.data)
self.data = utils.nan_to_zero(self.data, nan_list)
@utils.meta.JSONClass
class ModelState():
def __init__(
self,
self,
pattern_center: List[int] = [],
pattern_model: List[float] = [],
convolve_max: float = 0,
@@ -62,22 +65,6 @@ class ModelState():
self.conv_del_min = conv_del_min
self.conv_del_max = conv_del_max
def to_json(self) -> dict:
return {
'pattern_center': self.pattern_center,
'pattern_model': self.pattern_model,
'convolve_max': self.convolve_max,
'convolve_min': self.convolve_min,
'window_size': self.window_size,
'conv_del_min': self.conv_del_min,
'conv_del_max': self.conv_del_max,
}
@staticmethod
def from_json(json: Optional[dict] = None):
if json is None:
json = {}
return ModelState(**json)
class Model(ABC):
@@ -127,7 +114,7 @@ class Model(ABC):
assert len(labeled) > 0, f'labeled list empty, skip fitting for {id}'
if self.state.get('WINDOW_SIZE') == 0:
if self.state.get('WINDOW_SIZE') == 0:
self.state['WINDOW_SIZE'] = math.ceil(max_length / 2) if max_length else 0
model, model_type = self.get_model_type()
learning_info = self.get_parameters_from_segments(dataframe, labeled, deleted, model, model_type)
@@ -167,7 +154,7 @@ class Model(ABC):
state['height_min'], state['height_max'] = utils.get_min_max(height_list, 0)
else:
raise ValueError('got non-dict as state for update fitting result: {}'.format(state))
def get_parameters_from_segments(self, dataframe: pd.DataFrame, labeled: list, deleted: list, model: str, model_type: bool) -> dict:
logging.debug('Start parsing segments')
learning_info = {
@@ -203,4 +190,4 @@ class Model(ABC):
learning_info['patterns_value'].append(aligned_segment.values[self.state['WINDOW_SIZE']])
logging.debug('Parsing segments ended correctly with learning_info: {}'.format(learning_info))
return learning_info
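Since ModelState itself is now decorated, the base-class field list that was spelled out in the removed to_json comes directly from the instance attributes, and the generated from_json maps camelCase payload keys back onto the snake_case constructor parameters. A hedged sketch of deserializing a cached state:

# Sketch only; assumes the decorator attaches from_json as shown in utils/meta.py below.
cached = {'patternCenter': [10, 42], 'windowSize': 5, 'convDelMin': 54000}
state = ModelState.from_json(cached)
state.window_size    # 5
state.conv_del_min   # 54000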

17 analytics/analytics/models/peak_model.py

@@ -5,12 +5,15 @@ from scipy.fftpack import fft
from scipy.signal import argrelextrema
from typing import Optional, List
import utils
import utils.meta
import numpy as np
import pandas as pd
SMOOTHING_COEFF = 2400
EXP_SMOOTHING_FACTOR = 0.01
@utils.meta.JSONClass
class PeakModelState(ModelState):
def __init__(
@@ -25,20 +28,6 @@ class PeakModelState(ModelState):
self.height_max = height_max
self.height_min = height_min
def to_json(self) -> dict:
json = super().to_json()
json.update({
'confidence': self.confidence,
'height_max': self.height_max,
'height_min': self.height_min,
})
return json
@staticmethod
def from_json(json: Optional[dict] = None):
if json is None:
json = {}
return PeakModelState(**json)
class PeakModel(Model):

17 analytics/analytics/models/trough_model.py

@@ -5,12 +5,15 @@ from scipy.fftpack import fft
from scipy.signal import argrelextrema
from typing import Optional
import utils
import utils.meta
import numpy as np
import pandas as pd
SMOOTHING_COEFF = 2400
EXP_SMOOTHING_FACTOR = 0.01
@utils.meta.JSONClass
class TroughModelState(ModelState):
def __init__(
@@ -25,20 +28,6 @@ class TroughModelState(ModelState):
self.height_max = height_max
self.height_min = height_min
def to_json(self) -> dict:
json = super().to_json()
json.update({
'confidence': self.confidence,
'height_max': self.height_max,
'height_min': self.height_min,
})
return json
@staticmethod
def from_json(json: Optional[dict] = None):
if json is None:
json = {}
return TroughModelState(**json)
class TroughModel(Model):

28 analytics/analytics/services/server_service.py

@@ -9,6 +9,7 @@ import asyncio
import traceback
import utils.concurrent
import utils.meta
from typing import Optional
@@ -19,32 +20,13 @@ PARSE_MESSAGE_OR_SAVE_LOOP_INTERRUPTED = False
SERVER_SOCKET_RECV_LOOP_INTERRUPTED = False
@utils.meta.JSONClass
class ServerMessage:
def __init__(self, method: str, payload: object = None, request_id: int = None):
self.method = method
self.payload = payload
self.request_id = request_id
def toJSON(self) -> dict:
result = {
'method': self.method
}
if self.payload is not None:
result['payload'] = self.payload
if self.request_id is not None:
result['requestId'] = self.request_id
return result
@staticmethod
def fromJSON(json: dict):
method = json['method']
payload = None
request_id = None
if 'payload' in json:
payload = json['payload']
if 'requestId' in json:
request_id = json['requestId']
return ServerMessage(method, payload, request_id)
class ServerService(utils.concurrent.AsyncZmqActor):
@@ -63,8 +45,8 @@ class ServerService(utils.concurrent.AsyncZmqActor):
# in theory, we can try to use zmq.proxy:
# zmq.proxy(self.__actor_socket, self.__server_socket)
# and do here something like:
# self.__actor_socket.send_string(json.dumps(message.toJSON()))
await self._put_message_to_thread(json.dumps(message.toJSON()))
# self.__actor_socket.send_string(json.dumps(message.to_json()))
await self._put_message_to_thread(json.dumps(message.to_json()))
async def send_request_to_server(self, message: ServerMessage) -> object:
if message.request_id is not None:
@@ -118,7 +100,7 @@ class ServerService(utils.concurrent.AsyncZmqActor):
def __parse_message_or_save(self, text: str) -> Optional[ServerMessage]:
try:
message_object = json.loads(text)
message = ServerMessage.fromJSON(message_object)
message = ServerMessage.from_json(message_object)
if message.request_id is not None:
self.__responses[message_object['requestId']] = message.payload
return None
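ServerMessage keeps the same wire format after the switch: the generated to_json omits a None payload/request_id just as the removed toJSON did, and from_json maps the incoming requestId key to the request_id constructor argument. A small round-trip sketch (the method names here are made up for illustration):

# Sketch only; relies on the decorator-generated methods from utils/meta.py.
msg = ServerMessage('ping', payload={'value': 1})
msg.to_json()     # {'method': 'ping', 'payload': {'value': 1}}; request_id is None, so it is skipped
reply = ServerMessage.from_json({'method': 'pong', 'requestId': 7})
reply.request_id  # 7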

26 analytics/analytics/utils/meta.py

@@ -1,15 +1,24 @@
from inspect import signature, Parameter
from functools import wraps
from typing import Optional
from re import match
import re
CAMEL_REGEX = re.compile(r'([A-Z])')
UNDERSCORE_REGEX = re.compile(r'_([a-z])')
def camel_to_underscore(name):
return CAMEL_REGEX.sub(lambda x: '_' + x.group(1).lower(), name)
def underscore_to_camel(name):
return UNDERSCORE_REGEX.sub(lambda x: x.group(1).upper(), name)
def is_field_private(field_name: str) -> Optional[str]:
m = match(r'_[^(__)]+__', field_name)
m = re.match(r'_[^(__)]+__', field_name)
return m is not None
def inited_params(target_init):
target_params = signature(target_init).parameters.values()
target_params = signature(target_init).parameters.values()
if len(target_params) < 1:
raise ValueError('init function must have at least self parameter')
if len(target_params) == 1:
@@ -41,12 +50,15 @@ def JSONClass(target_class):
where all None values and private fields are skipped
"""
return {
k: v for k, v in self.__dict__.items()
underscore_to_camel(k): v for k, v in self.__dict__.items()
if v is not None and not is_field_private(k)
}
def from_json(json_object: dict) -> target_class:
return target_class(**json_object)
def from_json(json_object: Optional[dict]) -> target_class:
if json_object is None:
json_object = {}
init_object = { camel_to_underscore(k): v for k, v in json_object.items() }
return target_class(**init_object)
# target_class.__init__ = inited_params(target_class.__init__)
target_class.to_json = to_json
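For reference, the two name-conversion helpers are each other's inverse on the field names used in this commit; these checks follow directly from the regexes above:

assert camel_to_underscore('convDelMax') == 'conv_del_max'
assert underscore_to_camel('conv_del_max') == 'convDelMax'
assert camel_to_underscore('requestId') == 'request_id'
assert underscore_to_camel('request_id') == 'requestId'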
