|
|
# NOTE(review): this region previously contained merge/diff residue — a stray
# hunk header, `import time` twice, and two conflicting logger definitions
# ('WORKER' vs 'AnalyticUnitWorker'). Reconstructed to a single coherent
# header, keeping the newer logger name. Other imports this chunk relies on
# (e.g. `detectors`, `config`) presumably live above this chunk — verify.
import logging
import time
import traceback

# Module-level logger for the worker; named after the class it serves.
logger = logging.getLogger('AnalyticUnitWorker')
|
|
|
|
|
|
|
|
|
|
|
class AnalyticUnitWorker:
    """Dispatches LEARN / PREDICT tasks to per-analytic-unit detectors.

    NOTE(review): the original region was a corrupted diff with two
    interleaved revisions of this class (one returning flat result dicts
    with 'task'/'analyticUnitId' keys, one returning a status/payload
    envelope, plus a removed DROP special case). This is the reconstructed
    newer revision; confirm against repository history.
    """

    # Class-level cache shared by all worker instances:
    # analytic_unit_id -> detector. Detectors are created lazily in
    # get_detector and never evicted.
    detectors_cache = {}

    def __init__(self, detector: detectors.Detector):
        # Placeholder kept from the original: the detector argument is
        # currently unused — detectors come from get_detector's cache.
        pass

    # TODO: get task as an object built from json
    async def do_task(self, task):
        """Run one task dict and return a JSON-ready status envelope.

        task: {'type': 'LEARN'|'PREDICT', 'analyticUnitId': ..., 'payload': {...}}
        Returns {'status': 'SUCCESS', 'payload': ...} on success, or
        {'status': 'FAILED', 'error': ...} if anything raises (the original
        built the FAILED dict but its return was lost in the diff residue).
        """
        try:
            # Renamed from `type` to avoid shadowing the builtin.
            task_type = task['type']
            analytic_unit_id = task['analyticUnitId']
            payload = task['payload']
            if task_type == "PREDICT":
                result_payload = await self.do_predict(analytic_unit_id, payload)
            elif task_type == "LEARN":
                result_payload = await self.do_learn(analytic_unit_id, payload)
            else:
                raise ValueError('Unknown task type %s' % task_type)
        except Exception as e:
            #traceback.extract_stack()
            error_text = traceback.format_exc()
            logger.error("do_task Exception: '%s'" % error_text)
            # TODO: move result to a class which renders to json for messaging to analytics
            return {
                'status': "FAILED",
                'error': str(e)
            }
        return {
            'status': 'SUCCESS',
            'payload': result_payload
        }

    async def do_learn(self, analytic_unit_id, payload) -> None:
        """Train the unit's detector on the labeled segments in *payload*."""
        pattern = payload['pattern']
        segments = payload['segments']
        # NOTE(review): read but not passed to learn() in the newer revision;
        # the older revision fed data via detector.synchronize_data() — verify
        # whether the detector still needs it.
        data = payload['data']  # [time, value][]

        detector = self.get_detector(analytic_unit_id, pattern)
        await detector.learn(segments)

    async def do_predict(self, analytic_unit_id, payload):
        """Run detection over payload['data'] and return found segments."""
        pattern = payload['pattern']
        data = payload['data']  # [time, value][]

        detector = self.get_detector(analytic_unit_id, pattern)
        # detector.predict is expected to return (segments, last_prediction_time).
        segments, last_prediction_time = await detector.predict(data)
        return {
            'segments': segments,
            'lastPredictionTime': last_prediction_time
        }

    def get_detector(self, analytic_unit_id, pattern_type):
        """Return the cached detector for the unit, creating it on first use.

        'GENERAL' maps to GeneralDetector; every other pattern_type gets a
        PatternDetector parameterized by the pattern. The cache key is the
        unit id alone, so a unit's pattern_type is fixed after first call.
        """
        if analytic_unit_id not in self.detectors_cache:
            if pattern_type == 'GENERAL':
                detector = detectors.GeneralDetector(analytic_unit_id)
            else:
                detector = detectors.PatternDetector(analytic_unit_id, pattern_type)
            self.detectors_cache[analytic_unit_id] = detector
        return self.detectors_cache[analytic_unit_id]