Fix webhooks (#341)

rozetko authored 5 years ago, committed by GitHub
commit 9b6f5482bc
9 changed files:

1. .env (3)
2. .gitignore (1)
3. analytics/analytics/analytic_unit_manager.py (12)
4. analytics/analytics/buckets/data_bucket.py (1)
5. analytics/analytics/detectors/threshold_detector.py (17)
6. docker-compose.yml (2)
7. server/src/controllers/analytics_controller.ts (4)
8. server/src/services/data_puller.ts (27)
9. server/src/services/notification_service.ts (5)

.env (3)

@@ -1 +1,4 @@
 ZMQ_CONNECTION_STRING=tcp://analytics:8002
+HASTIC_API_KEY=eyJrIjoiVDRUTUlKSjJ5N3dYTDdsd1JyWWRBNHFkb0VSeDBNTTYiLCJuIjoiaGFzdGljLXNlcnZlciIsImlkIjoxfQ==
+HASTIC_WEBHOOK_URL=http://localhost:8888
+HASTIC_WEBHOOK_TYPE=application/json
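
The new HASTIC_WEBHOOK_URL points at a receiver on localhost:8888. To verify delivery locally, here is a minimal sketch of such a receiver using only the Python standard library (the script and its names are assumptions for local testing, not part of this commit):

# webhook_receiver.py -- assumed local test helper, not shipped with hastic-server
from http.server import BaseHTTPRequestHandler, HTTPServer

class WebhookHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        # hastic-server posts the alert with a Content-Type matching HASTIC_WEBHOOK_TYPE
        length = int(self.headers.get('Content-Length', 0))
        body = self.rfile.read(length).decode('utf-8')
        print('received webhook:', body)
        self.send_response(200)
        self.end_headers()

if __name__ == '__main__':
    # port 8888 matches the HASTIC_WEBHOOK_URL default above
    HTTPServer(('localhost', 8888), WebhookHandler).serve_forever()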

.gitignore (1)

@@ -1,6 +1,7 @@
 data
 dist
 config.json
+.env
 node_modules/

analytics/analytics/analytic_unit_manager.py (12)

@@ -1,7 +1,8 @@
 from typing import Dict
 import pandas as pd
 import numpy as np
-import logging, traceback
+import logging as log
+import traceback
 from concurrent.futures import Executor, ThreadPoolExecutor
 import detectors

@@ -9,7 +10,7 @@ from analytic_unit_worker import AnalyticUnitWorker
 from models import ModelCache

-logger = logging.getLogger('AnalyticUnitManager')
+logger = log.getLogger('AnalyticUnitManager')

 WORKERS_EXECUTORS = 20

 AnalyticUnitId = str
@@ -73,7 +74,8 @@ class AnalyticUnitManager:
         if task['type'] == 'PUSH':
             # TODO: do it a better way
             res = await worker.recieve_data(data, payload['cache'])
-            return res.update({ 'analyticUnitId': analytic_unit_id })
+            res.update({ 'analyticUnitId': analytic_unit_id })
+            return res
         elif task['type'] == 'LEARN':
             if 'segments' in payload:
                 return await worker.do_train(payload['segments'], data, payload['cache'])
@@ -89,13 +91,13 @@ class AnalyticUnitManager:
     async def handle_analytic_task(self, task):
         try:
             result_payload = await self.__handle_analytic_task(task)
-            return {
+            result_message = {
                 'status': 'SUCCESS',
                 'payload': result_payload
             }
+            return result_message
         except Exception as e:
             error_text = traceback.format_exc()
+            logger.error("handle_analytic_task exception: '%s'" % error_text)
             # TODO: move result to a class which renders to json for messaging to analytics
             return {
                 'status': 'FAILED',
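
The PUSH change fixes a classic Python slip: dict.update() mutates in place and returns None, so the old "return res.update(...)" always handed None back to the caller. A two-line illustration:

# dict.update returns None; returning it directly drops the result
res = {'status': 'SUCCESS'}
print(res.update({'analyticUnitId': 'u1'}))  # None -- what the old code returned
print(res)  # {'status': 'SUCCESS', 'analyticUnitId': 'u1'} -- what the fix returns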

analytics/analytics/buckets/data_bucket.py (1)

@@ -6,6 +6,7 @@ class DataBucket(object):
     def __init__(self):
         self.data = pd.DataFrame([], columns=['timestamp', 'value'])
+
     def receive_data(self, data: pd.DataFrame):
        self.data = self.data.append(data, ignore_index=True)

analytics/analytics/detectors/threshold_detector.py (17)

@@ -1,13 +1,14 @@
-import logging
+import logging as log
 import pandas as pd
 from typing import Optional

 from detectors import Detector
 from models import ModelCache
+from time import time

-logger = logging.getLogger('THRESHOLD_DETECTOR')
+logger = log.getLogger('THRESHOLD_DETECTOR')

 class ThresholdDetector(Detector):
@@ -16,6 +17,7 @@ class ThresholdDetector(Detector):
         pass

     def train(self, dataframe: pd.DataFrame, threshold: dict, cache: Optional[ModelCache]) -> ModelCache:
+        log.debug('run train for threshold detector')
         return {
             'cache': {
                 'value': threshold['value'],
@@ -24,6 +26,7 @@ class ThresholdDetector(Detector):
         }

     def detect(self, dataframe: pd.DataFrame, cache: Optional[ModelCache]) -> dict:
+        log.debug('run detect for threshold detector')
         value = cache['value']
         condition = cache['condition']
@@ -32,10 +35,9 @@ class ThresholdDetector(Detector):
             return dict()
         last_entry = dataframe_without_nans.iloc[-1]
         last_value = last_entry['value']
-        # TODO: convert from nanoseconds to millisecond in a better way: not by dividing by 10^6
-        last_time = last_entry['timestamp'].value / 1000000
-        segment = ({ 'from': last_time, 'to': last_time })
+        now = int(time()) * 1000
+        segment = ({ 'from': now, 'to': now })
         segments = []
         if condition == '>':
             if last_value > value:
@@ -52,12 +54,13 @@ class ThresholdDetector(Detector):
         elif condition == '<':
             if last_value < value:
                 segments.append(segment)
+        log.debug('seg {}'.format(segments))
         return {
             'cache': cache,
             'segments': segments,
-            'lastDetectionTime': last_time
+            'lastDetectionTime': now
         }

     def recieve_data(self, data: pd.DataFrame, cache: Optional[ModelCache]) -> Optional[dict]:
+        log.debug('threshhold recieve data')
         return self.detect(data, cache)
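
The detect() change swaps the segment timestamp source: instead of taking the last data point's pandas timestamp (stored in nanoseconds) and dividing by 10^6, the detector now stamps segments with the current wall-clock time in milliseconds. A small sketch of the two conversions, with illustrative values:

# old vs. new timestamp source for threshold segments
import pandas as pd
from time import time

ts = pd.Timestamp('2019-01-01')
last_time = ts.value / 1000000  # old: pd.Timestamp.value is ns since epoch; ns / 10**6 -> ms
now = int(time()) * 1000        # new: unix time truncated to whole seconds, scaled to ms
print(last_time, now)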

docker-compose.yml (2)

@@ -12,10 +12,12 @@ services:
       - 8000:8000
     volumes:
       - data-volume:/var/www/data
+    restart: always
   analytics:
     image: hastic/analytics:latest
     build: analytics
+    restart: always

 volumes:
   data-volume:

server/src/controllers/analytics_controller.ts (4)

@@ -302,8 +302,10 @@ async function processDetectionResult(analyticUnitId: AnalyticUnit.AnalyticUnitId
     try {
       sendWebhook(analyticUnit.name, _.last(segments));
     } catch(err) {
-      console.error(`Error while sending webhook: ${err.message}`);
+      console.error(`error while sending webhook: ${err.message}`);
     }
+  } else {
+    console.debug(`skip sending webhook for ${analyticUnitId}`);
   }
   return {
     lastDetectionTime: detectionResult.lastDetectionTime,

server/src/services/data_puller.ts (27)

@@ -43,7 +43,10 @@ export class DataPuller {
       panelUrl = unit.panelUrl;
     }

-    return queryByMetric(unit.metric, panelUrl, from, to, HASTIC_API_KEY);
+    let startTime = Date.now();
+    let data = queryByMetric(unit.metric, panelUrl, from, to, HASTIC_API_KEY);
+    console.log(`data puller: query took ${Date.now() - startTime}ms for unit id ${unit.id}`);
+    return data;
   }
@@ -55,6 +58,7 @@ export class DataPuller {
     try {
       this.analyticsService.sendTask(task);
+      console.log(`data puller successfuly pushed data for unit id: ${unit.id}`);
     } catch(e) {
       console.log(`data puller got error while push data ${e.message}`);
     }
@@ -70,15 +74,16 @@ export class DataPuller {
       this._runAnalyticUnitPuller(analyticUnit);
     });

-    console.log('Data puller started');
+    console.log('data puller started');
   }

   public stopPuller() {
     this._unitTimes = {};
-    console.log('Data puller stopped');
+    console.log('data puller stopped');
   }

   private async _runAnalyticUnitPuller(analyticUnit: AnalyticUnit.AnalyticUnit) {
+    console.debug(`run data puller for analytic unit ${analyticUnit.id}`);
     // TODO: lastDetectionTime can be in ns
     const time = analyticUnit.lastDetectionTime + 1 || Date.now();
     this._unitTimes[analyticUnit.id] = time;
@@ -89,6 +94,7 @@ export class DataPuller {
     for await (const data of dataGenerator) {
       if(!_.has(this._unitTimes, analyticUnit.id)) {
+        console.log(`data puller: ${analyticUnit.id} not in _unitTimes, break`);
         break;
       }
@@ -119,20 +125,21 @@ export class DataPuller {
   async * getDataGenerator(analyticUnit: AnalyticUnit.AnalyticUnit, duration: number):
     AsyncIterableIterator<MetricDataChunk> {

-    if(!this.analyticsService.ready) {
-      return {
-        columns: [],
-        values: []
-      }
-    }
-
     const getData = async () => {
+      if(!this.analyticsService.ready) {
+        console.debug(`data generator: analytic service not ready, return empty result while wait service`);
+        return {
+          columns: [],
+          values: []
+        };
+      }
       try {
         const time = this._unitTimes[analyticUnit.id]
         const now = Date.now();
         return await this.pullData(analyticUnit, time, now);
       } catch(err) {
-        throw new Error(`Error while pulling data: ${err.message}`);
+        throw new Error(`error while pulling data: ${err.message}`);
       }
     }
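
The getDataGenerator change is more than logging: the old readiness check ran once, at the top of the async generator, so if the analytics service was not ready when pulling started, the generator returned immediately and that unit was never pulled again. Moving the check inside getData re-evaluates it on every iteration, yielding empty chunks until the service comes up. A minimal sketch of the pattern in Python (names are illustrative, not from this codebase):

# re-check readiness on every pull instead of once at generator start
import asyncio

async def data_generator(service, pull_data, interval=1.0):
    while True:
        if not service.ready:
            yield {'columns': [], 'values': []}  # empty chunk; keep the loop alive
        else:
            yield await pull_data()
        await asyncio.sleep(interval)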

server/src/services/notification_service.ts (5)

@@ -13,7 +13,7 @@ export async function sendWebhook(analyticUnitName: string, segment: Segment) {
     to: segment.to
   };

-  console.log(`Sending alert: ${JSON.stringify(alert)}`);
+  console.log(`Sending alert name:${alert.analyticUnitName} from:${new Date(alert.from)} to:${new Date(alert.to)}`);

   if(HASTIC_WEBHOOK_URL === null) {
     throw new Error(`Can't send alert, HASTIC_WEBHOOK_URL is undefined`);
@@ -37,8 +37,7 @@ export async function sendWebhook(analyticUnitName: string, segment: Segment) {
   };

   try {
-    const response = await axios(options);
-    console.log(response);
+    await axios(options);
   } catch(err) {
     console.error(`Can't send alert to ${HASTIC_WEBHOOK_URL}. Error: ${err.message}`);
   }
