Add src

commit af4ab07edb by rozetko, 6 years ago (pull/1/head)
37 changed files:

  1. .gitignore (+3)
  2. README.md (+29)
  3. REST.md (+198)
  4. server/README.md (+27)
  5. server/build/dev-server.js (+10)
  6. server/build/webpack.base.conf.js (+52)
  7. server/build/webpack.dev.conf.js (+4)
  8. server/build/webpack.prod.conf.js (+3)
  9. server/package-lock.json (+5093)
  10. server/package.json (+32)
  11. server/src/config.ts (+9)
  12. server/src/index.ts (+31)
  13. server/src/routes/alerts.ts (+62)
  14. server/src/routes/anomalies.ts (+136)
  15. server/src/routes/segments.ts (+80)
  16. server/src/services/alerts.ts (+58)
  17. server/src/services/analytics.ts (+141)
  18. server/src/services/anomalyType.ts (+117)
  19. server/src/services/json.ts (+55)
  20. server/src/services/metrics.ts (+27)
  21. server/src/services/notification.ts (+140)
  22. server/src/services/segments.ts (+75)
  23. server/tsconfig.json (+10)
  24. src/.gitignore (+11)
  25. src/add_anomaly.py (+5)
  26. src/anomaly_model.py (+157)
  27. src/data_preprocessor.py (+255)
  28. src/data_provider.py (+220)
  29. src/learn.py (+52)
  30. src/pattern_detection_model.py (+127)
  31. src/peaks_detector.py (+71)
  32. src/predict.py (+83)
  33. src/prophet_algorithm.py (+46)
  34. src/step_detect.py (+231)
  35. src/step_detector.py (+188)
  36. src/supervised_algorithm.py (+71)
  37. src/worker.py (+131)

.gitignore (+3)

@@ -0,0 +1,3 @@
node_modules/
dist/
.vscode/

README.md (+29)

@@ -0,0 +1,29 @@
# Hastic server
Implementation of basic pattern recognition and unsupervised learning for anomaly detection; the analytic unit for Hastic.
See [REST API](REST.md).
## Build & run
### Analytic unit
Python 3 project. Install the dependencies:
```
pip3 install pandas
pip3 install influxdb
```
### Server
Node.js project. Install, build, and run:
```
cd server
npm install
npm run build
npm start
```

REST.md (+198)

@@ -0,0 +1,198 @@
# Hastic server REST API
## /anomalies
### Get anomalies
`GET /anomalies?id=<anomaly_id>[&name=<anomaly_name>]`
NOTE: `name` param is deprecated, use `id` instead
Return data format:
```
{
  "name": "<anomaly_name>",
  "metric": "<metric_id>",
  "status": "<str>"
}
```
The `status` field can be one of:
- `learning`
- `ready`
- `failed`
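
For illustration, a minimal client sketch for this endpoint (assuming the server runs locally on port 8000, as in server/README.md, and the `node-fetch` package; the helper name is ours, not part of the API):
```
import fetch from 'node-fetch';

// Fetch one anomaly by id and print its status.
async function getAnomaly(id: string) {
  const res = await fetch(`http://localhost:8000/anomalies?id=${id}`);
  const anomaly = await res.json(); // { name, metric, status }
  console.log(anomaly.status); // 'learning' | 'ready' | 'failed'
}
```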
### Get anomaly status
`GET /anomalies/status?id=<anomaly_id>[&name=<anomaly_name>]`
NOTE: `name` param is deprecated, use `id` instead
Return data format:
```
{
  "status": "<str>"
}
```
The `status` field can be one of:
- `learning`
- `ready`
- `failed`
### Add anomaly
`POST /anomalies`
Data format:
```
{
  "name": "cpu_utilization_supervised",
  "metric": {
    "datasource": "influx accelerometer",
    "targets": [
      <targets>
    ]
  },
  "panelUrl": "http://grafana.example.com/d/oNZ35bWiz/new-dashboard-copy?panelId=2&fullscreen"
}
```
`targets` example:
```
{
  "alias": "command",
  "groupBy": [],
  "measurement": "data",
  "orderByTime": "ASC",
  "policy": "default",
  "refId": "A",
  "resultFormat": "time_series",
  "select": [
    [
      {
        "params": [
          "command"
        ],
        "type": "field"
      }
    ]
  ],
  "tags": []
}
```
Return data format:
```
{
  "anomaly_id": "<anomaly_id>"
}
```
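
A minimal creation sketch under the same assumptions (local server on port 8000, `node-fetch`; the helper name is illustrative):
```
import fetch from 'node-fetch';

// POST a new anomaly; the server responds with the generated id
// and starts learning in the background.
async function createAnomaly(body: object): Promise<string> {
  const res = await fetch('http://localhost:8000/anomalies', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body)
  });
  const { anomaly_id } = await res.json();
  return anomaly_id;
}
```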
### Delete anomalies
`DELETE /anomalies`
Data format:
```
{
  "id": "<anomaly_id>",
  "name": "<anomaly_name>" // deprecated, use id instead
}
```
Return data format:
```
Success
```
## /segments
### Get segments
`GET /segments?anomaly_id=<anomaly_id>[&last_segment=<id>][&from=<time_from>][&to=<time_to>]`
Return data format:
```
{
  "segments": [
    {
      "id": 0,
      "start": 1392765184318,
      "finish": 1397243699000,
      "labeled": true
    },
    ...
  ]
}
```
### Update segments
`PATCH /segments`
Data format:
```
{
  "anomaly_id": "<anomaly_id>",
  "name": "<anomaly_name>", // deprecated, use id instead
  "added_segments": [
    {
      "start": 1397164656000,
      "finish": 1397243699000
    },
    ...
  ],
  "removed_segments": [3, 9]
}
```
Return data format:
```
{
  "added_ids": [12, ...]
}
```
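
A labeling sketch under the same assumptions, with values taken from the example above:
```
import fetch from 'node-fetch';

// Add one labeled segment and remove segments 3 and 9 in a single PATCH.
async function updateSegments(anomalyId: string): Promise<number[]> {
  const res = await fetch('http://localhost:8000/segments', {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      anomaly_id: anomalyId,
      added_segments: [{ start: 1397164656000, finish: 1397243699000 }],
      removed_segments: [3, 9]
    })
  });
  const { added_ids } = await res.json();
  return added_ids; // ids the server assigned to the added segments
}
```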
## /alerts
### Check if alert is enabled for anomaly
`GET /alerts?anomaly_id=<anomaly_id>`
Return data format:
```
{
  "enable": true
}
```
### Enable / disable alert for anomaly
`POST /alerts`
Data format:
```
{
  "anomaly_id": "<anomaly_id>",
  "enable": true
}
```
Return data format:
```
{
  "status": "Ok"
}
```
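
And a toggle sketch, same assumptions as the examples above:
```
import fetch from 'node-fetch';

// Enable or disable alerting for an anomaly; the server replies with { status: "Ok" }.
async function setAlert(anomalyId: string, enable: boolean): Promise<void> {
  await fetch('http://localhost:8000/alerts', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ anomaly_id: anomalyId, enable })
  });
}
```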

server/README.md (+27)

@@ -0,0 +1,27 @@
# Hastic server
REST server for managing data for analytics.
Runs on port 8000.
# Build
```
npm install
npm run build
```
# Run
```
npm start
```
# Development
You need the `nodemon` module installed globally to run the development server.
```
npm i -g nodemon
npm run dev
```

server/build/dev-server.js (+10)

@@ -0,0 +1,10 @@
const { spawn } = require('child_process');

const webpack = spawn('webpack', ['--config', 'build/webpack.dev.conf.js'], {
  stdio: 'inherit',
  shell: true
});
//webpack.stdout.pipe(process.stdout);

const nodemon = spawn('nodemon', ['../dist/server', '--watch', 'server.js']);
nodemon.stdout.pipe(process.stdout);

server/build/webpack.base.conf.js (+52)

@@ -0,0 +1,52 @@
const path = require('path');
const fs = require('fs');
const webpack = require('webpack');

function resolve(p) {
  return path.join(__dirname, '/../', p);
}

module.exports = {
  target: 'node',
  node: {
    __dirname: false,
    __filename: false,
  },
  context: resolve('./src'),
  entry: './index',
  devtool: 'inline-source-map',
  output: {
    filename: "server.js",
    path: resolve('dist')
  },
  externals: [
    // Treat every non-relative import as external: it stays a runtime
    // require() instead of being bundled into server.js.
    function(context, request, callback) {
      if(request[0] == '.') {
        callback();
      } else {
        callback(null, "require('" + request + "')");
      }
    }
  ],
  plugins: [
    new webpack.optimize.OccurrenceOrderPlugin(),
    new webpack.HotModuleReplacementPlugin(),
    new webpack.DefinePlugin({
      'process.env.NODE_ENV': JSON.stringify('development')
    })
  ],
  resolve: {
    extensions: [".ts", ".js"]
  },
  module: {
    rules: [
      {
        test: /\.ts$/,
        loader: "ts-loader",
        exclude: /node_modules/
      }
    ]
  }
}

server/build/webpack.dev.conf.js (+4)

@@ -0,0 +1,4 @@
var base = require('./webpack.base.conf');
base.watch = true;
module.exports = base;

server/build/webpack.prod.conf.js (+3)

@@ -0,0 +1,3 @@
var base = require('./webpack.base.conf');
module.exports = base;

server/package-lock.json (+5093, generated)

File diff suppressed because it is too large.

server/package.json (+32)

@@ -0,0 +1,32 @@
{
  "name": "hastic-server",
  "version": "1.0.0",
  "description": "REST server for managing data for analytics",
  "scripts": {
    "start": "node dist/server.js",
    "dev": "node build/dev-server.js",
    "build": "webpack --config build/webpack.prod.conf.js"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/hastic/hastic-server.git"
  },
  "author": "CorpGlory",
  "license": "ISC",
  "bugs": {
    "url": "https://github.com/hastic/hastic-server/issues"
  },
  "homepage": "https://github.com/hastic/hastic-server#readme",
  "dependencies": {
    "express": "^4.16.3",
    "fast-csv": "^2.4.1",
    "telegraf": "^3.21.0"
  },
  "devDependencies": {
    "@types/express": "^4.11.1",
    "nodemon": "^1.17.3",
    "ts-loader": "^3.5.0",
    "typescript": "^2.8.3",
    "webpack": "^3.5.6"
  }
}

server/src/config.ts (+9)

@@ -0,0 +1,9 @@
import * as path from 'path';
const DATA_PATH = path.join(__dirname, '../data');
const ANALYTICS_PATH = path.join(__dirname, '../../src');
const ANOMALIES_PATH = path.join(ANALYTICS_PATH, 'anomalies');
const SEGMENTS_PATH = path.join(ANALYTICS_PATH, 'segments');
const METRICS_PATH = path.join(ANALYTICS_PATH, 'metrics');
export { DATA_PATH, ANALYTICS_PATH, ANOMALIES_PATH, SEGMENTS_PATH, METRICS_PATH }

server/src/index.ts (+31)

@@ -0,0 +1,31 @@
import * as express from 'express';
import * as bodyParser from 'body-parser';

import { router as anomaliesRouter } from './routes/anomalies';
import { router as segmentsRouter } from './routes/segments';
import { router as alertsRouter } from './routes/alerts';

import { tgBotInit } from './services/notification';

const app = express();
const PORT = 8000;

app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));

app.use(function (req, res, next) {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, PATCH, OPTIONS');
  res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept');
  next();
});

app.use('/anomalies', anomaliesRouter);
app.use('/segments', segmentsRouter);
app.use('/alerts', alertsRouter);
app.use('/', (req, res) => { res.send('Analytic unit works') });

app.listen(PORT, () => {
  console.log(`Server is running on :${PORT}`);
});

tgBotInit();

server/src/routes/alerts.ts (+62)

@@ -0,0 +1,62 @@
import * as express from 'express';

import { AnomalyId, getAnomalyIdByName, loadAnomalyById } from '../services/anomalyType';
import { getAlertsAnomalies, saveAlertsAnomalies } from '../services/alerts';

function getAlert(req, res) {
  try {
    let anomalyId: AnomalyId = req.query.anomaly_id;
    let anomaly = loadAnomalyById(anomalyId);
    if(anomaly == null) {
      anomalyId = getAnomalyIdByName(anomalyId.toLowerCase());
    }

    let alertsAnomalies = getAlertsAnomalies();
    let pos = alertsAnomalies.indexOf(anomalyId);

    let enable: boolean = (pos !== -1);
    res.status(200).send({
      enable
    });
  } catch(e) {
    res.status(500).send({
      code: 500,
      message: 'Internal error'
    });
  }
}

function changeAlert(req, res) {
  try {
    let anomalyId: AnomalyId = req.body.anomaly_id;
    let enable: boolean = req.body.enable;

    let anomaly = loadAnomalyById(anomalyId);
    if(anomaly == null) {
      anomalyId = getAnomalyIdByName(anomalyId.toLowerCase());
    }

    let alertsAnomalies = getAlertsAnomalies();
    let pos: number = alertsAnomalies.indexOf(anomalyId);
    if(enable && pos == -1) {
      alertsAnomalies.push(anomalyId);
      saveAlertsAnomalies(alertsAnomalies);
    } else if(!enable && pos > -1) {
      alertsAnomalies.splice(pos, 1);
      saveAlertsAnomalies(alertsAnomalies);
    }
    res.status(200).send({
      status: 'Ok'
    });
  } catch(e) {
    res.status(500).send({
      code: 500,
      message: 'Internal error'
    });
  }
}

export const router = express.Router();

router.get('/', getAlert);
router.post('/', changeAlert);

server/src/routes/anomalies.ts (+136)

@@ -0,0 +1,136 @@
import * as express from 'express';

import {
  Metric,
  Anomaly,
  saveAnomaly,
  insertAnomaly, removeAnomaly, loadAnomalyByName, loadAnomalyById, getAnomalyIdByName
} from '../services/anomalyType';
import { runLearning } from '../services/analytics';
import { saveTargets } from '../services/metrics';

async function sendAnomalyTypeStatus(req, res) {
  let id = req.query.id;
  let name = req.query.name;
  try {
    let anomaly: Anomaly;
    if(id !== undefined) {
      anomaly = loadAnomalyById(id);
    } else {
      anomaly = loadAnomalyByName(name);
    }
    if(anomaly === null) {
      res.status(404).send({
        code: 404,
        message: 'Not found'
      });
      return;
    }
    if(anomaly.status === undefined) {
      throw new Error('No status for ' + name);
    }
    res.status(200).send({ status: anomaly.status });
  } catch(e) {
    console.error(e);
    // TODO: send 404 when we know the anomaly wasn't found
    res.status(500).send({ error: 'Can\'t return anything' });
  }
}

async function getAnomaly(req, res) {
  try {
    let id = req.query.id;
    let name = req.query.name;

    let anomaly: Anomaly;
    if(id !== undefined) {
      anomaly = loadAnomalyById(id);
    } else {
      anomaly = loadAnomalyByName(name.toLowerCase());
    }
    if(anomaly === null) {
      res.status(404).send({
        code: 404,
        message: 'Not found'
      });
      return;
    }

    let payload = JSON.stringify({
      name: anomaly.name,
      metric: anomaly.metric,
      status: anomaly.status
    });
    res.status(200).send(payload);
  } catch(e) {
    console.error(e);
    // TODO: send 404 when we know the anomaly wasn't found
    res.status(500).send('Can\'t get anything');
  }
}

async function createAnomaly(req, res) {
  try {
    const metric: Metric = {
      datasource: req.body.metric.datasource,
      targets: saveTargets(req.body.metric.targets)
    };

    const anomaly: Anomaly = {
      name: req.body.name,
      panelUrl: req.body.panelUrl,
      metric: metric,
      status: 'learning',
      last_prediction_time: 0,
      next_id: 0
    };
    let anomalyId = insertAnomaly(anomaly);
    if(anomalyId === null) {
      res.status(403).send({
        code: 403,
        message: 'Already exists'
      });
      return; // without this, the 200 response below would also be sent
    }

    let payload = JSON.stringify({ anomaly_id: anomalyId });
    res.status(200).send(payload);

    runLearning(anomalyId);
  } catch(e) {
    res.status(500).send({
      code: 500,
      message: 'Internal error'
    });
  }
}

function deleteAnomaly(req, res) {
  try {
    let id = req.query.id;
    let name = req.query.name;

    if(id !== undefined) {
      removeAnomaly(id);
    } else {
      removeAnomaly(name.toLowerCase());
    }
    res.status(200).send({
      code: 200,
      message: 'Success'
    });
  } catch(e) {
    res.status(500).send({
      code: 500,
      message: 'Internal error'
    });
  }
}

export const router = express.Router();

router.get('/status', sendAnomalyTypeStatus);
router.get('/', getAnomaly);
router.post('/', createAnomaly);
router.delete('/', deleteAnomaly);

server/src/routes/segments.ts (+80)

@@ -0,0 +1,80 @@
import * as express from 'express';

import {
  getLabeledSegments,
  insertSegments,
  removeSegments,
} from '../services/segments';
import { runLearning } from '../services/analytics';
import { Anomaly, AnomalyId, getAnomalyIdByName, loadAnomalyById } from '../services/anomalyType';

async function sendSegments(req, res) {
  try {
    let anomalyId: AnomalyId = req.query.anomaly_id;
    let anomaly: Anomaly = loadAnomalyById(anomalyId);
    if(anomaly === null) {
      anomalyId = getAnomalyIdByName(anomalyId);
    }

    let lastSegmentId = req.query.last_segment;
    let timeFrom = req.query.from;
    let timeTo = req.query.to;

    let segments = getLabeledSegments(anomalyId);

    // Id filtering
    if(lastSegmentId !== undefined) {
      segments = segments.filter(el => el.id > lastSegmentId);
    }

    // Time filtering
    if(timeFrom !== undefined) {
      segments = segments.filter(el => el.finish > timeFrom);
    }
    if(timeTo !== undefined) {
      segments = segments.filter(el => el.start < timeTo);
    }

    let payload = JSON.stringify({
      segments
    });
    res.status(200).send(payload);
  } catch(e) {
    res.status(500).send({
      code: 500,
      message: 'Internal error'
    });
  }
}

async function updateSegments(req, res) {
  try {
    let segmentsUpdate = req.body;

    let anomalyId = segmentsUpdate.anomaly_id;
    let anomalyName = segmentsUpdate.name;

    if(anomalyId === undefined) {
      anomalyId = getAnomalyIdByName(anomalyName.toLowerCase());
    }

    let addedIds = insertSegments(anomalyId, segmentsUpdate.added_segments, true);
    removeSegments(anomalyId, segmentsUpdate.removed_segments);

    let payload = JSON.stringify({ added_ids: addedIds });
    res.status(200).send(payload);

    runLearning(anomalyId);
  } catch(e) {
    res.status(500).send({
      code: 500,
      message: 'Internal error'
    });
  }
}

export const router = express.Router();

router.get('/', sendSegments);
router.patch('/', updateSegments);

server/src/services/alerts.ts (+58)

@@ -0,0 +1,58 @@
import { getJsonDataSync, writeJsonDataSync } from './json';
import * as path from 'path';
import { AnomalyId } from './anomalyType';
import { ANOMALIES_PATH } from '../config';
import { runPredict } from './analytics';
import { sendNotification } from './notification';
import { getLabeledSegments } from './segments';

function getAlertsAnomalies(): AnomalyId[] {
  return getJsonDataSync(path.join(ANOMALIES_PATH, `alerts_anomalies.json`));
}

function saveAlertsAnomalies(anomalies: AnomalyId[]) {
  return writeJsonDataSync(path.join(ANOMALIES_PATH, `alerts_anomalies.json`), anomalies);
}

function processAlerts(anomalyId) {
  let segments = getLabeledSegments(anomalyId);

  const currentTime = new Date().getTime();
  const activeAlert = activeAlerts.has(anomalyId);
  let newActiveAlert = false;

  // The alert is considered active while the latest segment
  // ends within the last alertTimeout milliseconds.
  if(segments.length > 0) {
    let lastSegment = segments[segments.length - 1];
    if(lastSegment.finish >= currentTime - alertTimeout) {
      newActiveAlert = true;
    }
  }

  // Notify only on state transitions: inactive -> active and back.
  if(!activeAlert && newActiveAlert) {
    activeAlerts.add(anomalyId);
    sendNotification(anomalyId, true);
  } else if(activeAlert && !newActiveAlert) {
    activeAlerts.delete(anomalyId);
    sendNotification(anomalyId, false);
  }
}

async function alertsTick() {
  let alertsAnomalies = getAlertsAnomalies();
  for (let anomalyId of alertsAnomalies) {
    try {
      await runPredict(anomalyId);
      processAlerts(anomalyId);
    } catch (e) {
      console.error(e);
    }
  }
  // Poll again in 5 seconds.
  setTimeout(alertsTick, 5000);
}

const alertTimeout = 60000; // ms
const activeAlerts = new Set<string>();
setTimeout(alertsTick, 5000);

export { getAlertsAnomalies, saveAlertsAnomalies }

server/src/services/analytics.ts (+141)

@@ -0,0 +1,141 @@
import { spawn } from 'child_process';
import { ANALYTICS_PATH } from '../config';
import {
  Anomaly,
  AnomalyId, getAnomalyTypeInfo,
  loadAnomalyById,
  setAnomalyPredictionTime,
  setAnomalyStatus
} from './anomalyType';
import { getTarget } from './metrics';
import { getLabeledSegments, insertSegments, removeSegments } from './segments';
import { split, mapSync } from 'event-stream';

// One long-lived Python worker; tasks go to its stdin as newline-delimited
// JSON and responses come back on its stdout.
const learnWorker = spawn('python3', ['worker.py'], { cwd: ANALYTICS_PATH });
learnWorker.stdout.pipe(split())
  .pipe(
    mapSync(function(line) {
      console.log(line);
      onMessage(line);
    })
  );

learnWorker.stderr.on('data', data => console.error(`worker stderr: ${data}`));

const taskMap = {};
let nextTaskId = 0;

function onMessage(data) {
  let response = JSON.parse(data);
  let taskId = response.__task_id;
  // let anomalyName = response.anomaly_name;
  // let task = response.task;
  let status = response.status;
  if(status === 'success' || status === 'failed') {
    if(taskId in taskMap) {
      // Resolve the promise that runTask registered for this task id.
      let resolver = taskMap[taskId];
      resolver(response);
      delete taskMap[taskId];
    }
  }
}

function runTask(task): Promise<any> {
  let anomaly: Anomaly = loadAnomalyById(task.anomaly_id);
  task.metric = {
    datasource: anomaly.metric.datasource,
    targets: anomaly.metric.targets.map(t => getTarget(t))
  };

  task.__task_id = nextTaskId++;
  let command = JSON.stringify(task);
  learnWorker.stdin.write(`${command}\n`);
  return new Promise<Object>((resolve, reject) => {
    taskMap[task.__task_id] = resolve;
  });
}

async function runLearning(anomalyId: AnomalyId) {
  let segments = getLabeledSegments(anomalyId);
  setAnomalyStatus(anomalyId, 'learning');
  let anomaly: Anomaly = loadAnomalyById(anomalyId);

  // The analytics type and preset are currently inferred from the anomaly name.
  let analyticsType = "anomalies";
  let preset = undefined;
  if (anomaly.name.includes("jumps")) {
    analyticsType = "patterns";
    preset = "steps";
  }
  if (anomaly.name.includes("cliffs") || anomaly.name.includes("drops")) {
    analyticsType = "patterns";
    preset = "cliffs";
  }
  if (anomaly.name.includes("peaks")) {
    analyticsType = "patterns";
    preset = "peaks";
  }

  let task = {
    type: 'learn',
    anomaly_id: anomalyId,
    analytics_type: analyticsType,
    preset,
    segments: segments
  };
  let result = await runTask(task);

  if (result.status === 'success') {
    setAnomalyStatus(anomalyId, 'ready');
    insertSegments(anomalyId, result.segments, false);
    setAnomalyPredictionTime(anomalyId, result.last_prediction_time);
  } else {
    setAnomalyStatus(anomalyId, 'failed');
  }
}

async function runPredict(anomalyId: AnomalyId) {
  let anomaly: Anomaly = loadAnomalyById(anomalyId);

  let analyticsType = "anomalies";
  let preset = undefined;
  if (anomaly.name.includes("jump")) {
    analyticsType = "patterns";
    preset = "steps";
  }
  if (anomaly.name.includes("cliffs") || anomaly.name.includes("drops")) {
    analyticsType = "patterns";
    preset = "cliffs";
  }
  if (anomaly.name.includes("peaks")) {
    analyticsType = "patterns";
    preset = "peaks";
  }

  let task = {
    type: 'predict',
    anomaly_id: anomalyId,
    analytics_type: analyticsType,
    preset,
    last_prediction_time: anomaly.last_prediction_time
  };
  let result = await runTask(task);

  if(result.status === 'failed') {
    return [];
  }

  // Merging segments: if the first newly predicted segment overlaps the last
  // stored one, extend it backwards and drop the stored segment.
  let segments = getLabeledSegments(anomalyId);
  if(segments.length > 0 && result.segments.length > 0) {
    let lastOldSegment = segments[segments.length - 1];
    let firstNewSegment = result.segments[0];
    if(firstNewSegment.start <= lastOldSegment.finish) {
      result.segments[0].start = lastOldSegment.start;
      removeSegments(anomalyId, [lastOldSegment.id]);
    }
  }

  insertSegments(anomalyId, result.segments, false);
  setAnomalyPredictionTime(anomalyId, result.last_prediction_time);
  return result.segments;
}

export { runLearning, runPredict }
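
For orientation, the message shapes this file implies for the stdin/stdout worker protocol, written out as interfaces. These are inferred from the code above, not a documented contract:
```
// Task written to the Python worker, one JSON object per line on stdin.
interface WorkerTask {
  __task_id: number;                        // used to match the response
  type: 'learn' | 'predict';
  anomaly_id: string;
  analytics_type: 'anomalies' | 'patterns';
  preset?: 'steps' | 'cliffs' | 'peaks';
  segments?: object[];                      // 'learn' tasks only
  last_prediction_time?: number;            // 'predict' tasks only
  metric?: object;                          // attached by runTask
}

// Response read back from the worker's stdout.
interface WorkerResponse {
  __task_id: number;
  status: 'success' | 'failed';
  segments?: object[];
  last_prediction_time?: number;
}
```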

server/src/services/anomalyType.ts (+117)

@@ -0,0 +1,117 @@
import * as path from 'path';
import { getJsonDataSync, writeJsonDataSync } from './json';
import { ANOMALIES_PATH } from '../config';
import * as fs from 'fs';
import * as crypto from 'crypto';

export type Metric = {
  datasource: string,
  targets: string[]
}

export type Anomaly = {
  name: string,
  panelUrl: string,
  metric: Metric,
  status: string,
  last_prediction_time: number,
  next_id: number
}

export type AnomalyId = string;

let anomaliesNameToIdMap = {};

function loadAnomaliesMap() {
  let filename = path.join(ANOMALIES_PATH, `all_anomalies.json`);
  anomaliesNameToIdMap = getJsonDataSync(filename);
}

function saveAnomaliesMap() {
  let filename = path.join(ANOMALIES_PATH, `all_anomalies.json`);
  writeJsonDataSync(filename, anomaliesNameToIdMap);
}

function getAnomalyIdByName(anomalyName: string): AnomalyId {
  loadAnomaliesMap();
  anomalyName = anomalyName.toLowerCase();
  if(anomalyName in anomaliesNameToIdMap) {
    return anomaliesNameToIdMap[anomalyName];
  }
  return anomalyName;
}

function insertAnomaly(anomaly: Anomaly): AnomalyId {
  const hashString = anomaly.name + (new Date()).toString();
  const anomalyId: AnomalyId = crypto.createHash('md5').update(hashString).digest('hex');
  anomaliesNameToIdMap[anomaly.name] = anomalyId;
  saveAnomaliesMap();
  // return anomalyId
  // const anomalyId:AnomalyId = anomaly.name;
  let filename = path.join(ANOMALIES_PATH, `${anomalyId}.json`);
  if(fs.existsSync(filename)) {
    return null;
  }
  saveAnomaly(anomalyId, anomaly);
  return anomalyId;
}

function removeAnomaly(anomalyId: AnomalyId) {
  let filename = path.join(ANOMALIES_PATH, `${anomalyId}.json`);
  fs.unlinkSync(filename);
}

function saveAnomaly(anomalyId: AnomalyId, anomaly: Anomaly) {
  let filename = path.join(ANOMALIES_PATH, `${anomalyId}.json`);
  return writeJsonDataSync(filename, anomaly);
}

function loadAnomalyById(anomalyId: AnomalyId): Anomaly {
  let filename = path.join(ANOMALIES_PATH, `${anomalyId}.json`);
  if(!fs.existsSync(filename)) {
    return null;
  }
  return getJsonDataSync(filename);
}

function loadAnomalyByName(anomalyName: string): Anomaly {
  let anomalyId = getAnomalyIdByName(anomalyName);
  return loadAnomalyById(anomalyId);
}

function saveAnomalyTypeInfo(info) {
  console.log('Saving');
  let filename = path.join(ANOMALIES_PATH, `${info.name}.json`);
  if(info.next_id === undefined) {
    info.next_id = 0;
  }
  if(info.last_prediction_time === undefined) {
    info.last_prediction_time = 0;
  }
  return writeJsonDataSync(filename, info);
}

function getAnomalyTypeInfo(name) {
  return getJsonDataSync(path.join(ANOMALIES_PATH, `${name}.json`));
}

function setAnomalyStatus(anomalyId: AnomalyId, status: string) {
  let info = loadAnomalyById(anomalyId);
  info.status = status;
  saveAnomaly(anomalyId, info);
}

function setAnomalyPredictionTime(anomalyId: AnomalyId, lastPredictionTime: number) {
  let info = loadAnomalyById(anomalyId);
  info.last_prediction_time = lastPredictionTime;
  saveAnomaly(anomalyId, info);
}

export {
  saveAnomaly, loadAnomalyById, loadAnomalyByName, insertAnomaly, removeAnomaly, saveAnomalyTypeInfo,
  getAnomalyTypeInfo, getAnomalyIdByName, setAnomalyStatus, setAnomalyPredictionTime
}

server/src/services/json.ts (+55)

@@ -0,0 +1,55 @@
import * as fs from 'fs';

async function getJsonData(filename: string): Promise<Object> {
  var data = await new Promise<string>((resolve, reject) => {
    fs.readFile(filename, 'utf8', (err, data) => {
      if(err) {
        console.error(err);
        reject('Can\'t read file');
      } else {
        resolve(data);
      }
    });
  });

  try {
    return JSON.parse(data);
  } catch(e) {
    console.error(e);
    throw new Error('Wrong file format');
  }
}

function writeJsonData(filename: string, data: Object) {
  return new Promise((resolve, reject) => {
    fs.writeFile(filename, JSON.stringify(data), 'utf8', (err) => {
      if(err) {
        console.error(err);
        reject('Can\'t write file');
      } else {
        resolve();
      }
    });
  })
}

function getJsonDataSync(filename: string) {
  let data = fs.readFileSync(filename, 'utf8');
  try {
    return JSON.parse(data);
  } catch(e) {
    console.error(e);
    throw new Error('Wrong file format');
  }
}

function writeJsonDataSync(filename: string, data: Object) {
  fs.writeFileSync(filename, JSON.stringify(data));
}

export {
  getJsonData,
  writeJsonData,
  getJsonDataSync,
  writeJsonDataSync
}

server/src/services/metrics.ts (+27)

@@ -0,0 +1,27 @@
import * as path from 'path';
import { getJsonDataSync, writeJsonDataSync } from './json';
import { METRICS_PATH } from '../config';
import * as crypto from 'crypto';

function saveTargets(targets) {
  let metrics = [];
  for (let target of targets) {
    metrics.push(saveTarget(target));
  }
  return metrics;
}

function saveTarget(target) {
  // The target id is the md5 hash of the target's JSON representation.
  const targetId = crypto.createHash('md5').update(JSON.stringify(target)).digest('hex');
  let filename = path.join(METRICS_PATH, `${targetId}.json`);
  writeJsonDataSync(filename, target);
  return targetId;
}

function getTarget(targetId) {
  let filename = path.join(METRICS_PATH, `${targetId}.json`);
  return getJsonDataSync(filename);
}

export { saveTargets, getTarget }

server/src/services/notification.ts (+140)

@@ -0,0 +1,140 @@
//import * as Telegraf from 'telegraf'
import * as path from 'path';
import { DATA_PATH } from '../config';
import { getJsonDataSync, writeJsonDataSync } from './json';
import { AnomalyId } from './anomalyType';

type SubscriberId = string;
// bot_config.json is a plain JSON object, so subscriptions are typed as a
// dictionary rather than a Map (the code below uses `in` and indexing).
type SubscribersMap = { [anomalyId: string]: SubscriberId[] };

type BotConfig = {
  token: string,
  subscriptions: SubscribersMap
};

function sendNotification(anomalyName, active) {
  console.log('Notification ' + anomalyName);
  if(anomalyName in botConfig.subscriptions) {
    let notificationMessage;
    if(active) {
      notificationMessage = 'Alert! Anomaly type ' + anomalyName;
    } else {
      notificationMessage = 'Ok! Anomaly type ' + anomalyName;
    }
    for (let subscriberId of botConfig.subscriptions[anomalyName]) {
      bot.telegram.sendMessage(subscriberId, notificationMessage);
    }
  }
}

function loadBotConfig(): BotConfig {
  let filename = path.join(DATA_PATH, `bot_config.json`);
  let jsonData;
  try {
    jsonData = getJsonDataSync(filename);
  } catch(e) {
    console.error(e.message);
    jsonData = [];
  }
  return jsonData;
}

function saveBotConfig(botConfig: BotConfig) {
  let filename = path.join(DATA_PATH, `bot_config.json`);
  try {
    writeJsonDataSync(filename, botConfig);
  } catch(e) {
    console.error(e.message);
  }
}

// Middleware: parses "/command arg1 arg2" messages into ctx.state.command.
const commandArgs = (ctx, next) => {
  try {
    if(ctx.updateType === 'message') {
      const text = ctx.update.message.text;
      if(text !== undefined && text.startsWith('/')) {
        const match = text.match(/^\/([^\s]+)\s?(.+)?/);
        let args = [];
        let command;
        if(match !== null) {
          if(match[1]) {
            command = match[1];
          }
          if(match[2]) {
            args = match[2].split(' ');
          }
        }
        ctx.state.command = {
          raw: text,
          command,
          args,
        };
      }
    }
    return next(ctx);
  } catch (e) {
    // errors from malformed updates are swallowed here
  }
};

function addNotification(ctx) {
  console.log('addNotification');
  let command = ctx.state.command;
  let chatId = ctx.chat.id;
  if(command.args.length > 0) {
    for (let anomalyName of command.args) {
      if(!(anomalyName in botConfig.subscriptions)) {
        botConfig.subscriptions[anomalyName] = [];
      }
      if(botConfig.subscriptions[anomalyName].includes(chatId)) {
        return ctx.reply('You are already subscribed to alerts from anomaly ' + command.args);
      } else {
        botConfig.subscriptions[anomalyName].push(chatId);
        saveBotConfig(botConfig);
      }
    }
    return ctx.reply('You have been subscribed to alerts from anomaly ' + command.args);
  } else {
    return ctx.reply('You should use the syntax: /addNotification <anomaly_name>');
  }
}

function removeNotification(ctx) {
  let command = ctx.state.command;
  let chatId = ctx.chat.id;
  if(command.args.length > 0) {
    for (let anomalyName of command.args) {
      if(anomalyName in botConfig.subscriptions) {
        botConfig.subscriptions[anomalyName] = botConfig.subscriptions[anomalyName].filter(el => el !== chatId);
        saveBotConfig(botConfig);
      }
    }
    return ctx.reply('You have been unsubscribed from alerts from ' + command.args);
  } else {
    return ctx.reply('You should use the syntax: /removeNotification <anomaly_name>');
  }
}

const Telegraf = require('telegraf');
let botConfig: BotConfig;
let bot;

function tgBotInit() {
  try {
    botConfig = loadBotConfig();
    bot = new Telegraf(botConfig.token);

    bot.use(commandArgs);

    bot.command('addNotification', addNotification);
    bot.command('removeNotification', removeNotification);

    bot.startPolling();
  } catch(e) {
    // TODO: handle exception
  }
}

export { sendNotification, tgBotInit }

server/src/services/segments.ts (+75)

@@ -0,0 +1,75 @@
import * as path from 'path';
import { getJsonDataSync, writeJsonDataSync } from './json';
import { SEGMENTS_PATH } from '../config';
import { AnomalyId, loadAnomalyById, saveAnomaly } from './anomalyType';

function getLabeledSegments(anomalyId: AnomalyId) {
  let filename = path.join(SEGMENTS_PATH, `${anomalyId}_labeled.json`);

  let segments = [];
  try {
    segments = getJsonDataSync(filename);
    for (let segment of segments) {
      if (segment.labeled === undefined) {
        segment.labeled = false;
      }
    }
  } catch (e) {
    console.error(e.message);
  }
  return segments;
}

function getPredictedSegments(anomalyId: AnomalyId) {
  let filename = path.join(SEGMENTS_PATH, `${anomalyId}_segments.json`);

  let jsonData;
  try {
    jsonData = getJsonDataSync(filename);
  } catch(e) {
    console.error(e.message);
    jsonData = [];
  }
  return jsonData;
}

function saveSegments(anomalyId: AnomalyId, segments) {
  let filename = path.join(SEGMENTS_PATH, `${anomalyId}_labeled.json`);

  try {
    return writeJsonDataSync(filename, segments);
  } catch(e) {
    console.error(e.message);
    throw new Error('Can\'t write to db');
  }
}

function insertSegments(anomalyId: AnomalyId, addedSegments, labeled: boolean) {
  // Segment ids are allocated from the anomaly's next_id counter.
  let info = loadAnomalyById(anomalyId);
  let segments = getLabeledSegments(anomalyId);

  let nextId = info.next_id;
  let addedIds = [];
  for (let segment of addedSegments) {
    segment.id = nextId;
    segment.labeled = labeled;
    addedIds.push(nextId);
    nextId++;
    segments.push(segment);
  }
  info.next_id = nextId;
  saveSegments(anomalyId, segments);
  saveAnomaly(anomalyId, info);
  return addedIds;
}

function removeSegments(anomalyId: AnomalyId, removedSegments) {
  let segments = getLabeledSegments(anomalyId);
  for (let segmentId of removedSegments) {
    segments = segments.filter(el => el.id !== segmentId);
  }
  saveSegments(anomalyId, segments);
}

export { getLabeledSegments, getPredictedSegments, saveSegments, insertSegments, removeSegments }

server/tsconfig.json (+10)

@@ -0,0 +1,10 @@
{
  "compilerOptions": {
    "outDir": "./dist/",
    "sourceMap": true,
    "noImplicitAny": false,
    "module": "commonjs",
    "target": "es2015",
    "allowJs": true
  }
}

src/.gitignore (+11)

@@ -0,0 +1,11 @@
anomalies/
segments/
datasets/
datasources/
models/
metrics/
__pycache__/
*.pyc
*.txt
*.log
tasks.csv

src/add_anomaly.py (+5)

@@ -0,0 +1,5 @@
from worker import worker

if __name__ == "__main__":
    w = worker()
    w.do_task({"type": "learn", "anomaly_name": "cpu_utilization_supervised", "segments": []})

src/anomaly_model.py (+157)

@@ -0,0 +1,157 @@
import os.path
from data_provider import DataProvider
from data_preprocessor import data_preprocessor
import json
import pandas as pd
import logging

datasource_folder = "datasources/"
dataset_folder = "datasets/"
anomalies_folder = "anomalies/"
models_folder = "models/"
metrics_folder = "metrics/"

logger = logging.getLogger('analytic_toolset')


def anomalies_to_timestamp(anomalies):
    for anomaly in anomalies:
        anomaly['start'] = int(anomaly['start'].timestamp() * 1000)
        anomaly['finish'] = int(anomaly['finish'].timestamp() * 1000)
    return anomalies


class AnomalyModel:

    def __init__(self, anomaly_name):
        self.anomaly_name = anomaly_name
        self.load_anomaly_config()

        datasource = self.anomaly_config['metric']['datasource']
        metric_name = self.anomaly_config['metric']['targets'][0]

        dbconfig_filename = os.path.join(datasource_folder, datasource + ".json")
        target_filename = os.path.join(metrics_folder, metric_name + ".json")
        dataset_filename = os.path.join(dataset_folder, metric_name + ".csv")
        augmented_path = os.path.join(dataset_folder, metric_name + "_augmented.csv")

        with open(dbconfig_filename, 'r') as config_file:
            dbconfig = json.load(config_file)
        with open(target_filename, 'r') as file:
            target = json.load(file)

        self.data_prov = DataProvider(dbconfig, target, dataset_filename)
        self.preprocessor = data_preprocessor(self.data_prov, augmented_path)
        self.model = None
        self.__load_model()

    def anomalies_box(self, anomalies):
        max_time = 0
        min_time = float("inf")
        for anomaly in anomalies:
            max_time = max(max_time, anomaly['finish'])
            min_time = min(min_time, anomaly['start'])
        min_time = pd.to_datetime(min_time, unit='ms')
        max_time = pd.to_datetime(max_time, unit='ms')
        return min_time, max_time

    def learn(self, anomalies):
        logger.info("Start to learn for anomaly_name='%s'" % self.anomaly_name)

        confidence = 0.02
        dataframe = self.data_prov.get_dataframe()
        start_index, stop_index = 0, len(dataframe)
        if len(anomalies) > 0:
            confidence = 0.0
            min_time, max_time = self.anomalies_box(anomalies)
            start_index = dataframe[dataframe['timestamp'] >= min_time].index[0]
            stop_index = dataframe[dataframe['timestamp'] > max_time].index[0]
            start_index, stop_index = self.preprocessor.expand_indexes(start_index, stop_index)
            dataframe = dataframe[start_index:stop_index]

        train_augmented = self.preprocessor.get_augmented_data(
            start_index,
            stop_index,