Compare commits

..

35 Commits

Author SHA1 Message Date
rozetko 67ac4e073f 2.0.4 1 year ago
rozetko 813c50d36d fix grafana errors not caught by tsdb-kit 1 year ago
rozetko b771a81962 Merge pull request 'SQL: support Grafana aggregation macros' (#13) from sql-support-grafana-aggregation-macros into master 1 year ago
rozetko 32425542ae 2.0.3 1 year ago
rozetko 3c768d4d92 tests for Grafana SQL aggregation macros 1 year ago
rozetko 7a01a86ddb SQL: support Grafana aggregation macros ($__timeGroup and $__timeGroupAlias) 1 year ago
rozetko ad6b77b89d 2.0.2 2 years ago
rozetko 3ca05e5ca2 Merge branch 'master' of code.corpglory.net:hastic/tsdb-kit 2 years ago
rozetko f0b7fc9b61 2.0.1 2 years ago
rozetko f8367d9e58 Merge pull request 'SQL: support Grafana 9' (#12) from update-sql-exporter-to-work-with-grafana-9 into master 2 years ago
rozetko 6bf90e69bb upd SQL test 2 years ago
rozetko 7c89c455ed update SQL connector to support Grafana 9 2 years ago
rozetko f6679a98be Merge pull request 'CLI-arguments && docs && Prometheus auth' (#8) from cli-arguments into master 2 years ago
rozetko fe51c32f2a 2.0.0 2 years ago
rozetko 4b38cbdb34 Prometheus: support auth 2 years ago
rozetko eac608eab8 upd readme 2 years ago
rozetko 864537766d version from package.json 2 years ago
rozetko afcf696ae5 new `dev` scripts 2 years ago
rozetko 7b487f9fbb 1.2.0-beta2 2 years ago
rozetko 8629f86332 shabang to bin 2 years ago
rozetko 6448f52095 upd npmignore 2 years ago
rozetko 5212d7a89a 1.2.0-beta 2 years ago
rozetko d68faaa8ec upd readme 2 years ago
rozetko 82dfc20b2f init CLI arguments 2 years ago
rozetko e38c2db4e6 Merge pull request 'QueryService refactoring' (#6) from query-service-refactoring into master 2 years ago
rozetko fb3b597b3f QueryService refactoring: 2 years ago
rozetko bc6988b31b Merge pull request 'better names' (#5) from better-names into master 2 years ago
rozetko 087e001534 rename files 2 years ago
rozetko 5e20b4de96 better names 2 years ago
rozetko 8112975b12 Merge pull request 'direct Prometheus queries' (#1) from direct-prometheus-queries into master 2 years ago
rozetko b8f1f27f1a some refactoring 2 years ago
rozetko de97390d1e sample bin file 2 years ago
rozetko fbf6c4949f upd types 2 years ago
rozetko 61562a9c94 fix bin build 2 years ago
rozetko 3889b3dd64 rm package-lock 2 years ago
  1. 9
      .npmignore
  2. 32
      README.md
  3. 8
      bin.tsconfig.json
  4. 6
      lib.tsconfig.json
  5. 4885
      package-lock.json
  6. 21
      package.json
  7. 13
      spec/elasticsearch.jest.ts
  8. 10
      spec/graphite.jest.ts
  9. 17
      spec/influxdb.jest.ts
  10. 20
      spec/prometheus.jest.ts
  11. 125
      spec/sql.jest.ts
  12. 2
      spec/utils.jest.ts
  13. 31
      src/connectors/connector_factory.ts
  14. 44
      src/connectors/elasticsearch.ts
  15. 12
      src/connectors/graphite.ts
  16. 58
      src/connectors/index.ts
  17. 13
      src/connectors/influxdb.ts
  18. 5
      src/connectors/mysql.ts
  19. 5
      src/connectors/postgres.ts
  20. 28
      src/connectors/prometheus.ts
  21. 31
      src/connectors/sql.ts
  22. 8
      src/connectors/utils.ts
  23. 134
      src/grafana_service.ts
  24. 58
      src/index.ts
  25. 40
      src/metrics/metric.ts
  26. 78
      src/metrics/metrics_factory.ts
  27. 5
      src/metrics/mysql_metric.ts
  28. 5
      src/metrics/postgres_metric.ts
  29. 62
      src/models/query_config.ts
  30. 11
      src/services/query_service/base.ts
  31. 52
      src/services/query_service/direct.ts
  32. 64
      src/services/query_service/grafana.ts
  33. 23
      src/services/query_service/query_service_factory.ts
  34. 50
      src/tsdb-kit/index.ts
  35. 15
      src/types.ts
  36. 33
      webpack.config.js
  37. 683
      yarn.lock

9
.npmignore

@ -2,7 +2,8 @@ src
spec spec
.travis.yml .travis.yml
jest.config.js jest.config.js
tsconfig.lib.json lib.tsconfig.json
tsconfig.bin.json bin.tsconfig.json
tsconfig.jest.json webpack.config.js
yarn.lock
.vscode

32
README.md

@ -1,21 +1,43 @@
# tsdb-kit # tsdb-kit
[![Build Status](https://travis-ci.org/CorpGlory/tsdb-kit.svg?branch=master)](https://travis-ci.org/CorpGlory/tsdb-kit) TSDB-kit is a node.js library and CLI-tool for querying timeseries-datasources.
Node.js library and utilities for running Grafana datasources on backend. ## Features
You can send your datasource metrics from Grafana to compile it on Node.js and query your datasource via Grafana API in background.
User gets a unified interface to all datasources. Library gives single output format: fields order, time units, etc - can query datasources directly or using Grafana as proxy
- can be used as a lib from your node.js-code or as a CLI-tool
- user gets a unified interface to all datasources. Library gives single output format: fields order, time units, etc.
## Supported datasources ## Supported datasources
### Direct
* Prometheus
### Grafana
* Influxdb * Influxdb
* Graphite * Graphite
* Prometheus * Prometheus
* PostgreSQL / TimescaleDB / MySQL * PostgreSQL / TimescaleDB / MySQL
* ElasticSearch * ElasticSearch
Please write us at ping@corpglory.com if you want your datasource to be supported: Please write us at ping@corpglory.com if you want your datasource to be supported
## Usage
### Lib (TODO)
### CLI
For now, CLI supports only direct Prometheus queries
For example:
`npx @corpglory/tsdb-kit -U http://localhost:9090 -q '100-(avg by (instance) (irate(node_cpu_seconds_total{mode="idle"}[5m])) * 100)' -u my_user -p my_password`
## Development (TODO)
## Projects based on library ## Projects based on library
* [grafana-data-exporter](https://github.com/CorpGlory/grafana-data-exporter) * [grafana-data-exporter](https://github.com/CorpGlory/grafana-data-exporter)

8
bin.tsconfig.json

@ -1,10 +1,10 @@
{ {
"compilerOptions": { "compilerOptions": {
"moduleResolution": "node",
"sourceMap": true, "sourceMap": true,
"target": "es2015", "target": "es6",
"declaration": false, "declaration": false,
"outFile": "bin/tsdb-kit.js" "skipLibCheck": true
}, },
"include": [ "src/**/*.ts" ], "include": [ "src/**/*.ts" ]
"exclude": [ "src/index.ts" ]
} }

6
lib.tsconfig.json

@ -2,9 +2,11 @@
"compilerOptions": { "compilerOptions": {
"sourceMap": true, "sourceMap": true,
"module": "commonjs", "module": "commonjs",
"target": "es2015", "moduleResolution": "node",
"target": "esnext",
"declaration": true, "declaration": true,
"outDir": "lib" "outDir": "lib",
"skipLibCheck": true
}, },
"include": [ "src/**/*.ts" ], "include": [ "src/**/*.ts" ],
"exclude": [ "src/tsdb-kit" ] "exclude": [ "src/tsdb-kit" ]

4885
package-lock.json generated

File diff suppressed because it is too large Load Diff

21
package.json

@ -1,17 +1,18 @@
{ {
"name": "@corpglory/tsdb-kit", "name": "@corpglory/tsdb-kit",
"version": "1.1.1", "version": "2.0.4",
"description": "", "description": "",
"scripts": { "scripts": {
"build": "yarn build:lib && yarn build:bin", "build": "yarn build:lib && yarn build:bin",
"build:lib": "tsc --p lib.tsconfig.json", "build:lib": "tsc --p lib.tsconfig.json",
"build:bin": "tsc --p bin.tsconfig.json", "build:bin": "webpack --config webpack.config.js",
"dev": "tsc -w", "dev:lib": "tsc --p lib.tsconfig.json -w",
"dev:bin": "webpack --watch --config webpack.config.js",
"test": "jest" "test": "jest"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/CorpGlory/tsdb-kit.git" "url": "git+https://code.corpglory.net/hastic/tsdb-kit.git"
}, },
"author": { "author": {
"name": "CorpGlory Inc." "name": "CorpGlory Inc."
@ -21,20 +22,24 @@
}, },
"license": "Apache-2.0", "license": "Apache-2.0",
"bugs": { "bugs": {
"url": "https://github.com/CorpGlory/tsdb-kit/issues" "url": "https://code.corpglory.net/hastic/tsdb-kit/issues"
}, },
"homepage": "https://github.com/CorpGlory/tsdb-kit", "homepage": "https://code.corpglory.net/hastic/tsdb-kit",
"dependencies": { "dependencies": {
"axios": "^0.21.1", "axios": "^0.18.0",
"moment": "^2.22.2", "moment": "^2.22.2",
"url": "^0.11.0" "url": "^0.11.0"
}, },
"devDependencies": { "devDependencies": {
"@types/jest": "^26.0.15", "@types/jest": "^26.0.15",
"@types/lodash": "^4.14.165", "@types/lodash": "^4.14.165",
"argparse": "^2.0.1",
"jest": "^26.6.3", "jest": "^26.6.3",
"ts-jest": "^26.4.4", "ts-jest": "^26.4.4",
"typescript": "^4.1.2" "ts-loader": "^9.3.1",
"typescript": "^4.1.2",
"webpack": "^5.74.0",
"webpack-cli": "^4.10.0"
}, },
"main": "./lib/index.js", "main": "./lib/index.js",
"bin": { "bin": {

13
spec/elasticsearch.jest.ts

@ -1,6 +1,7 @@
import { ElasticsearchMetric } from '../src/metrics/elasticsearch_metric'; import { ElasticsearchConnector } from '../src/connectors/elasticsearch';
import { Datasource } from '../src/metrics/metric'; import { Datasource, DatasourceType } from '../src/connectors';
import 'jest';
import * as _ from 'lodash'; import * as _ from 'lodash';
describe('simple query', function(){ describe('simple query', function(){
@ -62,7 +63,7 @@ describe('simple query', function(){
} }
} }
}], }],
type: "elasticsearch" type: DatasourceType.ELASTICSEARCH
}; };
datasource.data = datasource.data.map(d => JSON.stringify(d)).join('\n'); datasource.data = datasource.data.map(d => JSON.stringify(d)).join('\n');
@ -162,7 +163,7 @@ describe('simple query', function(){
} }
}]; }];
let elasticMetric = new ElasticsearchMetric(datasource, targets); let connector = new ElasticsearchConnector(datasource, targets);
it('check correct time processing', function() { it('check correct time processing', function() {
const expectedQueryTemplate = _.cloneDeep(queryTemplate); const expectedQueryTemplate = _.cloneDeep(queryTemplate);
@ -191,7 +192,7 @@ describe('simple query', function(){
} }
}; };
let result = elasticMetric.getQuery(from, to, limit, offset); let result = connector.getQuery(from, to, limit, offset);
expect(result).toEqual(expectedQuery); expect(result).toEqual(expectedQuery);
}); });
@ -264,6 +265,6 @@ describe('simple query', function(){
] ]
}; };
expect(elasticMetric.getResults(result)).toEqual(expectedResult); expect(connector.parseResponse(result)).toEqual(expectedResult);
}); });
}); });

10
spec/graphite.jest.ts

@ -1,11 +1,13 @@
import { Datasource, Metric } from '../src/index'; import { Datasource, DatasourceType } from '../src/index';
import { GraphiteConnector } from '../src/connectors/graphite';
import 'jest'; import 'jest';
describe('correct Graphite query', function() { describe('correct Graphite query', function() {
let datasource: Datasource = { let datasource: Datasource = {
url: 'http://example.com:1234', url: 'http://example.com:1234',
type: 'graphite', type: DatasourceType.GRAPHITE,
params: { params: {
db: '', db: '',
q: '', q: '',
@ -15,10 +17,10 @@ describe('correct Graphite query', function() {
}; };
let target = `target=template(hosts.$hostname.cpu, hostname="worker1")`; let target = `target=template(hosts.$hostname.cpu, hostname="worker1")`;
let query = new Metric(datasource, [target]); let connector = new GraphiteConnector(datasource, [target]);
it("test simple query with time clause", function () { it("test simple query with time clause", function () {
expect(query.metricQuery.getQuery(1534809600000, 1537488000000, 500, 0).url).toBe( expect(connector.getQuery(1534809600000, 1537488000000, 500, 0).url).toBe(
`${datasource.url}?target=${target}&from=1534809600&until=1537488000&maxDataPoints=500` `${datasource.url}?target=${target}&from=1534809600&until=1537488000&maxDataPoints=500`
) )
}); });

17
spec/targets.jest.ts → spec/influxdb.jest.ts

@ -1,4 +1,5 @@
import { Datasource, Metric } from '../src/index'; import { Datasource, DatasourceType } from '../src/index';
import { InfluxdbConnector } from '../src/connectors/influxdb';
import 'jest'; import 'jest';
@ -6,7 +7,7 @@ import 'jest';
describe('Correct InfluxDB query', function() { describe('Correct InfluxDB query', function() {
let datasource: Datasource = { let datasource: Datasource = {
url: 'url', url: 'url',
type: 'influxdb', type: DatasourceType.INFLUXDB,
params: { params: {
db: 'db', db: 'db',
q: `SELECT mean("value") FROM "db" WHERE time > xxx AND time <= xxx LIMIT 100 OFFSET 20`, q: `SELECT mean("value") FROM "db" WHERE time > xxx AND time <= xxx LIMIT 100 OFFSET 20`,
@ -17,24 +18,24 @@ describe('Correct InfluxDB query', function() {
let target = 'mean("value")'; let target = 'mean("value")';
it("test query with two time expressions", function() { it("test query with two time expressions", function() {
let query = new Metric(datasource, [target]); const connector = new InfluxdbConnector(datasource, [target]);
expect(query.metricQuery.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe( expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
`SELECT mean("value") FROM "db" WHERE time >= 1534809600ms AND time <= 1537488000ms LIMIT 666 OFFSET 10` `SELECT mean("value") FROM "db" WHERE time >= 1534809600ms AND time <= 1537488000ms LIMIT 666 OFFSET 10`
) )
}); });
it('test query with one time expression', function() { it('test query with one time expression', function() {
datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time >= now() - 6h GROUP BY time(30s) fill(null)`; datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time >= now() - 6h GROUP BY time(30s) fill(null)`;
let query = new Metric(datasource, [target]); const connector = new InfluxdbConnector(datasource, [target]);
expect(query.metricQuery.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe( expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
`SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10` `SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10`
) )
}); });
it('test query with time expression', function() { it('test query with time expression', function() {
datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time>= now() - 6h AND time<xxx GROUP BY time(30s) fill(null)`; datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time>= now() - 6h AND time<xxx GROUP BY time(30s) fill(null)`;
let query = new Metric(datasource, [target]); const connector = new InfluxdbConnector(datasource, [target]);
expect(query.metricQuery.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe( expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
`SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10` `SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10`
) )
}); });

20
spec/prometheus.jest.ts

@ -1,15 +1,20 @@
import { PrometheusMetric } from '../src/metrics/prometheus_metric'; import { PrometheusConnector } from '../src/connectors/prometheus';
import { DatasourceType } from '../src/connectors';
import 'jest'; import 'jest';
describe('Test Prometheus time range processing', function() { describe('Test Prometheus time range processing', function() {
let datasource = { let datasource = {
type: 'prometheus', type: DatasourceType.PROMETHEUS,
url: 'api/datasources/proxy/4/api/v1/query_range?query=node_disk_io_time_ms&start=1543411320&end=1543432950&step=30' url: 'api/datasources/proxy/4/api/v1/query_range?query=node_disk_io_time_ms&start=1543411320&end=1543432950&step=30',
auth: {
username: 'my_user',
password: 'my_password',
}
} }
let targets = []; let targets = [];
let prometheus = new PrometheusMetric(datasource, targets); let prometheus = new PrometheusConnector(datasource, targets);
it('check that from/to present in url', function() { it('check that from/to present in url', function() {
let from = 1234567891234; //milliseconds let from = 1234567891234; //milliseconds
@ -18,4 +23,11 @@ describe('Test Prometheus time range processing', function() {
expect(query.url.indexOf(`start=${Math.floor(from / 1000)}`) !== -1).toBeTruthy(); expect(query.url.indexOf(`start=${Math.floor(from / 1000)}`) !== -1).toBeTruthy();
expect(query.url.indexOf(`end=${Math.floor(to / 1000)}`) !== -1).toBeTruthy(); expect(query.url.indexOf(`end=${Math.floor(to / 1000)}`) !== -1).toBeTruthy();
}); });
it('check that username/password present in query', function() {
let query = prometheus.getQuery(0, 0, 1000, 0);
expect(query.auth?.username).toBe('my_user');
expect(query.auth?.password).toBe('my_password');
})
}); });

125
spec/postgres.jest.ts → spec/sql.jest.ts

@ -1,5 +1,5 @@
import { PostgresMetric } from '../src/metrics/postgres_metric'; import { SqlConnector } from '../src/connectors/sql';
import { MetricQuery } from '../src/metrics/metric'; import { DatasourceType, DatasourceQuery } from '../src/connectors';
import 'jest'; import 'jest';
import * as _ from 'lodash'; import * as _ from 'lodash';
@ -11,8 +11,8 @@ describe('Test query creation', function() {
let offset = 0; let offset = 0;
let from = 1542983750857; let from = 1542983750857;
let to = 1542984313292; let to = 1542984313292;
let postgres = getMetricForSqlQuery(); let connector = getConnectorForSqlQuery();
let mQuery: MetricQuery = postgres.getQuery(from, to, limit, offset); let mQuery: DatasourceQuery = connector.getQuery(from, to, limit, offset);
it('test that payload placed to data field', function() { it('test that payload placed to data field', function() {
expect('data' in mQuery.schema).toBeTruthy(); expect('data' in mQuery.schema).toBeTruthy();
@ -31,37 +31,55 @@ describe('Test query creation', function() {
}); });
describe('Test result parsing', function() { describe('Test result parsing', function() {
let postgres = getMetricForSqlQuery(); let connector = getConnectorForSqlQuery();
let timestamps = [1542983800000, 1542983800060, 1542983800120] let timestamps = [1542983800000, 1542983800060, 1542983800120]
let response = { let response = {
data: { data: {
results: { results: {
A: { A: {
refId: 'A', frames: [
meta: {
rowCount:0,
sql: 'SELECT "time" AS "time", val FROM local ORDER BY 1'
},
series: [
{ {
name:"val", schema: {
points: [ refId: 'A',
[622, timestamps[0]], meta: {
[844, timestamps[1]], 'executedQueryString': 'SELECT\n \"time\" AS \"time\",\n eur\nFROM rate_test\nWHERE\n \"time\" >= 1669648679 AND \"time\" <= 1672240679\nORDER BY 1'
[648, timestamps[2]] },
] fields: [
{
name: 'Time',
type: 'time',
typeInfo: {
frame: 'time.Time',
nullable: true
}
},
{
name: 'eur',
type: 'number',
typeInfo: {
frame: 'float64',
nullable: true
}
}
]
},
data: {
values: [
[ timestamps[0], timestamps[1], timestamps[2] ],
[ 1.53, 1.17, 1.17 ],
]
}
} }
], ]
tables: 'null'
} }
} }
} }
} }
let result = postgres.getResults(response); let result = connector.parseResponse(response);
it('check results columns order', function() { it('check results columns order', function() {
let timestampColumnNumber = result.columns.indexOf('timestamp'); let timestampColumnNumber = result.columns.indexOf('Time');
expect(result.values.map(v => v[timestampColumnNumber])).toEqual(timestamps); expect(result.values.map(v => v[timestampColumnNumber])).toEqual(timestamps);
}); });
}); });
@ -144,6 +162,63 @@ describe('Test sql processing', function() {
check(original, expected); check(original, expected);
}); });
it('sql with $__timeGroup aggregation', function () {
const original = `SELECT
$__timeGroup("time", $__interval, NULL),
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1`;
const expected = `SELECT
"time",
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1 LIMIT ${limit} OFFSET ${offset}`;
check(original, expected);
});
it('sql with $__timeGroupAlias aggregation', function () {
const original = `SELECT
$__timeGroupAlias("time", $__interval),
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1`;
const expected = `SELECT
"time",
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1 LIMIT ${limit} OFFSET ${offset}`;
check(original, expected);
});
it('sql with $__timeGroupAlias aggregation and linebreaks', function () {
const original = `SELECT
$__timeGroupAlias(
any_field,
$__interval
),
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter(any_field)
GROUP BY 1
ORDER BY 1`;
const expected = `SELECT
any_field,
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter(any_field)
GROUP BY 1
ORDER BY 1 LIMIT ${limit} OFFSET ${offset}`;
check(original, expected);
});
it('complex sql with one select', function() { it('complex sql with one select', function() {
let original = `SELECT let original = `SELECT
statistics.created_at as time, statistics.created_at as time,
@ -216,11 +291,11 @@ describe('Test sql processing', function() {
}); });
function checkExpectation(original: string, expected: string, from: number, to: number, limit: number, offset: number) { function checkExpectation(original: string, expected: string, from: number, to: number, limit: number, offset: number) {
let metric = getMetricForSqlQuery(original); let metric = getConnectorForSqlQuery(original);
expect(metric.getQuery(from, to, limit, offset).schema.data.queries[0].rawSql).toBe(expected); expect(metric.getQuery(from, to, limit, offset).schema.data.queries[0].rawSql).toBe(expected);
} }
function getMetricForSqlQuery(query: string = ''): PostgresMetric { function getConnectorForSqlQuery(query: string = ''): SqlConnector {
const queryPayload = { const queryPayload = {
from: 1542983750857, from: 1542983750857,
to: 1542984313292 to: 1542984313292
@ -228,7 +303,7 @@ function getMetricForSqlQuery(query: string = ''): PostgresMetric {
const datasource = { const datasource = {
url: 'api/tsdb/query', url: 'api/tsdb/query',
type: 'postgres', type: DatasourceType.POSTGRES,
data: queryPayload data: queryPayload
}; };
@ -241,5 +316,5 @@ function getMetricForSqlQuery(query: string = ''): PostgresMetric {
format: 'time_series' format: 'time_series'
}]; }];
return new PostgresMetric(datasource, targets); return new SqlConnector(datasource, targets);
} }

2
spec/utils.jest.ts

@ -1,4 +1,4 @@
import { processSQLLimitOffset } from '../src/metrics/utils'; import { processSQLLimitOffset } from '../src/connectors/utils';
import 'jest'; import 'jest';

31
src/connectors/connector_factory.ts

@ -0,0 +1,31 @@
import { InfluxdbConnector } from './influxdb';
import { GraphiteConnector } from './graphite';
import { DatasourceConnector, DatasourceType } from '.';
import { PrometheusConnector } from './prometheus';
import { PostgresConnector } from './postgres';
import { ElasticsearchConnector } from './elasticsearch';
import { MysqlConnector } from './mysql';
import { QueryConfig } from '../models/query_config';
export function connectorFactory(
queryConfig: QueryConfig,
): DatasourceConnector {
const classMap = {
[DatasourceType.INFLUXDB]: InfluxdbConnector,
[DatasourceType.GRAPHITE]: GraphiteConnector,
[DatasourceType.PROMETHEUS]: PrometheusConnector,
[DatasourceType.POSTGRES]: PostgresConnector,
[DatasourceType.ELASTICSEARCH]: ElasticsearchConnector,
[DatasourceType.MYSQL]: MysqlConnector,
};
const datasource = queryConfig.datasource;
const targets = queryConfig.targets;
if(classMap[datasource.type] === undefined) {
console.error(`Datasources of type ${datasource.type} are not supported currently`);
throw new Error(`Datasources of type ${datasource.type} are not supported currently`);
} else {
return new classMap[datasource.type](datasource, targets);
}
}

44
src/metrics/elasticsearch_metric.ts → src/connectors/elasticsearch.ts

@ -1,12 +1,12 @@
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric'; import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { DataKitError } from '../grafana_service'; import { TsdbKitError } from '../types';
import * as _ from 'lodash'; import * as _ from 'lodash';
export type RangeFilter = { range: { [key: string]: { gte: String, lte: String } } }; export type RangeFilter = { range: { [key: string]: { gte: String, lte: String } } };
export type QueryStringFilter = { query_string: { analyze_wildcard: Boolean, query: String } }; export type QueryStringFilter = { query_string: { analyze_wildcard: Boolean, query: String } };
export type QueryConfig = { export type ElasticsearchQuery = {
size: number, size: number,
query: { query: {
bool: { bool: {
@ -18,33 +18,33 @@ export type QueryConfig = {
export type Aggregation = { export type Aggregation = {
date_histogram: { date_histogram: {
interval: String, interval: string,
field: String, field: string,
min_doc_count: Number, min_doc_count: number,
extended_bounds: { min: String, max: String }, extended_bounds: { min: string, max: string },
format: String format: string
} }
}; };
const DATE_HISTOGRAM_FIELD = 'date_histogram'; const DATE_HISTOGRAM_FIELD = 'date_histogram';
export class ElasticsearchMetric extends AbstractMetric { export class ElasticsearchConnector extends DatasourceConnector {
constructor(datasource: Datasource, targets: any[], id?: MetricId) { constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets, id); super(datasource, targets);
} }
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery { getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
let data = this.datasource.data.split('\n').map(d => d === '' ? d: JSON.parse(d)); let data = this.datasource.data.split('\n').map(d => d === '' ? d: JSON.parse(d));
if(data.length === 0) { if(data.length === 0) {
throw new DataKitError('Datasource data is empty'); throw new TsdbKitError('Datasource data is empty');
} }
const queryConfig: QueryConfig = data[1]; const query: ElasticsearchQuery = data[1];
queryConfig.size = 0; query.size = 0;
let timeField = null; let timeField: string | null = null;
let aggs = _.filter(queryConfig.aggs, f => _.has(f, DATE_HISTOGRAM_FIELD)); let aggs = _.filter(query.aggs, f => _.has(f, DATE_HISTOGRAM_FIELD));
_.each(aggs, (agg: Aggregation) => { _.each(aggs, (agg: Aggregation) => {
agg[DATE_HISTOGRAM_FIELD].extended_bounds = { agg[DATE_HISTOGRAM_FIELD].extended_bounds = {
min: from.toString(), min: from.toString(),
@ -63,9 +63,9 @@ export class ElasticsearchMetric extends AbstractMetric {
throw new Error('datasource time field not found'); throw new Error('datasource time field not found');
} }
let filters = queryConfig.query.bool.filter.filter(f => _.has(f, 'range')) as RangeFilter[]; let filters = query.query.bool.filter.filter(f => _.has(f, 'range')) as RangeFilter[];
if(filters.length === 0) { if(filters.length === 0) {
throw new DataKitError('Empty filters'); throw new TsdbKitError('Empty filters');
} }
let range = filters[0].range; let range = filters[0].range;
range[timeField].gte = from.toString(); range[timeField].gte = from.toString();
@ -86,7 +86,7 @@ export class ElasticsearchMetric extends AbstractMetric {
} }
} }
getResults(res): MetricResults { parseResponse(res): DataTable {
let columns = ['timestamp', 'target']; let columns = ['timestamp', 'target'];
let values = []; let values = [];
@ -106,7 +106,7 @@ export class ElasticsearchMetric extends AbstractMetric {
const bucketAggs = JSON.stringify(this.targets[0].bucketAggs); const bucketAggs = JSON.stringify(this.targets[0].bucketAggs);
const aggregationKeys = JSON.stringify(_.keys(aggregations)); const aggregationKeys = JSON.stringify(_.keys(aggregations));
console.error(`can't find related aggregation id. bucketAggs:${bucketAggs} aggregationKeys:${aggregationKeys}`); console.error(`can't find related aggregation id. bucketAggs:${bucketAggs} aggregationKeys:${aggregationKeys}`);
throw new DataKitError(`can't find related aggregation id`); throw new TsdbKitError(`can't find related aggregation id`);
} else { } else {
aggrgAgg = aggrgAgg[0].id; aggrgAgg = aggrgAgg[0].id;
} }
@ -114,7 +114,7 @@ export class ElasticsearchMetric extends AbstractMetric {
let agg = this.targets[0].metrics.filter(m => !m.hide).map(m => m.id); let agg = this.targets[0].metrics.filter(m => !m.hide).map(m => m.id);
if(agg.length > 1) { if(agg.length > 1) {
throw new DataKitError(`multiple series for metric are not supported currently: ${JSON.stringify(agg)}`); throw new TsdbKitError(`multiple series for metric are not supported currently: ${JSON.stringify(agg)}`);
} }
agg = agg[0]; agg = agg[0];

12
src/metrics/graphite_metric.ts → src/connectors/graphite.ts

@ -1,14 +1,14 @@
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric'; import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import * as _ from 'lodash'; import * as _ from 'lodash';
export class GraphiteMetric extends AbstractMetric { export class GraphiteConnector extends DatasourceConnector {
constructor(datasource: Datasource, targets: any[], id?: MetricId) { constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets, id); super(datasource, targets);
} }
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery { getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
let fromDate = Math.floor(from / 1000); let fromDate = Math.floor(from / 1000);
let toDate = Math.floor(to / 1000); let toDate = Math.floor(to / 1000);
@ -42,7 +42,7 @@ export class GraphiteMetric extends AbstractMetric {
} }
} }
getResults(res): MetricResults { parseResponse(res): DataTable {
if(res.data === undefined || res.data.length < 1) { if(res.data === undefined || res.data.length < 1) {
console.log('datasource return empty response, no data'); console.log('datasource return empty response, no data');

58
src/connectors/index.ts

@ -0,0 +1,58 @@
export enum QueryType {
DIRECT = 'direct',
GRAFANA = 'grafana',
}
export enum DatasourceType {
INFLUXDB = 'influxdb',
GRAPHITE = 'graphite',
PROMETHEUS = 'prometheus',
POSTGRES = 'postgres',
ELASTICSEARCH = 'elasticsearch',
MYSQL = 'mysql',
}
// TODO: Datasource: type -> class
export declare type Datasource = {
url: string;
type: DatasourceType;
params?: {
db: string;
q: string;
epoch: string;
};
data?: any;
datasourceId?: string;
auth?: any;
};
export type DatasourceQuery = {
url: string;
method: string;
schema: any;
headers?: any;
auth?: {
username: string;
password: string;
};
}
export type DataTable = {
values: (number | null)[][];
columns: string[];
}
export abstract class DatasourceConnector {
constructor(
public datasource: Datasource,
// TODO: Target type
public targets: any[],
) {}
/*
from / to - timestamp in ms
limit - max number of items in result
offset - number of items to skip from timerange start
*/
abstract getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery;
abstract parseResponse(res): DataTable;
}

13
src/metrics/influxdb_metric.ts → src/connectors/influxdb.ts

@ -1,16 +1,15 @@
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from "./metric"; import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { processSQLLimitOffset } from './utils'; import { processSQLLimitOffset } from './utils';
const INFLUX_QUERY_TIME_REGEX = /time ?[><=]+ ?[^A-Z]+(AND ?time ?[><=]+ ?[^A-Z]+)?/; const INFLUX_QUERY_TIME_REGEX = /time ?[><=]+ ?[^A-Z]+(AND ?time ?[><=]+ ?[^A-Z]+)?/;
export class InfluxdbConnector extends DatasourceConnector {
export class InfluxdbMetric extends AbstractMetric {
private _queryParts: string[]; private _queryParts: string[];
constructor(datasource: Datasource, targets: any[], id?: MetricId) { constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets, id); super(datasource, targets);
var queryStr = datasource.params.q; var queryStr = datasource.params.q;
this._queryParts = queryStr.split(INFLUX_QUERY_TIME_REGEX); this._queryParts = queryStr.split(INFLUX_QUERY_TIME_REGEX);
@ -24,7 +23,7 @@ export class InfluxdbMetric extends AbstractMetric {
} }
} }
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery { getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
let timeClause = `time >= ${from}ms AND time <= ${to}ms`; let timeClause = `time >= ${from}ms AND time <= ${to}ms`;
let q = `${this._queryParts[0]} ${timeClause} ${this._queryParts[2]}`; let q = `${this._queryParts[0]} ${timeClause} ${this._queryParts[2]}`;
q = processSQLLimitOffset(q, limit, offset); q = processSQLLimitOffset(q, limit, offset);
@ -41,7 +40,7 @@ export class InfluxdbMetric extends AbstractMetric {
} }
} }
getResults(res): MetricResults { parseResponse(res): DataTable {
let emptyResult = { let emptyResult = {
columns: ['timestamp', 'target'], columns: ['timestamp', 'target'],
values: [] values: []

5
src/connectors/mysql.ts

@ -0,0 +1,5 @@
import { SqlConnector } from './sql';
// MySQL needs no specialization: all behavior is inherited from SqlConnector,
// which works for all SQL datasources (see note there).
export class MysqlConnector extends SqlConnector {
}

5
src/connectors/postgres.ts

@ -0,0 +1,5 @@
import { SqlConnector } from './sql';
// PostgreSQL needs no specialization: all behavior is inherited from
// SqlConnector, which works for all SQL datasources (see note there).
export class PostgresConnector extends SqlConnector {
}

28
src/metrics/prometheus_metric.ts → src/connectors/prometheus.ts

@ -1,15 +1,15 @@
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric'; import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
const QUERY_TIME_REGEX = /\&start=[^\&]*\&end=[^\&]*\&/; const QUERY_TIME_REGEX = /\&start=[^\&]*\&end=[^\&]*\&/;
export class PrometheusMetric extends AbstractMetric { export class PrometheusConnector extends DatasourceConnector {
constructor(datasource: Datasource, targets: any[], id?: MetricId) { constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets, id); super(datasource, targets);
} }
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery { getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
let url = this.datasource.url; let url = this.datasource.url;
from = Math.floor(from / 1000); //prometheus uses seconds for timestamp from = Math.floor(from / 1000); //prometheus uses seconds for timestamp
to = Math.floor(to / 1000); to = Math.floor(to / 1000);
@ -21,11 +21,12 @@ export class PrometheusMetric extends AbstractMetric {
method: 'GET', method: 'GET',
schema: { schema: {
params: this.datasource.params params: this.datasource.params
} },
auth: this.datasource.auth,
} }
} }
getResults(res): MetricResults { parseResponse(res): DataTable {
if(res.data === undefined || res.data.data.result.length < 1) { if(res.data === undefined || res.data.data.result.length < 1) {
console.log('datasource return empty response, no data'); console.log('datasource return empty response, no data');
@ -36,13 +37,13 @@ export class PrometheusMetric extends AbstractMetric {
} }
let result = res.data.data.result; let result = res.data.data.result;
let result_matrix = { let result_matrix: DataTable = {
columns: ['timestamp'], columns: ['timestamp'],
values: [] values: []
}; };
result.map(r => { result.map(r => {
let keys = []; let keys: string[] = [];
for(let key in r.metric) { for(let key in r.metric) {
keys.push(`${key}=${r.metric[key]}`); keys.push(`${key}=${r.metric[key]}`);
} }
@ -51,8 +52,8 @@ export class PrometheusMetric extends AbstractMetric {
let values = result.map(r => r.values); let values = result.map(r => r.values);
let timestamps = []; let timestamps: (number | null)[] = [];
values.map(v => v.map(row => timestamps.push(row[0]))); values.forEach(v => v.forEach((row: number[]) => timestamps.push(row[0])));
timestamps = timestamps.filter(function(item, i, ar) { timestamps = timestamps.filter(function(item, i, ar) {
return ar.indexOf(item) === i; //uniq values return ar.indexOf(item) === i; //uniq values
}); });
@ -70,12 +71,11 @@ export class PrometheusMetric extends AbstractMetric {
if(currentTimestamp === t) { if(currentTimestamp === t) {
row.push(+currentValue); row.push(+currentValue);
v.shift(); v.shift();
} } else {
else {
row.push(null); row.push(null);
} }
}); });
row[0] = +row[0] * 1000; //convert timestamp to ms row[0] = +(row[0] as number) * 1000; //convert timestamp to ms
result_matrix.values.push(row); result_matrix.values.push(row);
}; };
return result_matrix; return result_matrix;

31
src/metrics/sql_metric.ts → src/connectors/sql.ts

@ -1,15 +1,17 @@
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric'; import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { processSQLLimitOffset } from './utils'; import { processSQLLimitOffset } from './utils';
import * as _ from 'lodash'; import * as _ from 'lodash';
// for 26.09.2020 it works for all SQL datasources // as of 26.09.2020, it works for all SQL datasources
export class SqlMetric extends AbstractMetric { export class SqlConnector extends DatasourceConnector {
private _targetName: string; //save first target name, while multi metric not implemented private _targetName: string; //save first target name, while multi metric not implemented
private url: string = 'api/tsdb/query'; private url: string;
constructor(datasource: Datasource, targets: any[], id?: MetricId) { constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets, id); super(datasource, targets);
this.url = datasource.url;
if(targets.length === 0) { if(targets.length === 0) {
throw Error('got empty targets list'); throw Error('got empty targets list');
@ -17,7 +19,7 @@ export class SqlMetric extends AbstractMetric {
this._targetName = targets[0].refId; this._targetName = targets[0].refId;
} }
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery { getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
let queries = this.targets; let queries = this.targets;
_.forEach(queries, q => { _.forEach(queries, q => {
@ -34,13 +36,13 @@ export class SqlMetric extends AbstractMetric {
data: { data: {
from: String(from), from: String(from),
to: String(to), to: String(to),
queries: queries queries: queries,
} }
} }
}; };
} }
getResults(res): MetricResults { parseResponse(res): DataTable {
let emptyResult = { let emptyResult = {
columns: ['timestamp', 'target'], columns: ['timestamp', 'target'],
values: [] values: []
@ -53,16 +55,15 @@ export class SqlMetric extends AbstractMetric {
// TODO: support more than 1 metric (each res.data.results item is a metric) // TODO: support more than 1 metric (each res.data.results item is a metric)
let results = res.data.results[this._targetName]; let results = res.data.results[this._targetName];
if (!results.series) { if (_.isEmpty(results.frames)) {
return emptyResult; return emptyResult;
} }
let points = results.series[0].points; const frame = results.frames[0];
points.forEach(p => p.reverse());
return { return {
columns: ['timestamp', results.series[0].name], columns: frame.schema.fields.map(field => field.name),
values: points // @ts-ignore
values: _.zip(...frame.data.values),
}; };
} }
} }

8
src/metrics/utils.ts → src/connectors/utils.ts

@ -8,6 +8,12 @@ export function processSQLLimitOffset(sql: string, limit: number, offset: number
} }
sql = splits[0]; // removes ";" from EOL sql = splits[0]; // removes ";" from EOL
const reAggregation = /\$__timeGroup(?:Alias)?\(\s*([^,]+)\s*,\s*\$__interval[^\)]*\)/igm;
const occurence = reAggregation.exec(sql);
if(occurence) {
sql = sql.replace(reAggregation, occurence[1]);
}
let relim = /limit [0-9]+/ig; let relim = /limit [0-9]+/ig;
let reoff = /offset [0-9]+/ig; let reoff = /offset [0-9]+/ig;
@ -32,7 +38,7 @@ export function processSQLLimitOffset(sql: string, limit: number, offset: number
} }
function ensureParentheses(regex: RegExp, str: string): { index: number, length: number } { function ensureParentheses(regex: RegExp, str: string): { index: number, length: number } {
let occurence: RegExpExecArray; let occurence: RegExpExecArray | null;
while((occurence = regex.exec(str)) !== null) { while((occurence = regex.exec(str)) !== null) {
let leftPart = str.slice(0, occurence.index) let leftPart = str.slice(0, occurence.index)
let rightPart = str.slice(occurence.index + occurence[0].length); let rightPart = str.slice(occurence.index + occurence[0].length);

134
src/grafana_service.ts

@ -1,134 +0,0 @@
import { Metric } from './metrics/metrics_factory';
import { MetricQuery, Datasource } from './metrics/metric';
import { URL } from 'url';
import axios from 'axios';
import * as _ from 'lodash';
// Base error for this library; carries optional datasource context so a
// caller can tell which datasource (type + URL) the failure came from.
export class DataKitError extends Error {
  constructor(
    message: string,
    public datasourceType?: string,
    public datasourceUrl?: string
  ) {
    super(message);
  }
};
// Thrown when the requested time range is invalid (from > to)
export class BadRange extends DataKitError {};
// Thrown when the Grafana instance itself cannot be reached (connection refused)
export class GrafanaUnavailable extends DataKitError {};
// Thrown when Grafana responds but the underlying datasource is down (HTTP 502)
export class DatasourceUnavailable extends DataKitError {};
// Maximum number of points fetched per request; results are paged in chunks of this size
const CHUNK_SIZE = 50000;
/**
 * Queries all data for `metric` in [from, to], paging through Grafana in
 * CHUNK_SIZE chunks until a short (final) chunk is returned.
 * @param metric to query to Grafana
 * @param url Grafana or dashboard-panel URL; reduced to the Grafana root via getGrafanaUrl
 * @param from range start, timestamp in ms
 * @param to range end, timestamp in ms
 * @param apiKey Grafana API key, sent as a Bearer token
 * @returns { values: [time, value][], columns: string[] }
 * @throws BadRange when from > to
 */
export async function queryByMetric(
  metric: Metric, url: string, from: number, to: number, apiKey: string
): Promise<{ values: [number, number][], columns: string[] }> {
  if(from > to) {
    throw new BadRange(
      `Data-kit got wrong range: from ${from} > to ${to}`,
      metric.datasource.type,
      url
    );
  }
  if(from === to) {
    console.warn(`Data-kit got from === to`);
  }
  const grafanaUrl = getGrafanaUrl(url);
  let data = {
    values: [],
    columns: []
  };
  while(true) {
    // offset = number of values fetched so far, so each iteration resumes where the last stopped
    let query = metric.metricQuery.getQuery(from, to, CHUNK_SIZE, data.values.length);
    query.url = `${grafanaUrl}/${query.url}`;
    let res = await queryGrafana(query, apiKey, metric.datasource);
    let chunk = metric.metricQuery.getResults(res);
    let values = chunk.values;
    data.values = data.values.concat(values);
    data.columns = chunk.columns;
    if(values.length < CHUNK_SIZE) {
      // a short chunk means there is no more data in the range
      break;
    }
  }
  return data;
}
/**
 * Executes a single query against Grafana with Bearer-token auth.
 * Maps transport failures to typed errors:
 *  - connection refused -> GrafanaUnavailable
 *  - HTTP 401 -> generic Error (bad API key)
 *  - HTTP 502 -> DatasourceUnavailable (Grafana is up, the datasource is not)
 */
async function queryGrafana(query: MetricQuery, apiKey: string, datasource: Datasource) {
  let headers = { Authorization: `Bearer ${apiKey}` };
  if(query.headers !== undefined) {
    // connector-provided headers take precedence over the default Authorization header
    _.merge(headers, query.headers);
  }
  let axiosQuery = {
    headers,
    url: query.url,
    method: query.method,
  };
  // fill request params / body from the connector's schema without overwriting the fields above
  _.defaults(axiosQuery, query.schema);
  try {
    var res = await axios(axiosQuery);
  } catch (e) {
    const msg = `Data kit: fail while request data: ${e.message}`;
    const parsedUrl = new URL(query.url);
    const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
    console.error(`${msg} ${queryUrl}`);
    // NOTE(review): modern Node puts 'ECONNREFUSED' in e.code, not e.errno — verify this still matches
    if(e.errno === 'ECONNREFUSED') {
      throw new GrafanaUnavailable(e.message);
    }
    if(e.response !== undefined) {
      console.error(`Response: \
status: ${e.response.status}, \
response data: ${JSON.stringify(e.response.data)}, \
headers: ${JSON.stringify(e.response.headers)}
`);
      if(e.response.status === 401) {
        throw new Error(`Unauthorized. Check the API_KEY. ${e.message}`);
      }
      if(e.response.status === 502) {
        let datasourceError = new DatasourceUnavailable(
          `datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
          datasource.type,
          query.url
        );
        throw datasourceError;
      }
    }
    throw new Error(msg);
  }
  return res;
}
// Derives the Grafana root URL from a dashboard-panel URL.
// Panel URLs look like `<origin>[/<subpath>]/d/<uid>/...`; anything that
// does not match that pattern is returned unchanged.
function getGrafanaUrl(url: string) {
  const parsed = new URL(url);
  const dashboardMatch = parsed.pathname.match(/^\/*([^\/]*)\/d\//);
  if(dashboardMatch === null) {
    // not a panel URL — assume it is already the Grafana root
    return url;
  }
  const subPath = dashboardMatch[1];
  return subPath.length > 0 ? `${parsed.origin}/${subPath}` : parsed.origin;
}

58
src/index.ts

@ -1,3 +1,55 @@
export { Metric } from './metrics/metrics_factory'; import { DataTable } from './connectors';
export { Datasource } from './metrics/metric' import { QueryConfig } from './models/query_config';
export { queryByMetric, GrafanaUnavailable, DatasourceUnavailable } from './grafana_service'; import { BadRange } from './types';
export { QueryConfig } from './models/query_config';
export { Datasource, DatasourceType, DataTable } from './connectors'
export { DatasourceUnavailable } from './types';
export { GrafanaUnavailable } from './services/query_service/grafana';
// Maximum number of points fetched per request; results are paged in chunks of this size
const CHUNK_SIZE = 50000;
/**
 * Queries all data described by `queryConfig` in [from, to], paging in
 * CHUNK_SIZE chunks until a short (final) chunk is returned.
 * @param queryConfig bundles datasource, targets and query type (direct / via Grafana)
 * @param from range start, timestamp in ms
 * @param to range end, timestamp in ms
 * @param apiKey Grafana API key (used by Grafana-type queries only)
 * @returns { values: [time, value][], columns: string[] }
 * @throws BadRange when from > to
 */
export async function queryByConfig(
  // TODO: check how did we wanna use `url` field
  queryConfig: QueryConfig, url: string, from: number, to: number,
  // TODO: we need an abstract DatasourceConfig class which will differ in direct and grafana queries
  apiKey?: string
): Promise<DataTable> {
  if(from > to) {
    throw new BadRange(
      `TSDB-kit got wrong range: from ${from} > to ${to}`,
      queryConfig.datasource.type,
      url
    );
  }
  if(from === to) {
    console.warn(`TSDB-kit got from === to`);
  }
  let data: DataTable = {
    values: [],
    columns: []
  };
  while(true) {
    // offset = number of values fetched so far, so each iteration resumes where the last stopped
    let query = queryConfig.datasourceConnector.getQuery(from, to, CHUNK_SIZE, data.values.length);
    const res = await queryConfig.queryService.query(query, apiKey);
    let chunk = queryConfig.datasourceConnector.parseResponse(res);
    let values = chunk.values;
    data.values = data.values.concat(values);
    data.columns = chunk.columns;
    if(values.length < CHUNK_SIZE) {
      // a short chunk means there is no more data in the range
      break;
    }
  }
  return data;
}

40
src/metrics/metric.ts

@ -1,40 +0,0 @@
export declare type Datasource = {
url: string;
type: string;
params?: {
db: string;
q: string;
epoch: string;
};
data?: any;
datasourceId?: string;
};
export type MetricQuery = {
url: string;
method: string;
schema: any;
headers?: any;
}
export type MetricResults = {
values: any;
columns: any;
}
export type MetricId = string;
export abstract class AbstractMetric {
constructor(
public datasource: Datasource,
public targets: any[],
public id?: MetricId
) {};
abstract getQuery(from: number, to: number, limit: number, offset: number): MetricQuery;
/*
from / to - timestamp in ms
limit - max number of items in result
offset - number of items to skip from timerange start
*/
abstract getResults(res): MetricResults;
}

78
src/metrics/metrics_factory.ts

@ -1,78 +0,0 @@
import { InfluxdbMetric } from './influxdb_metric';
import { GraphiteMetric } from './graphite_metric';
import { AbstractMetric, Datasource, MetricId } from './metric';
import { PrometheusMetric } from './prometheus_metric';
import { PostgresMetric } from './postgres_metric';
import { ElasticsearchMetric } from './elasticsearch_metric';
import { MysqlMetric } from './mysql_metric';
// Instantiates the AbstractMetric subclass matching the datasource type;
// throws for datasource types that have no implementation.
export function metricFactory(
  datasource: Datasource,
  targets: any[],
  id?: MetricId
): AbstractMetric {
  const classMap = {
    'influxdb': InfluxdbMetric,
    'graphite': GraphiteMetric,
    'prometheus': PrometheusMetric,
    'postgres': PostgresMetric,
    'elasticsearch': ElasticsearchMetric,
    'mysql': MysqlMetric,
  };
  const MetricClass = classMap[datasource.type];
  if(MetricClass === undefined) {
    console.error(`Datasources of type ${datasource.type} are not supported currently`);
    throw new Error(`Datasources of type ${datasource.type} are not supported currently`);
  }
  return new MetricClass(datasource, targets, id);
}
// Wrapper around a (datasource, targets) pair that lazily constructs the
// datasource-specific AbstractMetric, plus (de)serialization helpers.
export class Metric {
  datasource: Datasource;
  targets: any[];
  id?: MetricId;
  // cache for the lazily created metric implementation (see `metricQuery`)
  private _metricQuery: AbstractMetric = undefined;
  constructor(datasource: Datasource, targets: any[], id?: MetricId) {
    if(datasource === undefined) {
      throw new Error('datasource is undefined');
    }
    if(targets === undefined) {
      throw new Error('targets is undefined');
    }
    if(targets.length === 0) {
      throw new Error('targets is empty');
    }
    this.datasource = datasource;
    this.targets = targets;
    this.id = id;
  }
  // Lazily instantiates the datasource-specific metric on first access
  public get metricQuery() {
    if(this._metricQuery === undefined) {
      this._metricQuery = metricFactory(this.datasource, this.targets, this.id);
    }
    return this._metricQuery;
  }
  // Plain-object form for persistence; note `id` is serialized as `_id`
  public toObject() {
    return {
      datasource: this.datasource,
      targets: this.targets,
      _id: this.id
    };
  }
  // Inverse of toObject (reads `_id`); validates via the constructor
  static fromObject(obj: any): Metric {
    if(obj === undefined) {
      throw new Error('obj is undefined');
    }
    return new Metric(
      obj.datasource,
      obj.targets,
      obj._id
    );
  }
}

5
src/metrics/mysql_metric.ts

@ -1,5 +0,0 @@
import { SqlMetric } from './sql_metric';
export class MysqlMetric extends SqlMetric {
}

5
src/metrics/postgres_metric.ts

@ -1,5 +0,0 @@
import { SqlMetric } from './sql_metric';
export class PostgresMetric extends SqlMetric {
}

62
src/models/query_config.ts

@ -0,0 +1,62 @@
import { Datasource, DatasourceConnector, QueryType } from '../connectors';
import { connectorFactory } from '../connectors/connector_factory';
import { QueryService } from '../services/query_service/base';
import { queryServiceFactory } from '../services/query_service/query_service_factory';
/**
 * Bundles everything needed to execute one query: the query type (direct or
 * via Grafana), the datasource, and its targets. The datasource connector
 * and the query service are created lazily on first use, then cached.
 */
export class QueryConfig {
  queryType: QueryType;
  datasource: Datasource;
  // TODO: Target type (depends on datasource type)
  targets: any[];
  private _datasourceConnector?: DatasourceConnector;
  private _queryService?: QueryService;
  constructor(queryType: QueryType, datasource: Datasource, targets: any[]) {
    // reject missing required arguments early, naming the argument in the message
    const required: [string, unknown][] = [
      ['queryType', queryType],
      ['datasource', datasource],
      ['targets', targets],
    ];
    for(const [name, value] of required) {
      if(value === undefined) {
        throw new Error(`${name} is undefined`);
      }
    }
    this.queryType = queryType;
    this.datasource = datasource;
    this.targets = targets;
  }
  // Lazily builds the datasource-specific connector on first access
  get datasourceConnector(): DatasourceConnector {
    if(this._datasourceConnector === undefined) {
      this._datasourceConnector = connectorFactory(this);
    }
    return this._datasourceConnector;
  }
  // Lazily builds the query service matching the query type on first access
  get queryService(): QueryService {
    if(this._queryService === undefined) {
      this._queryService = queryServiceFactory(this);
    }
    return this._queryService;
  }
  // Plain-object form; the lazily-built caches are intentionally excluded
  public toObject() {
    const { queryType, datasource, targets } = this;
    return { queryType, datasource, targets };
  }
  // Inverse of toObject; validates via the constructor
  static fromObject(obj: any): QueryConfig {
    if(obj === undefined) {
      throw new Error('obj is undefined');
    }
    return new QueryConfig(obj.queryType, obj.datasource, obj.targets);
  }
}

11
src/services/query_service/base.ts

@ -0,0 +1,11 @@
import { Datasource, DatasourceQuery } from '../../connectors';
import { AxiosResponse } from 'axios';
// Base class for query executors: given a DatasourceQuery built by a
// connector, performs the actual HTTP request and returns the raw response.
export abstract class QueryService {
  constructor(protected _datasource: Datasource) { }
  // TODO: we don't need `apiKey` here, we need some abstract auth config for both Direct and Grafana queries
  abstract query(query: DatasourceQuery, apiKey?: string): Promise<AxiosResponse<any>>;
}

52
src/services/query_service/direct.ts

@ -0,0 +1,52 @@
import { QueryService } from './base';
import { DatasourceUnavailable } from '../../types';
import { Datasource, DatasourceQuery } from '../../connectors';
import axios, { AxiosResponse } from 'axios';
import * as _ from 'lodash';
// Executes the query straight against the datasource (no Grafana proxy).
export class DirectQueryService extends QueryService {
  constructor(datasource: Datasource) {
    super(datasource);
  }
  /**
   * Performs the HTTP request described by `query`.
   * Maps HTTP 401 -> generic Error and HTTP 502 -> DatasourceUnavailable.
   */
  async query(query: DatasourceQuery): Promise<AxiosResponse<any>> {
    // TODO: support auth
    let axiosQuery = {
      ...query,
    };
    // fill request params / body from the connector's schema without overwriting the fields above
    _.defaults(axiosQuery, query.schema);
    try {
      // `await` is essential: returning the promise directly would let
      // rejections escape this try/catch, skipping all error mapping below
      return await axios(axiosQuery);
    } catch(e) {
      // TODO: seems like this error handler can be used for both Grafana and Direct queries
      const msg = `TSDB-kit: fail while request data: ${e.message}`;
      const parsedUrl = new URL(query.url);
      const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
      console.error(`${msg} ${queryUrl}`);
      if(e.response !== undefined) {
        console.error(`Response: \
status: ${e.response.status}, \
response data: ${JSON.stringify(e.response.data)}, \
headers: ${JSON.stringify(e.response.headers)}
`);
        if(e.response.status === 401) {
          throw new Error(`Unauthorized. Check credentials. ${e.message}`);
        }
        if(e.response.status === 502) {
          let datasourceError = new DatasourceUnavailable(
            `datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
            this._datasource.type,
            query.url
          );
          throw datasourceError;
        }
      }
      throw new Error(msg);
    }
  }
}

64
src/services/query_service/grafana.ts

@ -0,0 +1,64 @@
import { QueryService } from './base';
import { Datasource, DatasourceQuery } from '../../connectors';
import { TsdbKitError, DatasourceUnavailable } from '../../types';
import axios, { AxiosResponse } from 'axios';
import * as _ from 'lodash';
// Thrown when the Grafana instance itself cannot be reached (connection refused)
export class GrafanaUnavailable extends TsdbKitError { };
// Executes queries through a Grafana instance, authenticated with an API key.
export class GrafanaQueryService extends QueryService {
  constructor(datasource: Datasource) {
    super(datasource);
  }
  /**
   * Performs the HTTP request with `Authorization: Bearer <apiKey>`.
   * Error mapping:
   *  - connection refused -> GrafanaUnavailable
   *  - HTTP 401 -> generic Error (bad API key)
   *  - HTTP 502 -> DatasourceUnavailable (Grafana is up, the datasource is not)
   */
  async query(query: DatasourceQuery, apiKey: string): Promise<AxiosResponse<any>> {
    let headers = { Authorization: `Bearer ${apiKey}` };
    if(query.headers !== undefined) {
      // connector-provided headers take precedence over the default Authorization header
      _.merge(headers, query.headers);
    }
    let axiosQuery = {
      headers,
      url: query.url,
      method: query.method,
    };
    // fill request params / body from the connector's schema without overwriting the fields above
    _.defaults(axiosQuery, query.schema);
    try {
      const resp = await axios(axiosQuery);
      return resp;
    } catch (e) {
      // TODO: seems like this error handler can be used for both Grafana and Direct queries
      const msg = `TSDB-kit: fail while request data: ${e.message}`;
      const parsedUrl = new URL(query.url);
      const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
      console.error(`${msg} ${queryUrl}`);
      // NOTE(review): modern Node puts 'ECONNREFUSED' in e.code, not e.errno — verify this still matches
      if(e.errno === 'ECONNREFUSED') {
        throw new GrafanaUnavailable(e.message);
      }
      if(e.response !== undefined) {
        console.error(`Response: \
status: ${e.response.status}, \
response data: ${JSON.stringify(e.response.data)}, \
headers: ${JSON.stringify(e.response.headers)}
`);
        if(e.response.status === 401) {
          throw new Error(`Unauthorized. Check the API_KEY. ${e.message}`);
        }
        if(e.response.status === 502) {
          let datasourceError = new DatasourceUnavailable(
            `datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
            this._datasource.type,
            query.url
          );
          throw datasourceError;
        }
      }
      throw new Error(msg);
    }
  }
}

23
src/services/query_service/query_service_factory.ts

@ -0,0 +1,23 @@
import { QueryService } from './base';
import { DirectQueryService } from './direct';
import { GrafanaQueryService } from './grafana';
import { QueryType } from '../../connectors';
import { QueryConfig } from '../../models/query_config';
// Picks the QueryService implementation matching the config's query type;
// throws for query types that have no implementation.
export function queryServiceFactory(
  queryConfig: QueryConfig,
): QueryService {
  const classMap = {
    [QueryType.DIRECT]: DirectQueryService,
    [QueryType.GRAFANA]: GrafanaQueryService,
  };
  const { queryType, datasource } = queryConfig;
  const ServiceClass = classMap[queryType];
  if(ServiceClass === undefined) {
    console.error(`Queries of type ${queryType} are not supported currently`);
    throw new Error(`Queries of type ${queryType} are not supported currently`);
  }
  return new ServiceClass(datasource);
}

50
src/tsdb-kit/index.ts

@ -1 +1,49 @@
console.log('Hello world'); import { queryByConfig, QueryConfig } from '..';
import { DatasourceType, QueryType } from '../connectors';
const { version } = require('../../package.json')
import { ArgumentParser } from 'argparse';
import * as _ from 'lodash';
// CLI entry point: parses arguments, builds a direct Prometheus QueryConfig
// and prints the query result (or the error) to the console.
const parser = new ArgumentParser();
parser.add_argument('-v', '--version', { action: 'version', version });
parser.add_argument('-U', '--url', { help: 'Datasource URL', required: true });
parser.add_argument('-q', '--query', { help: 'Query Template', required: true });
parser.add_argument('-f', '--from', { help: 'From timestamp (ms), e.g. 1660670020000. If not specified, `now-5m` is used' });
parser.add_argument('-t', '--to', { help: 'To timestamp (ms), e.g. 1660670026000. If not specified, `now` is used' });
parser.add_argument('-u', '--username', { help: 'Basic Auth Username' });
parser.add_argument('-p', '--password', { help: 'Basic Auth Password' });
const args = parser.parse_args();
const timeNowInMs = new Date().getTime();
const PROMETHEUS_URL = args.url;
const QUERY = args.query;
// argparse yields strings; queryByConfig expects millisecond numbers, so convert explicitly
const FROM = args.from != null ? Number(args.from) : timeNowInMs - 5 * 60 * 1000;
const TO = args.to != null ? Number(args.to) : timeNowInMs;
const USERNAME = args.username;
const PASSWORD = args.password;
// only send Basic Auth when both parts are provided
let auth;
if(USERNAME && PASSWORD) {
  auth = { username: USERNAME, password: PASSWORD };
}
const datasource = {
  type: DatasourceType.PROMETHEUS,
  // TODO: remove PROMETHEUS_URL from here
  url: `${PROMETHEUS_URL}/api/v1/query_range?query=${QUERY}&start=1543411320&end=1543432950&step=30`,
  auth,
};
const targets = [];
const queryConfig = new QueryConfig(QueryType.DIRECT, datasource, targets);
queryByConfig(queryConfig, PROMETHEUS_URL, FROM, TO)
  .then(res => {
    console.log(res);
  })
  .catch(err => {
    console.error('Query error: ', err);
  });

15
src/types.ts

@ -0,0 +1,15 @@
import { DatasourceType } from './connectors';
// Base error for this library; carries optional datasource context so a
// caller can tell which datasource (type + URL) the failure came from.
export class TsdbKitError extends Error {
  constructor(
    message: string,
    public datasourceType?: DatasourceType,
    public datasourceUrl?: string
  ) {
    super(message);
  }
};
// Thrown when the requested time range is invalid (from > to)
export class BadRange extends TsdbKitError {};
// Thrown when the datasource cannot be reached (e.g. it answers HTTP 502 via Grafana)
export class DatasourceUnavailable extends TsdbKitError {};

33
webpack.config.js

@ -0,0 +1,33 @@
const webpack = require('webpack');
const path = require('path');
// Webpack config that bundles the CLI into ./bin/tsdb-kit.js
module.exports = {
  mode: 'development',
  target: 'node', // the CLI runs under Node, not in a browser
  devtool: 'inline-source-map',
  entry: {
    main: './src/tsdb-kit/index.ts', // CLI entry point
  },
  output: {
    path: path.resolve(__dirname, './bin'),
    filename: 'tsdb-kit.js'
  },
  plugins: [
    // prepend a shebang (raw, unwrapped) so the bundle is directly executable
    new webpack.BannerPlugin({ banner: "#!/usr/bin/env node", raw: true }),
  ],
  resolve: {
    extensions: ['.ts', '.js'],
  },
  module: {
    rules: [
      {
        test: /.ts$/,
        loader: 'ts-loader',
        options: {
          configFile: 'bin.tsconfig.json' // separate tsconfig for the CLI bundle
        }
      }
    ]
  }
};

683
yarn.lock

File diff suppressed because it is too large Load Diff
Loading…
Cancel
Save