Compare commits

..

1 Commits

Author SHA1 Message Date
dependabot[bot] 7f17588775
Bump lodash from 4.17.11 to 4.17.21 3 years ago
  1. 9
      .npmignore
  2. 32
      README.md
  3. 8
      bin.tsconfig.json
  4. 6
      lib.tsconfig.json
  5. 4992
      package-lock.json
  6. 19
      package.json
  7. 13
      spec/elasticsearch.jest.ts
  8. 10
      spec/graphite.jest.ts
  9. 125
      spec/postgres.jest.ts
  10. 20
      spec/prometheus.jest.ts
  11. 17
      spec/targets.jest.ts
  12. 2
      spec/utils.jest.ts
  13. 31
      src/connectors/connector_factory.ts
  14. 58
      src/connectors/index.ts
  15. 5
      src/connectors/mysql.ts
  16. 5
      src/connectors/postgres.ts
  17. 134
      src/grafana_service.ts
  18. 58
      src/index.ts
  19. 44
      src/metrics/elasticsearch_metric.ts
  20. 12
      src/metrics/graphite_metric.ts
  21. 13
      src/metrics/influxdb_metric.ts
  22. 40
      src/metrics/metric.ts
  23. 78
      src/metrics/metrics_factory.ts
  24. 5
      src/metrics/mysql_metric.ts
  25. 5
      src/metrics/postgres_metric.ts
  26. 28
      src/metrics/prometheus_metric.ts
  27. 31
      src/metrics/sql_metric.ts
  28. 8
      src/metrics/utils.ts
  29. 62
      src/models/query_config.ts
  30. 11
      src/services/query_service/base.ts
  31. 52
      src/services/query_service/direct.ts
  32. 64
      src/services/query_service/grafana.ts
  33. 23
      src/services/query_service/query_service_factory.ts
  34. 50
      src/tsdb-kit/index.ts
  35. 15
      src/types.ts
  36. 33
      webpack.config.js
  37. 656
      yarn.lock

9
.npmignore

@ -2,8 +2,7 @@ src
spec
.travis.yml
jest.config.js
lib.tsconfig.json
bin.tsconfig.json
webpack.config.js
yarn.lock
.vscode
tsconfig.lib.json
tsconfig.bin.json
tsconfig.jest.json

32
README.md

@ -1,43 +1,21 @@
# tsdb-kit
TSDB-kit is a node.js library and CLI-tool for querying timeseries-datasources.
[![Build Status](https://travis-ci.org/CorpGlory/tsdb-kit.svg?branch=master)](https://travis-ci.org/CorpGlory/tsdb-kit)
## Features
Node.js library and utilities for running Grafana datasources on backend.
You can send your datasource metrics from Grafana to compile it on Node.js and query your datasource via Grafana API in background.
- can query datasources directly or using Grafana as proxy
- can be used as a lib from your node.js-code or as a CLI-tool
- user gets a unified interface to all datasources. Library gives single output format: fields order, time units, etc.
User gets a unified interface to all datasources. Library gives single output format: fields order, time units, etc
## Supported datasources
### Direct
* Prometheus
### Grafana
* Influxdb
* Graphite
* Prometheus
* PostgreSQL / TimescaleDB / MySQL
* ElasticSearch
Please write us at ping@corpglory.com if you want your datasource to be supported
## Usage
### Lib (TODO)
### CLI
For now, CLI supports only direct Prometheus queries
For example:
`npx @corpglory/tsdb-kit -U http://localhost:9090 -q '100-(avg by (instance) (irate(node_cpu_seconds_total{mode="idle"}[5m])) * 100)' -u my_user -p my_password`
## Development (TODO)
Please write us at ping@corpglory.com if you want your datasource to be supported:
## Projects based on library
* [grafana-data-exporter](https://github.com/CorpGlory/grafana-data-exporter)

8
bin.tsconfig.json

@ -1,10 +1,10 @@
{
"compilerOptions": {
"moduleResolution": "node",
"sourceMap": true,
"target": "es6",
"target": "es2015",
"declaration": false,
"skipLibCheck": true
"outFile": "bin/tsdb-kit.js"
},
"include": [ "src/**/*.ts" ]
"include": [ "src/**/*.ts" ],
"exclude": [ "src/index.ts" ]
}

6
lib.tsconfig.json

@ -2,11 +2,9 @@
"compilerOptions": {
"sourceMap": true,
"module": "commonjs",
"moduleResolution": "node",
"target": "esnext",
"target": "es2015",
"declaration": true,
"outDir": "lib",
"skipLibCheck": true
"outDir": "lib"
},
"include": [ "src/**/*.ts" ],
"exclude": [ "src/tsdb-kit" ]

4992
package-lock.json generated

File diff suppressed because it is too large Load Diff

19
package.json

@ -1,18 +1,17 @@
{
"name": "@corpglory/tsdb-kit",
"version": "2.0.4",
"version": "1.1.1",
"description": "",
"scripts": {
"build": "yarn build:lib && yarn build:bin",
"build:lib": "tsc --p lib.tsconfig.json",
"build:bin": "webpack --config webpack.config.js",
"dev:lib": "tsc --p lib.tsconfig.json -w",
"dev:bin": "webpack --watch --config webpack.config.js",
"build:bin": "tsc --p bin.tsconfig.json",
"dev": "tsc -w",
"test": "jest"
},
"repository": {
"type": "git",
"url": "git+https://code.corpglory.net/hastic/tsdb-kit.git"
"url": "git+https://github.com/CorpGlory/tsdb-kit.git"
},
"author": {
"name": "CorpGlory Inc."
@ -22,9 +21,9 @@
},
"license": "Apache-2.0",
"bugs": {
"url": "https://code.corpglory.net/hastic/tsdb-kit/issues"
"url": "https://github.com/CorpGlory/tsdb-kit/issues"
},
"homepage": "https://code.corpglory.net/hastic/tsdb-kit",
"homepage": "https://github.com/CorpGlory/tsdb-kit",
"dependencies": {
"axios": "^0.18.0",
"moment": "^2.22.2",
@ -33,13 +32,9 @@
"devDependencies": {
"@types/jest": "^26.0.15",
"@types/lodash": "^4.14.165",
"argparse": "^2.0.1",
"jest": "^26.6.3",
"ts-jest": "^26.4.4",
"ts-loader": "^9.3.1",
"typescript": "^4.1.2",
"webpack": "^5.74.0",
"webpack-cli": "^4.10.0"
"typescript": "^4.1.2"
},
"main": "./lib/index.js",
"bin": {

13
spec/elasticsearch.jest.ts

@ -1,7 +1,6 @@
import { ElasticsearchConnector } from '../src/connectors/elasticsearch';
import { Datasource, DatasourceType } from '../src/connectors';
import { ElasticsearchMetric } from '../src/metrics/elasticsearch_metric';
import { Datasource } from '../src/metrics/metric';
import 'jest';
import * as _ from 'lodash';
describe('simple query', function(){
@ -63,7 +62,7 @@ describe('simple query', function(){
}
}
}],
type: DatasourceType.ELASTICSEARCH
type: "elasticsearch"
};
datasource.data = datasource.data.map(d => JSON.stringify(d)).join('\n');
@ -163,7 +162,7 @@ describe('simple query', function(){
}
}];
let connector = new ElasticsearchConnector(datasource, targets);
let elasticMetric = new ElasticsearchMetric(datasource, targets);
it('check correct time processing', function() {
const expectedQueryTemplate = _.cloneDeep(queryTemplate);
@ -192,7 +191,7 @@ describe('simple query', function(){
}
};
let result = connector.getQuery(from, to, limit, offset);
let result = elasticMetric.getQuery(from, to, limit, offset);
expect(result).toEqual(expectedQuery);
});
@ -265,6 +264,6 @@ describe('simple query', function(){
]
};
expect(connector.parseResponse(result)).toEqual(expectedResult);
expect(elasticMetric.getResults(result)).toEqual(expectedResult);
});
});

10
spec/graphite.jest.ts

@ -1,13 +1,11 @@
import { Datasource, DatasourceType } from '../src/index';
import { GraphiteConnector } from '../src/connectors/graphite';
import { Datasource, Metric } from '../src/index';
import 'jest';
describe('correct Graphite query', function() {
let datasource: Datasource = {
url: 'http://example.com:1234',
type: DatasourceType.GRAPHITE,
type: 'graphite',
params: {
db: '',
q: '',
@ -17,10 +15,10 @@ describe('correct Graphite query', function() {
};
let target = `target=template(hosts.$hostname.cpu, hostname="worker1")`;
let connector = new GraphiteConnector(datasource, [target]);
let query = new Metric(datasource, [target]);
it("test simple query with time clause", function () {
expect(connector.getQuery(1534809600000, 1537488000000, 500, 0).url).toBe(
expect(query.metricQuery.getQuery(1534809600000, 1537488000000, 500, 0).url).toBe(
`${datasource.url}?target=${target}&from=1534809600&until=1537488000&maxDataPoints=500`
)
});

125
spec/sql.jest.ts → spec/postgres.jest.ts

@ -1,5 +1,5 @@
import { SqlConnector } from '../src/connectors/sql';
import { DatasourceType, DatasourceQuery } from '../src/connectors';
import { PostgresMetric } from '../src/metrics/postgres_metric';
import { MetricQuery } from '../src/metrics/metric';
import 'jest';
import * as _ from 'lodash';
@ -11,8 +11,8 @@ describe('Test query creation', function() {
let offset = 0;
let from = 1542983750857;
let to = 1542984313292;
let connector = getConnectorForSqlQuery();
let mQuery: DatasourceQuery = connector.getQuery(from, to, limit, offset);
let postgres = getMetricForSqlQuery();
let mQuery: MetricQuery = postgres.getQuery(from, to, limit, offset);
it('test that payload placed to data field', function() {
expect('data' in mQuery.schema).toBeTruthy();
@ -31,55 +31,37 @@ describe('Test query creation', function() {
});
describe('Test result parsing', function() {
let connector = getConnectorForSqlQuery();
let postgres = getMetricForSqlQuery();
let timestamps = [1542983800000, 1542983800060, 1542983800120]
let response = {
data: {
results: {
A: {
frames: [
refId: 'A',
meta: {
rowCount:0,
sql: 'SELECT "time" AS "time", val FROM local ORDER BY 1'
},
series: [
{
schema: {
refId: 'A',
meta: {
'executedQueryString': 'SELECT\n \"time\" AS \"time\",\n eur\nFROM rate_test\nWHERE\n \"time\" >= 1669648679 AND \"time\" <= 1672240679\nORDER BY 1'
},
fields: [
{
name: 'Time',
type: 'time',
typeInfo: {
frame: 'time.Time',
nullable: true
}
},
{
name: 'eur',
type: 'number',
typeInfo: {
frame: 'float64',
nullable: true
}
}
]
},
data: {
values: [
[ timestamps[0], timestamps[1], timestamps[2] ],
[ 1.53, 1.17, 1.17 ],
]
}
name:"val",
points: [
[622, timestamps[0]],
[844, timestamps[1]],
[648, timestamps[2]]
]
}
]
],
tables: 'null'
}
}
}
}
let result = connector.parseResponse(response);
let result = postgres.getResults(response);
it('check results columns order', function() {
let timestampColumnNumber = result.columns.indexOf('Time');
let timestampColumnNumber = result.columns.indexOf('timestamp');
expect(result.values.map(v => v[timestampColumnNumber])).toEqual(timestamps);
});
});
@ -162,63 +144,6 @@ describe('Test sql processing', function() {
check(original, expected);
});
it('sql with $__timeGroup aggregation', function () {
const original = `SELECT
$__timeGroup("time", $__interval, NULL),
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1`;
const expected = `SELECT
"time",
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1 LIMIT ${limit} OFFSET ${offset}`;
check(original, expected);
});
it('sql with $__timeGroupAlias aggregation', function () {
const original = `SELECT
$__timeGroupAlias("time", $__interval),
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1`;
const expected = `SELECT
"time",
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1 LIMIT ${limit} OFFSET ${offset}`;
check(original, expected);
});
it('sql with $__timeGroupAlias aggregation and linebreaks', function () {
const original = `SELECT
$__timeGroupAlias(
any_field,
$__interval
),
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter(any_field)
GROUP BY 1
ORDER BY 1`;
const expected = `SELECT
any_field,
avg("metric") AS "Réseau"
FROM metric_values
WHERE $__timeFilter(any_field)
GROUP BY 1
ORDER BY 1 LIMIT ${limit} OFFSET ${offset}`;
check(original, expected);
});
it('complex sql with one select', function() {
let original = `SELECT
statistics.created_at as time,
@ -291,11 +216,11 @@ describe('Test sql processing', function() {
});
function checkExpectation(original: string, expected: string, from: number, to: number, limit: number, offset: number) {
let metric = getConnectorForSqlQuery(original);
let metric = getMetricForSqlQuery(original);
expect(metric.getQuery(from, to, limit, offset).schema.data.queries[0].rawSql).toBe(expected);
}
function getConnectorForSqlQuery(query: string = ''): SqlConnector {
function getMetricForSqlQuery(query: string = ''): PostgresMetric {
const queryPayload = {
from: 1542983750857,
to: 1542984313292
@ -303,7 +228,7 @@ function getConnectorForSqlQuery(query: string = ''): SqlConnector {
const datasource = {
url: 'api/tsdb/query',
type: DatasourceType.POSTGRES,
type: 'postgres',
data: queryPayload
};
@ -316,5 +241,5 @@ function getConnectorForSqlQuery(query: string = ''): SqlConnector {
format: 'time_series'
}];
return new SqlConnector(datasource, targets);
return new PostgresMetric(datasource, targets);
}

20
spec/prometheus.jest.ts

@ -1,20 +1,15 @@
import { PrometheusConnector } from '../src/connectors/prometheus';
import { DatasourceType } from '../src/connectors';
import { PrometheusMetric } from '../src/metrics/prometheus_metric';
import 'jest';
describe('Test Prometheus time range processing', function() {
let datasource = {
type: DatasourceType.PROMETHEUS,
url: 'api/datasources/proxy/4/api/v1/query_range?query=node_disk_io_time_ms&start=1543411320&end=1543432950&step=30',
auth: {
username: 'my_user',
password: 'my_password',
}
type: 'prometheus',
url: 'api/datasources/proxy/4/api/v1/query_range?query=node_disk_io_time_ms&start=1543411320&end=1543432950&step=30'
}
let targets = [];
let prometheus = new PrometheusConnector(datasource, targets);
let prometheus = new PrometheusMetric(datasource, targets);
it('check that from/to present in url', function() {
let from = 1234567891234; //milliseconds
@ -23,11 +18,4 @@ describe('Test Prometheus time range processing', function() {
expect(query.url.indexOf(`start=${Math.floor(from / 1000)}`) !== -1).toBeTruthy();
expect(query.url.indexOf(`end=${Math.floor(to / 1000)}`) !== -1).toBeTruthy();
});
it('check that username/password present in query', function() {
let query = prometheus.getQuery(0, 0, 1000, 0);
expect(query.auth?.username).toBe('my_user');
expect(query.auth?.password).toBe('my_password');
})
});

17
spec/influxdb.jest.ts → spec/targets.jest.ts

@ -1,5 +1,4 @@
import { Datasource, DatasourceType } from '../src/index';
import { InfluxdbConnector } from '../src/connectors/influxdb';
import { Datasource, Metric } from '../src/index';
import 'jest';
@ -7,7 +6,7 @@ import 'jest';
describe('Correct InfluxDB query', function() {
let datasource: Datasource = {
url: 'url',
type: DatasourceType.INFLUXDB,
type: 'influxdb',
params: {
db: 'db',
q: `SELECT mean("value") FROM "db" WHERE time > xxx AND time <= xxx LIMIT 100 OFFSET 20`,
@ -18,24 +17,24 @@ describe('Correct InfluxDB query', function() {
let target = 'mean("value")';
it("test query with two time expressions", function() {
const connector = new InfluxdbConnector(datasource, [target]);
expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
let query = new Metric(datasource, [target]);
expect(query.metricQuery.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
`SELECT mean("value") FROM "db" WHERE time >= 1534809600ms AND time <= 1537488000ms LIMIT 666 OFFSET 10`
)
});
it('test query with one time expression', function() {
datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time >= now() - 6h GROUP BY time(30s) fill(null)`;
const connector = new InfluxdbConnector(datasource, [target]);
expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
let query = new Metric(datasource, [target]);
expect(query.metricQuery.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
`SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10`
)
});
it('test query with time expression', function() {
datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time>= now() - 6h AND time<xxx GROUP BY time(30s) fill(null)`;
const connector = new InfluxdbConnector(datasource, [target]);
expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
let query = new Metric(datasource, [target]);
expect(query.metricQuery.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
`SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10`
)
});

2
spec/utils.jest.ts

@ -1,4 +1,4 @@
import { processSQLLimitOffset } from '../src/connectors/utils';
import { processSQLLimitOffset } from '../src/metrics/utils';
import 'jest';

31
src/connectors/connector_factory.ts

@ -1,31 +0,0 @@
import { InfluxdbConnector } from './influxdb';
import { GraphiteConnector } from './graphite';
import { DatasourceConnector, DatasourceType } from '.';
import { PrometheusConnector } from './prometheus';
import { PostgresConnector } from './postgres';
import { ElasticsearchConnector } from './elasticsearch';
import { MysqlConnector } from './mysql';
import { QueryConfig } from '../models/query_config';
export function connectorFactory(
queryConfig: QueryConfig,
): DatasourceConnector {
const classMap = {
[DatasourceType.INFLUXDB]: InfluxdbConnector,
[DatasourceType.GRAPHITE]: GraphiteConnector,
[DatasourceType.PROMETHEUS]: PrometheusConnector,
[DatasourceType.POSTGRES]: PostgresConnector,
[DatasourceType.ELASTICSEARCH]: ElasticsearchConnector,
[DatasourceType.MYSQL]: MysqlConnector,
};
const datasource = queryConfig.datasource;
const targets = queryConfig.targets;
if(classMap[datasource.type] === undefined) {
console.error(`Datasources of type ${datasource.type} are not supported currently`);
throw new Error(`Datasources of type ${datasource.type} are not supported currently`);
} else {
return new classMap[datasource.type](datasource, targets);
}
}

58
src/connectors/index.ts

@ -1,58 +0,0 @@
export enum QueryType {
DIRECT = 'direct',
GRAFANA = 'grafana',
}
export enum DatasourceType {
INFLUXDB = 'influxdb',
GRAPHITE = 'graphite',
PROMETHEUS = 'prometheus',
POSTGRES = 'postgres',
ELASTICSEARCH = 'elasticsearch',
MYSQL = 'mysql',
}
// TODO: Datasource: type -> class
export declare type Datasource = {
url: string;
type: DatasourceType;
params?: {
db: string;
q: string;
epoch: string;
};
data?: any;
datasourceId?: string;
auth?: any;
};
export type DatasourceQuery = {
url: string;
method: string;
schema: any;
headers?: any;
auth?: {
username: string;
password: string;
};
}
export type DataTable = {
values: (number | null)[][];
columns: string[];
}
export abstract class DatasourceConnector {
constructor(
public datasource: Datasource,
// TODO: Target type
public targets: any[],
) {}
/*
from / to - timestamp in ms
limit - max number of items in result
offset - number of items to skip from timerange start
*/
abstract getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery;
abstract parseResponse(res): DataTable;
}

5
src/connectors/mysql.ts

@ -1,5 +0,0 @@
import { SqlConnector } from './sql';
export class MysqlConnector extends SqlConnector {
}

5
src/connectors/postgres.ts

@ -1,5 +0,0 @@
import { SqlConnector } from './sql';
export class PostgresConnector extends SqlConnector {
}

134
src/grafana_service.ts

@ -0,0 +1,134 @@
import { Metric } from './metrics/metrics_factory';
import { MetricQuery, Datasource } from './metrics/metric';
import { URL } from 'url';
import axios from 'axios';
import * as _ from 'lodash';
/**
 * Base error for failures raised by this module.
 * Optionally carries the datasource type and url involved, so callers can
 * report which backend the failure belongs to.
 */
export class DataKitError extends Error {
  public datasourceType?: string;
  public datasourceUrl?: string;

  constructor(message: string, datasourceType?: string, datasourceUrl?: string) {
    super(message);
    this.datasourceType = datasourceType;
    this.datasourceUrl = datasourceUrl;
  }
}

/** Thrown when the requested time range is inverted (from > to). */
export class BadRange extends DataKitError {}

/** Thrown when the Grafana instance itself cannot be reached. */
export class GrafanaUnavailable extends DataKitError {}

/** Thrown when Grafana answers but the proxied datasource is down. */
export class DatasourceUnavailable extends DataKitError {}

// Page size for querying: fetching continues until a page comes back short.
const CHUNK_SIZE = 50000;
/**
 * Pages a metric's full result set out of Grafana.
 *
 * @param metric  metric to query through Grafana
 * @param url     dashboard/panel url used to locate the Grafana origin
 * @param from    range start, timestamp in ms
 * @param to      range end, timestamp in ms
 * @param apiKey  Grafana API key, sent as a Bearer token
 * @returns { values: [time, value][], columns: string[] }
 * @throws BadRange when from > to
 */
export async function queryByMetric(
  metric: Metric, url: string, from: number, to: number, apiKey: string
): Promise<{ values: [number, number][], columns: string[] }> {
  if(from > to) {
    throw new BadRange(
      `Data-kit got wrong range: from ${from} > to ${to}`,
      metric.datasource.type,
      url
    );
  }
  if(from === to) {
    console.warn(`Data-kit got from === to`);
  }

  const grafanaUrl = getGrafanaUrl(url);
  const data = {
    values: [],
    columns: []
  };

  // Fetch page by page, using the number of rows collected so far as offset.
  for(;;) {
    const query = metric.metricQuery.getQuery(from, to, CHUNK_SIZE, data.values.length);
    query.url = `${grafanaUrl}/${query.url}`;
    const res = await queryGrafana(query, apiKey, metric.datasource);
    const chunk = metric.metricQuery.getResults(res);

    data.values = data.values.concat(chunk.values);
    data.columns = chunk.columns;

    // A short page means the datasource has nothing more to return.
    if(chunk.values.length < CHUNK_SIZE) {
      return data;
    }
  }
}
/**
 * Executes one prepared query against Grafana via axios and returns the raw
 * axios response. Failures are mapped to typed errors:
 *  - connection refused -> GrafanaUnavailable
 *  - HTTP 401           -> Error (bad API key)
 *  - HTTP 502           -> DatasourceUnavailable (proxied datasource is down)
 *  - anything else      -> generic Error with the axios message
 */
async function queryGrafana(query: MetricQuery, apiKey: string, datasource: Datasource) {
  // Grafana API auth header; per-query headers (if any) are merged on top.
  let headers = { Authorization: `Bearer ${apiKey}` };
  if(query.headers !== undefined) {
    _.merge(headers, query.headers);
  }

  let axiosQuery = {
    headers,
    url: query.url,
    method: query.method,
  };
  // schema carries the method-specific request fields; _.defaults will not
  // overwrite the url/method/headers already set above.
  _.defaults(axiosQuery, query.schema);

  try {
    // `var` on purpose: res must remain visible after the try block.
    var res = await axios(axiosQuery);
  } catch (e) {
    const msg = `Data kit: fail while request data: ${e.message}`;
    const parsedUrl = new URL(query.url);
    // Only the url path is included in the log, not the full url.
    const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
    console.error(`${msg} ${queryUrl}`);
    // NOTE(review): on modern Node/axios the refused-connection code is usually
    // e.code, while e.errno is numeric — confirm the target runtime before
    // relying on this string comparison.
    if(e.errno === 'ECONNREFUSED') {
      throw new GrafanaUnavailable(e.message);
    }
    if(e.response !== undefined) {
      console.error(`Response: \
status: ${e.response.status}, \
response data: ${JSON.stringify(e.response.data)}, \
headers: ${JSON.stringify(e.response.headers)}
`);
      if(e.response.status === 401) {
        throw new Error(`Unauthorized. Check the API_KEY. ${e.message}`);
      }
      if(e.response.status === 502) {
        // 502 from Grafana means the datasource behind the proxy is unavailable.
        let datasourceError = new DatasourceUnavailable(
          `datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
          datasource.type,
          query.url
        );
        throw datasourceError;
      }
    }
    throw new Error(msg);
  }
  return res;
}
/**
 * Derives the Grafana base url from a dashboard/panel url.
 *   http://host/d/abc/dash         -> http://host
 *   http://host/grafana/d/abc/dash -> http://host/grafana   (sub-path install)
 * Urls without a `/d/` dashboard segment are returned unchanged.
 */
function getGrafanaUrl(url: string) {
  const parsed = new URL(url);
  const match = parsed.pathname.match(/^\/*([^\/]*)\/d\//);
  if(match === null) {
    // Not a dashboard url — assume the caller passed the Grafana root itself.
    return url;
  }
  const subPath = match[1];
  return subPath.length > 0 ? `${parsed.origin}/${subPath}` : parsed.origin;
}

58
src/index.ts

@ -1,55 +1,3 @@
import { DataTable } from './connectors';
import { QueryConfig } from './models/query_config';
import { BadRange } from './types';
export { QueryConfig } from './models/query_config';
export { Datasource, DatasourceType, DataTable } from './connectors'
export { DatasourceUnavailable } from './types';
export { GrafanaUnavailable } from './services/query_service/grafana';
const CHUNK_SIZE = 50000;
/**
* @param queryConfig
* @returns { values: [time, value][], columns: string[] }
*/
export async function queryByConfig(
// TODO: check how did we wanna use `url` field
queryConfig: QueryConfig, url: string, from: number, to: number,
// TODO: we need an abstract DatasourceConfig class which will differ in direct and grafana queries
apiKey?: string
): Promise<DataTable> {
if(from > to) {
throw new BadRange(
`TSDB-kit got wrong range: from ${from} > to ${to}`,
queryConfig.datasource.type,
url
);
}
if(from === to) {
console.warn(`TSDB-kit got from === to`);
}
let data: DataTable = {
values: [],
columns: []
};
while(true) {
let query = queryConfig.datasourceConnector.getQuery(from, to, CHUNK_SIZE, data.values.length);
const res = await queryConfig.queryService.query(query, apiKey);
let chunk = queryConfig.datasourceConnector.parseResponse(res);
let values = chunk.values;
data.values = data.values.concat(values);
data.columns = chunk.columns;
if(values.length < CHUNK_SIZE) {
// because if we get less that we could, then there is nothing more
break;
}
}
return data;
}
export { Metric } from './metrics/metrics_factory';
export { Datasource } from './metrics/metric'
export { queryByMetric, GrafanaUnavailable, DatasourceUnavailable } from './grafana_service';

44
src/connectors/elasticsearch.ts → src/metrics/elasticsearch_metric.ts

@ -1,12 +1,12 @@
import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { TsdbKitError } from '../types';
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric';
import { DataKitError } from '../grafana_service';
import * as _ from 'lodash';
export type RangeFilter = { range: { [key: string]: { gte: String, lte: String } } };
export type QueryStringFilter = { query_string: { analyze_wildcard: Boolean, query: String } };
export type ElasticsearchQuery = {
export type QueryConfig = {
size: number,
query: {
bool: {
@ -18,33 +18,33 @@ export type ElasticsearchQuery = {
export type Aggregation = {
date_histogram: {
interval: string,
field: string,
min_doc_count: number,
extended_bounds: { min: string, max: string },
format: string
interval: String,
field: String,
min_doc_count: Number,
extended_bounds: { min: String, max: String },
format: String
}
};
const DATE_HISTOGRAM_FIELD = 'date_histogram';
export class ElasticsearchConnector extends DatasourceConnector {
constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets);
export class ElasticsearchMetric extends AbstractMetric {
constructor(datasource: Datasource, targets: any[], id?: MetricId) {
super(datasource, targets, id);
}
getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery {
let data = this.datasource.data.split('\n').map(d => d === '' ? d: JSON.parse(d));
if(data.length === 0) {
throw new TsdbKitError('Datasource data is empty');
throw new DataKitError('Datasource data is empty');
}
const query: ElasticsearchQuery = data[1];
const queryConfig: QueryConfig = data[1];
query.size = 0;
let timeField: string | null = null;
queryConfig.size = 0;
let timeField = null;
let aggs = _.filter(query.aggs, f => _.has(f, DATE_HISTOGRAM_FIELD));
let aggs = _.filter(queryConfig.aggs, f => _.has(f, DATE_HISTOGRAM_FIELD));
_.each(aggs, (agg: Aggregation) => {
agg[DATE_HISTOGRAM_FIELD].extended_bounds = {
min: from.toString(),
@ -63,9 +63,9 @@ export class ElasticsearchConnector extends DatasourceConnector {
throw new Error('datasource time field not found');
}
let filters = query.query.bool.filter.filter(f => _.has(f, 'range')) as RangeFilter[];
let filters = queryConfig.query.bool.filter.filter(f => _.has(f, 'range')) as RangeFilter[];
if(filters.length === 0) {
throw new TsdbKitError('Empty filters');
throw new DataKitError('Empty filters');
}
let range = filters[0].range;
range[timeField].gte = from.toString();
@ -86,7 +86,7 @@ export class ElasticsearchConnector extends DatasourceConnector {
}
}
parseResponse(res): DataTable {
getResults(res): MetricResults {
let columns = ['timestamp', 'target'];
let values = [];
@ -106,7 +106,7 @@ export class ElasticsearchConnector extends DatasourceConnector {
const bucketAggs = JSON.stringify(this.targets[0].bucketAggs);
const aggregationKeys = JSON.stringify(_.keys(aggregations));
console.error(`can't find related aggregation id. bucketAggs:${bucketAggs} aggregationKeys:${aggregationKeys}`);
throw new TsdbKitError(`can't find related aggregation id`);
throw new DataKitError(`can't find related aggregation id`);
} else {
aggrgAgg = aggrgAgg[0].id;
}
@ -114,7 +114,7 @@ export class ElasticsearchConnector extends DatasourceConnector {
let agg = this.targets[0].metrics.filter(m => !m.hide).map(m => m.id);
if(agg.length > 1) {
throw new TsdbKitError(`multiple series for metric are not supported currently: ${JSON.stringify(agg)}`);
throw new DataKitError(`multiple series for metric are not supported currently: ${JSON.stringify(agg)}`);
}
agg = agg[0];

12
src/connectors/graphite.ts → src/metrics/graphite_metric.ts

@ -1,14 +1,14 @@
import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric';
import * as _ from 'lodash';
export class GraphiteConnector extends DatasourceConnector {
constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets);
export class GraphiteMetric extends AbstractMetric {
constructor(datasource: Datasource, targets: any[], id?: MetricId) {
super(datasource, targets, id);
}
getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery {
let fromDate = Math.floor(from / 1000);
let toDate = Math.floor(to / 1000);
@ -42,7 +42,7 @@ export class GraphiteConnector extends DatasourceConnector {
}
}
parseResponse(res): DataTable {
getResults(res): MetricResults {
if(res.data === undefined || res.data.length < 1) {
console.log('datasource return empty response, no data');

13
src/connectors/influxdb.ts → src/metrics/influxdb_metric.ts

@ -1,15 +1,16 @@
import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from "./metric";
import { processSQLLimitOffset } from './utils';
const INFLUX_QUERY_TIME_REGEX = /time ?[><=]+ ?[^A-Z]+(AND ?time ?[><=]+ ?[^A-Z]+)?/;
export class InfluxdbConnector extends DatasourceConnector {
export class InfluxdbMetric extends AbstractMetric {
private _queryParts: string[];
constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets);
constructor(datasource: Datasource, targets: any[], id?: MetricId) {
super(datasource, targets, id);
var queryStr = datasource.params.q;
this._queryParts = queryStr.split(INFLUX_QUERY_TIME_REGEX);
@ -23,7 +24,7 @@ export class InfluxdbConnector extends DatasourceConnector {
}
}
getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery {
let timeClause = `time >= ${from}ms AND time <= ${to}ms`;
let q = `${this._queryParts[0]} ${timeClause} ${this._queryParts[2]}`;
q = processSQLLimitOffset(q, limit, offset);
@ -40,7 +41,7 @@ export class InfluxdbConnector extends DatasourceConnector {
}
}
parseResponse(res): DataTable {
getResults(res): MetricResults {
let emptyResult = {
columns: ['timestamp', 'target'],
values: []

40
src/metrics/metric.ts

@ -0,0 +1,40 @@
// Description of a datasource as received from Grafana.
export declare type Datasource = {
  url: string;
  type: string;
  // Query-string parameters for url-based datasources (used e.g. by InfluxDB).
  params?: {
    db: string;
    q: string;
    epoch: string;
  };
  // Raw request payload for datasources queried via request body (e.g. Elasticsearch).
  data?: any;
  datasourceId?: string;
};

// A prepared request: everything an http client needs to execute the query.
export type MetricQuery = {
  url: string;
  method: string;
  // Method-specific request fields (query params / request body).
  schema: any;
  headers?: any;
}

// Tabular query result: column names plus one row per value tuple.
export type MetricResults = {
  values: any;
  columns: any;
}

export type MetricId = string;

// Base class for datasource-specific query builders / response parsers.
export abstract class AbstractMetric {
  constructor(
    public datasource: Datasource,
    public targets: any[],
    public id?: MetricId
  ) {};

  /*
    Builds the request for one page of data.
    from / to - timestamp in ms
    limit - max number of items in result
    offset - number of items to skip from timerange start
  */
  abstract getQuery(from: number, to: number, limit: number, offset: number): MetricQuery;

  // Parses the raw datasource response into a MetricResults table.
  abstract getResults(res): MetricResults;
}

78
src/metrics/metrics_factory.ts

@ -0,0 +1,78 @@
import { InfluxdbMetric } from './influxdb_metric';
import { GraphiteMetric } from './graphite_metric';
import { AbstractMetric, Datasource, MetricId } from './metric';
import { PrometheusMetric } from './prometheus_metric';
import { PostgresMetric } from './postgres_metric';
import { ElasticsearchMetric } from './elasticsearch_metric';
import { MysqlMetric } from './mysql_metric';
/**
 * Instantiates the concrete AbstractMetric implementation registered for the
 * datasource's type.
 *
 * @throws Error when the datasource type has no registered implementation
 */
export function metricFactory(
  datasource: Datasource,
  targets: any[],
  id?: MetricId
): AbstractMetric {
  const classMap = {
    influxdb: InfluxdbMetric,
    graphite: GraphiteMetric,
    prometheus: PrometheusMetric,
    postgres: PostgresMetric,
    elasticsearch: ElasticsearchMetric,
    mysql: MysqlMetric,
  };

  const MetricClass = classMap[datasource.type];
  if(MetricClass === undefined) {
    const message = `Datasources of type ${datasource.type} are not supported currently`;
    console.error(message);
    throw new Error(message);
  }
  return new MetricClass(datasource, targets, id);
}
/**
 * Serializable wrapper around a datasource + targets pair.
 * The concrete query implementation is created lazily through metricFactory
 * on first access to `metricQuery`, so a Metric can be constructed and
 * (de)serialized without touching any datasource-specific code.
 */
export class Metric {
  datasource: Datasource;
  targets: any[];
  id?: MetricId;
  // Lazily-built concrete implementation; undefined until first use.
  // (Optional property instead of `: AbstractMetric = undefined`, which is
  // rejected under strictNullChecks; runtime value is identical.)
  private _metricQuery?: AbstractMetric;

  /**
   * @throws Error when datasource or targets is undefined, or targets is empty
   */
  constructor(datasource: Datasource, targets: any[], id?: MetricId) {
    if(datasource === undefined) {
      throw new Error('datasource is undefined');
    }
    if(targets === undefined) {
      throw new Error('targets is undefined');
    }
    if(targets.length === 0) {
      throw new Error('targets is empty');
    }
    this.datasource = datasource;
    this.targets = targets;
    this.id = id;
  }

  /** Datasource-specific query builder, created on first access. */
  public get metricQuery(): AbstractMetric {
    if(this._metricQuery === undefined) {
      this._metricQuery = metricFactory(this.datasource, this.targets, this.id);
    }
    return this._metricQuery;
  }

  /** Plain-object form for persistence; `id` is stored under the `_id` key. */
  public toObject() {
    return {
      datasource: this.datasource,
      targets: this.targets,
      _id: this.id
    };
  }

  /**
   * Inverse of toObject.
   * @throws Error when obj is undefined (and, via the constructor, when its
   *         datasource/targets fields are missing or targets is empty)
   */
  static fromObject(obj: any): Metric {
    if(obj === undefined) {
      throw new Error('obj is undefined');
    }
    return new Metric(
      obj.datasource,
      obj.targets,
      obj._id
    );
  }
}

5
src/metrics/mysql_metric.ts

@ -0,0 +1,5 @@
import { SqlMetric } from './sql_metric';
// MySQL metrics are fully handled by the generic SQL implementation;
// this subclass only gives the factory a distinct type to map to.
export class MysqlMetric extends SqlMetric {
}

5
src/metrics/postgres_metric.ts

@ -0,0 +1,5 @@
import { SqlMetric } from './sql_metric';
// PostgreSQL metrics are fully handled by the generic SQL implementation;
// this subclass only gives the factory a distinct type to map to.
export class PostgresMetric extends SqlMetric {
}

28
src/connectors/prometheus.ts → src/metrics/prometheus_metric.ts

@ -1,15 +1,15 @@
import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric';
const QUERY_TIME_REGEX = /\&start=[^\&]*\&end=[^\&]*\&/;
export class PrometheusConnector extends DatasourceConnector {
export class PrometheusMetric extends AbstractMetric {
constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets);
constructor(datasource: Datasource, targets: any[], id?: MetricId) {
super(datasource, targets, id);
}
getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery {
let url = this.datasource.url;
from = Math.floor(from / 1000); //prometheus uses seconds for timestamp
to = Math.floor(to / 1000);
@ -21,12 +21,11 @@ export class PrometheusConnector extends DatasourceConnector {
method: 'GET',
schema: {
params: this.datasource.params
},
auth: this.datasource.auth,
}
}
}
parseResponse(res): DataTable {
getResults(res): MetricResults {
if(res.data === undefined || res.data.data.result.length < 1) {
console.log('datasource return empty response, no data');
@ -37,13 +36,13 @@ export class PrometheusConnector extends DatasourceConnector {
}
let result = res.data.data.result;
let result_matrix: DataTable = {
let result_matrix = {
columns: ['timestamp'],
values: []
};
result.map(r => {
let keys: string[] = [];
let keys = [];
for(let key in r.metric) {
keys.push(`${key}=${r.metric[key]}`);
}
@ -52,8 +51,8 @@ export class PrometheusConnector extends DatasourceConnector {
let values = result.map(r => r.values);
let timestamps: (number | null)[] = [];
values.forEach(v => v.forEach((row: number[]) => timestamps.push(row[0])));
let timestamps = [];
values.map(v => v.map(row => timestamps.push(row[0])));
timestamps = timestamps.filter(function(item, i, ar) {
return ar.indexOf(item) === i; //uniq values
});
@ -71,11 +70,12 @@ export class PrometheusConnector extends DatasourceConnector {
if(currentTimestamp === t) {
row.push(+currentValue);
v.shift();
} else {
}
else {
row.push(null);
}
});
row[0] = +(row[0] as number) * 1000; //convert timestamp to ms
row[0] = +row[0] * 1000; //convert timestamp to ms
result_matrix.values.push(row);
};
return result_matrix;

31
src/connectors/sql.ts → src/metrics/sql_metric.ts

@ -1,17 +1,15 @@
import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
import { AbstractMetric, Datasource, MetricId, MetricQuery, MetricResults } from './metric';
import { processSQLLimitOffset } from './utils';
import * as _ from 'lodash';
// as of 26.09.2020, it works for all SQL datasources
export class SqlConnector extends DatasourceConnector {
// for 26.09.2020 it works for all SQL datasources
export class SqlMetric extends AbstractMetric {
private _targetName: string; //save first target name, while multi metric not implemented
private url: string;
private url: string = 'api/tsdb/query';
constructor(datasource: Datasource, targets: any[]) {
super(datasource, targets);
this.url = datasource.url;
constructor(datasource: Datasource, targets: any[], id?: MetricId) {
super(datasource, targets, id);
if(targets.length === 0) {
throw Error('got empty targets list');
@ -19,7 +17,7 @@ export class SqlConnector extends DatasourceConnector {
this._targetName = targets[0].refId;
}
getQuery(from: number, to: number, limit: number, offset: number): DatasourceQuery {
getQuery(from: number, to: number, limit: number, offset: number): MetricQuery {
let queries = this.targets;
_.forEach(queries, q => {
@ -36,13 +34,13 @@ export class SqlConnector extends DatasourceConnector {
data: {
from: String(from),
to: String(to),
queries: queries,
queries: queries
}
}
};
}
parseResponse(res): DataTable {
getResults(res): MetricResults {
let emptyResult = {
columns: ['timestamp', 'target'],
values: []
@ -55,15 +53,16 @@ export class SqlConnector extends DatasourceConnector {
// TODO: support more than 1 metric (each res.data.results item is a metric)
let results = res.data.results[this._targetName];
if (_.isEmpty(results.frames)) {
if (!results.series) {
return emptyResult;
}
const frame = results.frames[0];
let points = results.series[0].points;
points.forEach(p => p.reverse());
return {
columns: frame.schema.fields.map(field => field.name),
// @ts-ignore
values: _.zip(...frame.data.values),
columns: ['timestamp', results.series[0].name],
values: points
};
}
}

8
src/connectors/utils.ts → src/metrics/utils.ts

@ -8,12 +8,6 @@ export function processSQLLimitOffset(sql: string, limit: number, offset: number
}
sql = splits[0]; // removes ";" from EOL
const reAggregation = /\$__timeGroup(?:Alias)?\(\s*([^,]+)\s*,\s*\$__interval[^\)]*\)/igm;
const occurence = reAggregation.exec(sql);
if(occurence) {
sql = sql.replace(reAggregation, occurence[1]);
}
let relim = /limit [0-9]+/ig;
let reoff = /offset [0-9]+/ig;
@ -38,7 +32,7 @@ export function processSQLLimitOffset(sql: string, limit: number, offset: number
}
function ensureParentheses(regex: RegExp, str: string): { index: number, length: number } {
let occurence: RegExpExecArray | null;
let occurence: RegExpExecArray;
while((occurence = regex.exec(str)) !== null) {
let leftPart = str.slice(0, occurence.index)
let rightPart = str.slice(occurence.index + occurence[0].length);

62
src/models/query_config.ts

@ -1,62 +0,0 @@
import { Datasource, DatasourceConnector, QueryType } from '../connectors';
import { connectorFactory } from '../connectors/connector_factory';
import { QueryService } from '../services/query_service/base';
import { queryServiceFactory } from '../services/query_service/query_service_factory';
/**
 * Bundles everything needed to run one query: how to run it (queryType),
 * against what (datasource) and with which targets. The datasource connector
 * and the query service are built lazily and cached.
 */
export class QueryConfig {
  queryType: QueryType;
  datasource: Datasource;
  // TODO: Target type (depends on datasource type)
  targets: any[];

  private _datasourceConnector?: DatasourceConnector;
  private _queryService?: QueryService;

  constructor(queryType: QueryType, datasource: Datasource, targets: any[]) {
    // Validate eagerly so a broken config fails at construction time.
    if(queryType === undefined) {
      throw new Error('queryType is undefined');
    }
    if(datasource === undefined) {
      throw new Error('datasource is undefined');
    }
    if(targets === undefined) {
      throw new Error('targets is undefined');
    }
    this.queryType = queryType;
    this.datasource = datasource;
    this.targets = targets;
  }

  /** Connector for this config's datasource (created on first use). */
  get datasourceConnector(): DatasourceConnector {
    if(this._datasourceConnector !== undefined) {
      return this._datasourceConnector;
    }
    this._datasourceConnector = connectorFactory(this);
    return this._datasourceConnector;
  }

  /** Query service matching this config's query type (created on first use). */
  get queryService(): QueryService {
    if(this._queryService !== undefined) {
      return this._queryService;
    }
    this._queryService = queryServiceFactory(this);
    return this._queryService;
  }

  /** Plain-object form for persistence; cached services are not serialized. */
  public toObject() {
    const { queryType, datasource, targets } = this;
    return { queryType, datasource, targets };
  }

  /**
   * Inverse of `toObject`.
   * @throws Error when obj is undefined.
   */
  static fromObject(obj: any): QueryConfig {
    if(obj === undefined) {
      throw new Error('obj is undefined');
    }
    const { queryType, datasource, targets } = obj;
    return new QueryConfig(queryType, datasource, targets);
  }
}

11
src/services/query_service/base.ts

@ -1,11 +0,0 @@
import { Datasource, DatasourceQuery } from '../../connectors';
import { AxiosResponse } from 'axios';
/**
 * Base class for query execution strategies (direct HTTP vs. through Grafana).
 * Concrete services decide how to send a DatasourceQuery and authenticate it.
 */
export abstract class QueryService {
  constructor(protected _datasource: Datasource) { }

  // TODO: we don't need `apiKey` here, we need some abstract auth config for both Direct and Grafana queries
  /**
   * Executes the given query and resolves with the raw axios response.
   * @param apiKey - bearer token; used by the Grafana implementation, ignored by Direct.
   */
  abstract query(query: DatasourceQuery, apiKey?: string): Promise<AxiosResponse<any>>;
}

52
src/services/query_service/direct.ts

@ -1,52 +0,0 @@
import { QueryService } from './base';
import { DatasourceUnavailable } from '../../types';
import { Datasource, DatasourceQuery } from '../../connectors';
import axios, { AxiosResponse } from 'axios';
import * as _ from 'lodash';
/**
 * Executes datasource queries directly over HTTP (no Grafana proxy).
 */
export class DirectQueryService extends QueryService {
  constructor(datasource: Datasource) {
    super(datasource);
  }

  async query(query: DatasourceQuery): Promise<AxiosResponse<any>> {
    // TODO: support auth
    let axiosQuery = {
      ...query,
    };
    _.defaults(axiosQuery, query.schema);

    try {
      // `await` is required here: without it the (possibly rejected) promise
      // is returned to the caller and the catch-block below never runs.
      return await axios(axiosQuery);
    } catch(e) {
      // TODO: seems like this error handler can be used for both Grafana and Direct queries
      const msg = `TSDB-kit: fail while request data: ${e.message}`;
      const parsedUrl = new URL(query.url);
      const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;

      console.error(`${msg} ${queryUrl}`);

      if(e.response !== undefined) {
        console.error(`Response: \
status: ${e.response.status}, \
response data: ${JSON.stringify(e.response.data)}, \
headers: ${JSON.stringify(e.response.headers)}
`);
        // Map well-known HTTP failures to specific error types for callers.
        if(e.response.status === 401) {
          throw new Error(`Unauthorized. Check credentials. ${e.message}`);
        }
        if(e.response.status === 502) {
          let datasourceError = new DatasourceUnavailable(
            `datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
            this._datasource.type,
            query.url
          );
          throw datasourceError;
        }
      }

      throw new Error(msg);
    }
  }
}

64
src/services/query_service/grafana.ts

@ -1,64 +0,0 @@
import { QueryService } from './base';
import { Datasource, DatasourceQuery } from '../../connectors';
import { TsdbKitError, DatasourceUnavailable } from '../../types';
import axios, { AxiosResponse } from 'axios';
import * as _ from 'lodash';
/** Raised when the Grafana instance itself cannot be reached. */
export class GrafanaUnavailable extends TsdbKitError { };

/**
 * Executes datasource queries through the Grafana HTTP API,
 * authenticating with a Bearer API key.
 */
export class GrafanaQueryService extends QueryService {
  constructor(datasource: Datasource) {
    super(datasource);
  }

  async query(query: DatasourceQuery, apiKey: string): Promise<AxiosResponse<any>> {
    // Query-specific headers may extend (and override) the auth header.
    const headers = { Authorization: `Bearer ${apiKey}` };
    if(query.headers !== undefined) {
      _.merge(headers, query.headers);
    }
    // `_.defaults` fills any fields not set explicitly from the query schema.
    const axiosQuery = _.defaults(
      {
        headers,
        url: query.url,
        method: query.method,
      },
      query.schema
    );

    try {
      return await axios(axiosQuery);
    } catch (e) {
      // TODO: seems like this error handler can be used for both Grafana and Direct queries
      const msg = `TSDB-kit: fail while request data: ${e.message}`;
      const parsedUrl = new URL(query.url);
      const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;

      console.error(`${msg} ${queryUrl}`);

      // Connection refused means Grafana itself is down, not the datasource.
      if(e.errno === 'ECONNREFUSED') {
        throw new GrafanaUnavailable(e.message);
      }

      const response = e.response;
      if(response !== undefined) {
        console.error(`Response: \
status: ${response.status}, \
response data: ${JSON.stringify(response.data)}, \
headers: ${JSON.stringify(response.headers)}
`);
        if(response.status === 401) {
          throw new Error(`Unauthorized. Check the API_KEY. ${e.message}`);
        }
        if(response.status === 502) {
          throw new DatasourceUnavailable(
            `datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
            this._datasource.type,
            query.url
          );
        }
      }

      throw new Error(msg);
    }
  }
}

23
src/services/query_service/query_service_factory.ts

@ -1,23 +0,0 @@
import { QueryService } from './base';
import { DirectQueryService } from './direct';
import { GrafanaQueryService } from './grafana';
import { QueryType } from '../../connectors';
import { QueryConfig } from '../../models/query_config';
/**
 * Picks the query service implementation for the config's query type.
 * @throws Error when the query type has no registered service.
 */
export function queryServiceFactory(
  queryConfig: QueryConfig,
): QueryService {
  const classMap = {
    [QueryType.DIRECT]: DirectQueryService,
    [QueryType.GRAFANA]: GrafanaQueryService,
  };
  const queryType = queryConfig.queryType;
  const datasource = queryConfig.datasource;
  const serviceClass = classMap[queryType];
  if(serviceClass === undefined) {
    // Single constant keeps the log line and the thrown error in sync.
    const message = `Queries of type ${queryType} are not supported currently`;
    console.error(message);
    throw new Error(message);
  }
  return new serviceClass(datasource);
}

50
src/tsdb-kit/index.ts

@ -1,49 +1 @@
import { queryByConfig, QueryConfig } from '..';
import { DatasourceType, QueryType } from '../connectors';
const { version } = require('../../package.json')
import { ArgumentParser } from 'argparse';
import * as _ from 'lodash';
// CLI entry point: queries a Prometheus datasource over a time range
// and prints the result to stdout.
const parser = new ArgumentParser();

parser.add_argument('-v', '--version', { action: 'version', version });
parser.add_argument('-U', '--url', { help: 'Datasource URL', required: true });
parser.add_argument('-q', '--query', { help: 'Query Template', required: true });
parser.add_argument('-f', '--from', { help: 'From timestamp (ms), e.g. 1660670020000. If not specified, `now-5m` is used' });
parser.add_argument('-t', '--to', { help: 'To timestamp (ms), e.g. 1660670026000. If not specified, `now` is used' });
parser.add_argument('-u', '--username', { help: 'Basic Auth Username' });
parser.add_argument('-p', '--password', { help: 'Basic Auth Password' });

const args = parser.parse_args();

const timeNowInMs = new Date().getTime();

const PROMETHEUS_URL = args.url;
const QUERY = args.query;
// Default range: the last 5 minutes, ending now.
const FROM = args.from || timeNowInMs - 5 * 60 * 1000;
const TO = args.to || timeNowInMs;
const USERNAME = args.username;
const PASSWORD = args.password;

// Basic auth is only attached when both credentials are provided.
let auth;
if(USERNAME && PASSWORD) {
  auth = { username: USERNAME, password: PASSWORD };
}

const datasource = {
  type: DatasourceType.PROMETHEUS,
  // TODO: remove PROMETHEUS_URL from here
  // NOTE(review): `start`/`end`/`step` are hard-coded in this URL;
  // presumably queryByConfig rewrites them from FROM/TO — confirm
  // before relying on the -f/-t flags.
  url: `${PROMETHEUS_URL}/api/v1/query_range?query=${QUERY}&start=1543411320&end=1543432950&step=30`,
  auth,
};
const targets = [];

const queryConfig = new QueryConfig(QueryType.DIRECT, datasource, targets);
queryByConfig(queryConfig, PROMETHEUS_URL, FROM, TO)
  .then(res => {
    console.log(res);
  })
  .catch(err => {
    console.error('Query error: ', err);
  });
console.log('Hello world');

15
src/types.ts

@ -1,15 +0,0 @@
import { DatasourceType } from './connectors';
/**
 * Base error for all tsdb-kit failures; optionally carries the
 * datasource type and URL that caused the failure.
 */
export class TsdbKitError extends Error {
  constructor(
    message: string,
    public datasourceType?: DatasourceType,
    public datasourceUrl?: string
  ) {
    super(message);
  }
};

// NOTE(review): judging by the name, thrown for invalid time ranges — confirm at call sites.
export class BadRange extends TsdbKitError {};
// Thrown when a datasource cannot be reached (e.g. HTTP 502 from the proxy).
export class DatasourceUnavailable extends TsdbKitError {};

33
webpack.config.js

@ -1,33 +0,0 @@
const webpack = require('webpack');
const path = require('path');

// Bundles the CLI entry point into a single runnable node script in ./bin.
module.exports = {
  mode: 'development',
  target: 'node',
  devtool: 'inline-source-map',
  entry: {
    main: './src/tsdb-kit/index.ts',
  },
  output: {
    path: path.resolve(__dirname, './bin'),
    filename: 'tsdb-kit.js'
  },
  plugins: [
    // Prepends a shebang so the emitted bundle is directly executable.
    new webpack.BannerPlugin({ banner: "#!/usr/bin/env node", raw: true }),
  ],
  resolve: {
    extensions: ['.ts', '.js'],
  },
  module: {
    rules: [
      {
        test: /.ts$/,
        loader: 'ts-loader',
        options: {
          // Separate tsconfig for the CLI build (vs. the library build).
          configFile: 'bin.tsconfig.json'
        }
      }
    ]
  }
};

656
yarn.lock

File diff suppressed because it is too large Load Diff
Loading…
Cancel
Save