
QueryService refactoring:

- add an abstract QueryService class as the parent of DirectQueryService and GrafanaQueryService
- move the `queryType` parameter from `queryByConfig` to `QueryConfig` (see the usage sketch below)
- rename the `<datasource_name>Metric` classes to `<datasource_name>Connector`
- rename some tests
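
For context, a minimal usage sketch of the refactored API. This is only an illustration based on the updated `src/index.ts` and `src/tsdb-kit/index.ts` in this diff: the Prometheus URL/query and timestamps are placeholders, and the import paths (package root for `QueryConfig`/`queryByConfig`, `src/connectors` for `QueryType`) are assumptions, not confirmed by the commit.

    import { QueryConfig, queryByConfig, DatasourceType } from './src/index';
    import { QueryType } from './src/connectors';

    // queryType is now part of QueryConfig instead of being an argument of queryByConfig
    const datasource = {
      type: DatasourceType.PROMETHEUS,
      url: 'http://localhost:9090/api/v1/query_range?query=up&start=0&end=0&step=30' // placeholder
    };
    const queryConfig = new QueryConfig(QueryType.DIRECT, datasource, []);

    // from/to are timestamps in ms; the query service (direct or Grafana) is picked from queryConfig.queryType
    queryByConfig(queryConfig, 'http://localhost:9090', 1660670020000, 1660670026000)
      .then(data => console.log(data));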
Author: rozetko, 2 years ago
Branch: pull/6/head
Commit: fb3b597b3f
Changed files (23; lines added + deleted in parentheses):

1. spec/elasticsearch.jest.ts (8)
2. spec/graphite.jest.ts (7)
3. spec/influxdb.jest.ts (15)
4. spec/prometheus.jest.ts (7)
5. spec/sql.jest.ts (16)
6. src/connectors/connector_factory.ts (24)
7. src/connectors/elasticsearch.ts (2)
8. src/connectors/graphite.ts (2)
9. src/connectors/index.ts (2)
10. src/connectors/influxdb.ts (2)
11. src/connectors/mysql.ts (4)
12. src/connectors/postgres.ts (4)
13. src/connectors/prometheus.ts (2)
14. src/connectors/sql.ts (2)
15. src/index.ts (25)
16. src/models/query_config.ts (21)
17. src/services/direct_service.ts (48)
18. src/services/grafana_service.ts (77)
19. src/services/query_service/base.ts (11)
20. src/services/query_service/direct.ts (53)
21. src/services/query_service/grafana.ts (83)
22. src/services/query_service/query_service_factory.ts (23)
23. src/tsdb-kit/index.ts (9)

spec/elasticsearch.jest.ts (8)

@@ -1,4 +1,4 @@
-import { ElasticsearchMetric } from '../src/connectors/elasticsearch';
+import { ElasticsearchConnector } from '../src/connectors/elasticsearch';
 import { Datasource, DatasourceType } from '../src/connectors';
 import 'jest';
@@ -163,7 +163,7 @@ describe('simple query', function(){
 }
 }];
-let elasticMetric = new ElasticsearchMetric(datasource, targets);
+let connector = new ElasticsearchConnector(datasource, targets);
 it('check correct time processing', function() {
 const expectedQueryTemplate = _.cloneDeep(queryTemplate);
@@ -192,7 +192,7 @@ describe('simple query', function(){
 }
 };
-let result = elasticMetric.getQuery(from, to, limit, offset);
+let result = connector.getQuery(from, to, limit, offset);
 expect(result).toEqual(expectedQuery);
 });
@@ -265,6 +265,6 @@ describe('simple query', function(){
 ]
 };
-expect(elasticMetric.parseResponse(result)).toEqual(expectedResult);
+expect(connector.parseResponse(result)).toEqual(expectedResult);
 });
 });

spec/graphite.jest.ts (7)

@@ -1,4 +1,5 @@
-import { Datasource, QueryConfig, DatasourceType } from '../src/index';
+import { Datasource, DatasourceType } from '../src/index';
+import { GraphiteConnector } from '../src/connectors/graphite';
 import 'jest';
@@ -16,10 +17,10 @@ describe('correct Graphite query', function() {
 };
 let target = `target=template(hosts.$hostname.cpu, hostname="worker1")`;
-let queryConfig = new QueryConfig(datasource, [target]);
+let connector = new GraphiteConnector(datasource, [target]);
 it("test simple query with time clause", function () {
-expect(queryConfig.datasourceConnector.getQuery(1534809600000, 1537488000000, 500, 0).url).toBe(
+expect(connector.getQuery(1534809600000, 1537488000000, 500, 0).url).toBe(
 `${datasource.url}?target=${target}&from=1534809600&until=1537488000&maxDataPoints=500`
 )
 });

spec/targets.jest.ts → spec/influxdb.jest.ts (15)

@@ -1,4 +1,5 @@
-import { Datasource, DatasourceType, QueryConfig } from '../src/index';
+import { Datasource, DatasourceType } from '../src/index';
+import { InfluxdbConnector } from '../src/connectors/influxdb';
 import 'jest';
@@ -17,24 +18,24 @@ describe('Correct InfluxDB query', function() {
 let target = 'mean("value")';
 it("test query with two time expressions", function() {
-const queryConfig = new QueryConfig(datasource, [target]);
-expect(queryConfig.datasourceConnector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
+const connector = new InfluxdbConnector(datasource, [target]);
+expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
 `SELECT mean("value") FROM "db" WHERE time >= 1534809600ms AND time <= 1537488000ms LIMIT 666 OFFSET 10`
 )
 });
 it('test query with one time expression', function() {
 datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time >= now() - 6h GROUP BY time(30s) fill(null)`;
-const queryConfig = new QueryConfig(datasource, [target]);
-expect(queryConfig.datasourceConnector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
+const connector = new InfluxdbConnector(datasource, [target]);
+expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
 `SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10`
 )
 });
 it('test query with time expression', function() {
 datasource.params.q = `SELECT mean("value") FROM "cpu_value" WHERE time>= now() - 6h AND time<xxx GROUP BY time(30s) fill(null)`;
-const queryConfig = new QueryConfig(datasource, [target]);
-expect(queryConfig.datasourceConnector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
+const connector = new InfluxdbConnector(datasource, [target]);
+expect(connector.getQuery(1534809600,1537488000,666,10).schema.params.q).toBe(
 `SELECT mean("value") FROM "cpu_value" WHERE time >= 1534809600ms AND time <= 1537488000ms GROUP BY time(30s) fill(null) LIMIT 666 OFFSET 10`
 )
 });

spec/prometheus.jest.ts (7)

@@ -1,7 +1,8 @@
-import 'jest';
-import { PrometheusMetric } from '../src/connectors/prometheus';
+import { PrometheusConnector } from '../src/connectors/prometheus';
 import { DatasourceType } from '../src/connectors';
+import 'jest';
 describe('Test Prometheus time range processing', function() {
 let datasource = {
@@ -9,7 +10,7 @@ describe('Test Prometheus time range processing', function() {
 url: 'api/datasources/proxy/4/api/v1/query_range?query=node_disk_io_time_ms&start=1543411320&end=1543432950&step=30'
 }
 let targets = [];
-let prometheus = new PrometheusMetric(datasource, targets);
+let prometheus = new PrometheusConnector(datasource, targets);
 it('check that from/to present in url', function() {
 let from = 1234567891234; //milliseconds

spec/postgres.jest.ts → spec/sql.jest.ts (16)

@@ -1,4 +1,4 @@
-import { PostgresMetric } from '../src/connectors/postgres';
+import { SqlConnector } from '../src/connectors/sql';
 import { DatasourceType, DatasourceQuery } from '../src/connectors';
 import 'jest';
@@ -11,8 +11,8 @@ describe('Test query creation', function() {
 let offset = 0;
 let from = 1542983750857;
 let to = 1542984313292;
-let postgres = getMetricForSqlQuery();
-let mQuery: DatasourceQuery = postgres.getQuery(from, to, limit, offset);
+let connector = getConnectorForSqlQuery();
+let mQuery: DatasourceQuery = connector.getQuery(from, to, limit, offset);
 it('test that payload placed to data field', function() {
 expect('data' in mQuery.schema).toBeTruthy();
@@ -31,7 +31,7 @@ describe('Test query creation', function() {
 });
 describe('Test result parsing', function() {
-let postgres = getMetricForSqlQuery();
+let connector = getConnectorForSqlQuery();
 let timestamps = [1542983800000, 1542983800060, 1542983800120]
 let response = {
 data: {
@@ -58,7 +58,7 @@ describe('Test result parsing', function() {
 }
 }
-let result = postgres.parseResponse(response);
+let result = connector.parseResponse(response);
 it('check results columns order', function() {
 let timestampColumnNumber = result.columns.indexOf('timestamp');
@@ -216,11 +216,11 @@ describe('Test sql processing', function() {
 });
 function checkExpectation(original: string, expected: string, from: number, to: number, limit: number, offset: number) {
-let metric = getMetricForSqlQuery(original);
+let metric = getConnectorForSqlQuery(original);
 expect(metric.getQuery(from, to, limit, offset).schema.data.queries[0].rawSql).toBe(expected);
 }
-function getMetricForSqlQuery(query: string = ''): PostgresMetric {
+function getConnectorForSqlQuery(query: string = ''): SqlConnector {
 const queryPayload = {
 from: 1542983750857,
 to: 1542984313292
@@ -241,5 +241,5 @@ function getMetricForSqlQuery(query: string = ''): PostgresMetric {
 format: 'time_series'
 }];
-return new PostgresMetric(datasource, targets);
+return new SqlConnector(datasource, targets);
 }

src/connectors/connector_factory.ts (24)

@@ -1,10 +1,10 @@
-import { InfluxdbMetric } from './influxdb';
-import { GraphiteMetric } from './graphite';
+import { InfluxdbConnector } from './influxdb';
+import { GraphiteConnector } from './graphite';
 import { DatasourceConnector, DatasourceType } from '.';
-import { PrometheusMetric } from './prometheus';
-import { PostgresMetric } from './postgres';
-import { ElasticsearchMetric } from './elasticsearch';
-import { MysqlMetric } from './mysql';
+import { PrometheusConnector } from './prometheus';
+import { PostgresConnector } from './postgres';
+import { ElasticsearchConnector } from './elasticsearch';
+import { MysqlConnector } from './mysql';
 import { QueryConfig } from '../models/query_config';
@@ -13,12 +13,12 @@ export function connectorFactory(
 queryConfig: QueryConfig,
 ): DatasourceConnector {
 const classMap = {
-[DatasourceType.INFLUXDB]: InfluxdbMetric,
-[DatasourceType.GRAPHITE]: GraphiteMetric,
-[DatasourceType.PROMETHEUS]: PrometheusMetric,
-[DatasourceType.POSTGRES]: PostgresMetric,
-[DatasourceType.ELASTICSEARCH]: ElasticsearchMetric,
-[DatasourceType.MYSQL]: MysqlMetric,
+[DatasourceType.INFLUXDB]: InfluxdbConnector,
+[DatasourceType.GRAPHITE]: GraphiteConnector,
+[DatasourceType.PROMETHEUS]: PrometheusConnector,
+[DatasourceType.POSTGRES]: PostgresConnector,
+[DatasourceType.ELASTICSEARCH]: ElasticsearchConnector,
+[DatasourceType.MYSQL]: MysqlConnector,
 };
 const datasource = queryConfig.datasource;
 const targets = queryConfig.targets;

src/connectors/elasticsearch.ts (2)

@@ -28,7 +28,7 @@ export type Aggregation = {
 const DATE_HISTOGRAM_FIELD = 'date_histogram';
-export class ElasticsearchMetric extends DatasourceConnector {
+export class ElasticsearchConnector extends DatasourceConnector {
 constructor(datasource: Datasource, targets: any[]) {
 super(datasource, targets);
 }

src/connectors/graphite.ts (2)

@@ -3,7 +3,7 @@ import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.'
 import * as _ from 'lodash';
-export class GraphiteMetric extends DatasourceConnector {
+export class GraphiteConnector extends DatasourceConnector {
 constructor(datasource: Datasource, targets: any[]) {
 super(datasource, targets);
 }

src/connectors/index.ts (2)

@@ -41,7 +41,7 @@ export abstract class DatasourceConnector {
 public datasource: Datasource,
 // TODO: Target type
 public targets: any[],
-) {};
+) {}
 /*
 from / to - timestamp in ms
 limit - max number of items in result

src/connectors/influxdb.ts (2)

@@ -4,7 +4,7 @@ import { processSQLLimitOffset } from './utils';
 const INFLUX_QUERY_TIME_REGEX = /time ?[><=]+ ?[^A-Z]+(AND ?time ?[><=]+ ?[^A-Z]+)?/;
-export class InfluxdbMetric extends DatasourceConnector {
+export class InfluxdbConnector extends DatasourceConnector {
 private _queryParts: string[];

src/connectors/mysql.ts (4)

@@ -1,5 +1,5 @@
-import { SqlMetric } from './sql';
+import { SqlConnector } from './sql';
-export class MysqlMetric extends SqlMetric {
+export class MysqlConnector extends SqlConnector {
 }

src/connectors/postgres.ts (4)

@@ -1,5 +1,5 @@
-import { SqlMetric } from './sql';
+import { SqlConnector } from './sql';
-export class PostgresMetric extends SqlMetric {
+export class PostgresConnector extends SqlConnector {
 }

src/connectors/prometheus.ts (2)

@@ -3,7 +3,7 @@ import { DatasourceConnector, Datasource, DatasourceQuery, DataTable } from '.';
 const QUERY_TIME_REGEX = /\&start=[^\&]*\&end=[^\&]*\&/;
-export class PrometheusMetric extends DatasourceConnector {
+export class PrometheusConnector extends DatasourceConnector {
 constructor(datasource: Datasource, targets: any[]) {
 super(datasource, targets);

src/connectors/sql.ts (2)

@@ -4,7 +4,7 @@ import { processSQLLimitOffset } from './utils';
 import * as _ from 'lodash';
 // as of 26.09.2020, it works for all SQL datasources
-export class SqlMetric extends DatasourceConnector {
+export class SqlConnector extends DatasourceConnector {
 private _targetName: string; //save first target name, while multi metric not implemented
 private url: string = 'api/tsdb/query';

src/index.ts (25)

@@ -1,13 +1,11 @@
-import { DataTable, QueryType } from './connectors';
+import { DataTable } from './connectors';
 import { QueryConfig } from './models/query_config';
-import { queryDirect } from './services/direct_service';
-import { queryGrafana } from './services/grafana_service';
 import { BadRange } from './types';
 export { QueryConfig } from './models/query_config';
 export { Datasource, DatasourceType, DataTable } from './connectors'
 export { DatasourceUnavailable } from './types';
-export { GrafanaUnavailable } from './services/grafana_service';
+export { GrafanaUnavailable } from './services/query_service/grafana';
 const CHUNK_SIZE = 50000;
@@ -18,21 +16,21 @@ const CHUNK_SIZE = 50000;
 */
 export async function queryByConfig(
 // TODO: check how did we wanna use `url` field
-queryConfig: QueryConfig, url: string, from: number, to: number, queryType: QueryType,
+queryConfig: QueryConfig, url: string, from: number, to: number,
 // TODO: we need an abstract DatasourceConfig class which will differ in direct and grafana queries
 apiKey?: string
 ): Promise<DataTable> {
 if(from > to) {
 throw new BadRange(
-`Data-kit got wrong range: from ${from} > to ${to}`,
+`TSDB-kit got wrong range: from ${from} > to ${to}`,
 queryConfig.datasource.type,
 url
 );
 }
 if(from === to) {
-console.warn(`Data-kit got from === to`);
+console.warn(`TSDB-kit got from === to`);
 }
 let data: DataTable = {
@@ -42,18 +40,7 @@ export async function queryByConfig(
 while(true) {
 let query = queryConfig.datasourceConnector.getQuery(from, to, CHUNK_SIZE, data.values.length);
-let res: any;
-// TODO: use polymorphic `query` method instead
-switch(queryType) {
-case QueryType.GRAFANA:
-res = await queryGrafana(query, apiKey as string, queryConfig.datasource);
-break;
-case QueryType.DIRECT:
-res = await queryDirect(query, queryConfig.datasource);
-break;
-default:
-throw new Error(`Unknown query type: ${queryType}`);
-}
+const res = await queryConfig.queryService.query(query, apiKey);
 let chunk = queryConfig.datasourceConnector.parseResponse(res);
 let values = chunk.values;
 data.values = data.values.concat(values);

src/models/query_config.ts (21)

@@ -1,20 +1,28 @@
-import { Datasource, DatasourceConnector } from '../connectors';
+import { Datasource, DatasourceConnector, QueryType } from '../connectors';
 import { connectorFactory } from '../connectors/connector_factory';
+import { QueryService } from '../services/query_service/base';
+import { queryServiceFactory } from '../services/query_service/query_service_factory';
 export class QueryConfig {
+queryType: QueryType;
 datasource: Datasource;
 // TODO: Target type (depends on datasource type)
 targets: any[];
 private _datasourceConnector?: DatasourceConnector;
+private _queryService?: QueryService;
-constructor(datasource: Datasource, targets: any[]) {
+constructor(queryType: QueryType, datasource: Datasource, targets: any[]) {
+if(queryType === undefined) {
+throw new Error('queryType is undefined');
+}
 if(datasource === undefined) {
 throw new Error('datasource is undefined');
 }
 if(targets === undefined) {
 throw new Error('targets is undefined');
 }
+this.queryType = queryType;
 this.datasource = datasource;
 this.targets = targets;
 }
@@ -26,8 +34,16 @@ export class QueryConfig {
 return this._datasourceConnector;
 }
+get queryService(): QueryService {
+if(this._queryService === undefined) {
+this._queryService = queryServiceFactory(this);
+}
+return this._queryService;
+}
 public toObject() {
 return {
+queryType: this.queryType,
 datasource: this.datasource,
 targets: this.targets,
 };
@@ -38,6 +54,7 @@ export class QueryConfig {
 throw new Error('obj is undefined');
 }
 return new QueryConfig(
+obj.queryType,
 obj.datasource,
 obj.targets,
 );

src/services/direct_service.ts (48)

@@ -1,48 +0,0 @@
-import { DatasourceUnavailable } from '../types';
-import { Datasource, DatasourceQuery } from '../connectors';
-import axios from 'axios';
-import * as _ from 'lodash';
-// TODO: support direct queries auth
-// TODO: move to class and inherit from QueryService abstract class
-export async function queryDirect(query: DatasourceQuery, datasource: Datasource) {
-let axiosQuery = {
-url: query.url,
-method: query.method,
-};
-console.log(axiosQuery)
-_.defaults(axiosQuery, query.schema);
-try {
-return axios(axiosQuery);
-} catch (e) {
-// TODO: seems like this error handler can be used for both Grafana and Direct queries
-const msg = `TSDB-kit: fail while request data: ${e.message}`;
-const parsedUrl = new URL(query.url);
-const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
-console.error(`${msg} ${queryUrl}`);
-if(e.response !== undefined) {
-console.error(`Response: \
-status: ${e.response.status}, \
-response data: ${JSON.stringify(e.response.data)}, \
-headers: ${JSON.stringify(e.response.headers)}
-`);
-if(e.response.status === 401) {
-throw new Error(`Unauthorized. Check credentials. ${e.message}`);
-}
-if(e.response.status === 502) {
-let datasourceError = new DatasourceUnavailable(
-`datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
-datasource.type,
-query.url
-);
-throw datasourceError;
-}
-}
-throw new Error(msg);
-}
-}

src/services/grafana_service.ts (77)

@@ -1,77 +0,0 @@
-import { Datasource, DatasourceQuery } from '../connectors';
-import { TsdbKitError, DatasourceUnavailable } from '../types';
-import axios from 'axios';
-import * as _ from 'lodash';
-export class GrafanaUnavailable extends TsdbKitError { };
-// TODO: move to class and inherit from QueryService abstract class
-export async function queryGrafana(query: DatasourceQuery, apiKey: string, datasource: Datasource) {
-let headers = { Authorization: `Bearer ${apiKey}` };
-const grafanaUrl = getGrafanaUrl(query.url);
-query.url = `${grafanaUrl}/${query.url}`;
-if(query.headers !== undefined) {
-_.merge(headers, query.headers);
-}
-let axiosQuery = {
-headers,
-url: query.url,
-method: query.method,
-};
-_.defaults(axiosQuery, query.schema);
-try {
-return axios(axiosQuery);
-} catch (e) {
-// TODO: seems like this error handler can be used for both Grafana and Direct queries
-const msg = `TSDB-kit: fail while request data: ${e.message}`;
-const parsedUrl = new URL(query.url);
-const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
-console.error(`${msg} ${queryUrl}`);
-if(e.errno === 'ECONNREFUSED') {
-throw new GrafanaUnavailable(e.message);
-}
-if(e.response !== undefined) {
-console.error(`Response: \
-status: ${e.response.status}, \
-response data: ${JSON.stringify(e.response.data)}, \
-headers: ${JSON.stringify(e.response.headers)}
-`);
-if(e.response.status === 401) {
-throw new Error(`Unauthorized. Check the API_KEY. ${e.message}`);
-}
-if(e.response.status === 502) {
-let datasourceError = new DatasourceUnavailable(
-`datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
-datasource.type,
-query.url
-);
-throw datasourceError;
-}
-}
-throw new Error(msg);
-}
-}
-function getGrafanaUrl(url: string): string {
-const parsedUrl = new URL(url);
-const path = parsedUrl.pathname;
-const panelUrl = path.match(/^\/*([^\/]*)\/d\//);
-if(panelUrl === null) {
-return url;
-}
-const origin = parsedUrl.origin;
-const grafanaSubPath = panelUrl[1];
-if(grafanaSubPath.length > 0) {
-return `${origin}/${grafanaSubPath}`;
-}
-return origin;
-}

src/services/query_service/base.ts (11)

@@ -0,0 +1,11 @@
+import { Datasource, DatasourceQuery } from '../../connectors';
+import { AxiosResponse } from 'axios';
+export abstract class QueryService {
+constructor(protected _datasource: Datasource) { }
+// TODO: we don't need `apiKey` here, we need some abstract auth config for both Direct and Grafana queries
+abstract query(query: DatasourceQuery, apiKey?: string): Promise<AxiosResponse<any>>;
+}

src/services/query_service/direct.ts (53)

@@ -0,0 +1,53 @@
+import { QueryService } from './base';
+import { DatasourceUnavailable } from '../../types';
+import { Datasource, DatasourceQuery } from '../../connectors';
+import axios, { AxiosResponse } from 'axios';
+import * as _ from 'lodash';
+export class DirectQueryService extends QueryService {
+constructor(datasource: Datasource) {
+super(datasource);
+}
+async query(query: DatasourceQuery): Promise<AxiosResponse<any>> {
+// TODO: support auth
+let axiosQuery = {
+url: query.url,
+method: query.method,
+};
+_.defaults(axiosQuery, query.schema);
+try {
+return axios(axiosQuery);
+} catch(e) {
+// TODO: seems like this error handler can be used for both Grafana and Direct queries
+const msg = `TSDB-kit: fail while request data: ${e.message}`;
+const parsedUrl = new URL(query.url);
+const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
+console.error(`${msg} ${queryUrl}`);
+if(e.response !== undefined) {
+console.error(`Response: \
+status: ${e.response.status}, \
+response data: ${JSON.stringify(e.response.data)}, \
+headers: ${JSON.stringify(e.response.headers)}
+`);
+if(e.response.status === 401) {
+throw new Error(`Unauthorized. Check credentials. ${e.message}`);
+}
+if(e.response.status === 502) {
+let datasourceError = new DatasourceUnavailable(
+`datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
+this._datasource.type,
+query.url
+);
+throw datasourceError;
+}
+}
+throw new Error(msg);
+}
+}
+}

src/services/query_service/grafana.ts (83)

@@ -0,0 +1,83 @@
+import { QueryService } from './base';
+import { Datasource, DatasourceQuery } from '../../connectors';
+import { TsdbKitError, DatasourceUnavailable } from '../../types';
+import axios, { AxiosResponse } from 'axios';
+import * as _ from 'lodash';
+export class GrafanaUnavailable extends TsdbKitError { };
+export class GrafanaQueryService extends QueryService {
+constructor(datasource: Datasource) {
+super(datasource);
+}
+async query(query: DatasourceQuery, apiKey: string): Promise<AxiosResponse<any>> {
+let headers = { Authorization: `Bearer ${apiKey}` };
+const grafanaUrl = getGrafanaUrl(query.url);
+query.url = `${grafanaUrl}/${query.url}`;
+if(query.headers !== undefined) {
+_.merge(headers, query.headers);
+}
+let axiosQuery = {
+headers,
+url: query.url,
+method: query.method,
+};
+_.defaults(axiosQuery, query.schema);
+try {
+return axios(axiosQuery);
+} catch (e) {
+// TODO: seems like this error handler can be used for both Grafana and Direct queries
+const msg = `TSDB-kit: fail while request data: ${e.message}`;
+const parsedUrl = new URL(query.url);
+const queryUrl = `query url: ${JSON.stringify(parsedUrl.pathname)}`;
+console.error(`${msg} ${queryUrl}`);
+if(e.errno === 'ECONNREFUSED') {
+throw new GrafanaUnavailable(e.message);
+}
+if(e.response !== undefined) {
+console.error(`Response: \
+status: ${e.response.status}, \
+response data: ${JSON.stringify(e.response.data)}, \
+headers: ${JSON.stringify(e.response.headers)}
+`);
+if(e.response.status === 401) {
+throw new Error(`Unauthorized. Check the API_KEY. ${e.message}`);
+}
+if(e.response.status === 502) {
+let datasourceError = new DatasourceUnavailable(
+`datasource ${parsedUrl.pathname} unavailable, message: ${e.message}`,
+this._datasource.type,
+query.url
+);
+throw datasourceError;
+}
+}
+throw new Error(msg);
+}
+}
+}
+export function getGrafanaUrl(url: string): string {
+const parsedUrl = new URL(url);
+const path = parsedUrl.pathname;
+const panelUrl = path.match(/^\/*([^\/]*)\/d\//);
+if(panelUrl === null) {
+return url;
+}
+const origin = parsedUrl.origin;
+const grafanaSubPath = panelUrl[1];
+if(grafanaSubPath.length > 0) {
+return `${origin}/${grafanaSubPath}`;
+}
+return origin;
+}

src/services/query_service/query_service_factory.ts (23)

@@ -0,0 +1,23 @@
+import { QueryService } from './base';
+import { DirectQueryService } from './direct';
+import { GrafanaQueryService } from './grafana';
+import { QueryType } from '../../connectors';
+import { QueryConfig } from '../../models/query_config';
+export function queryServiceFactory(
+queryConfig: QueryConfig,
+): QueryService {
+const classMap = {
+[QueryType.DIRECT]: DirectQueryService,
+[QueryType.GRAFANA]: GrafanaQueryService,
+};
+const queryType = queryConfig.queryType;
+const datasource = queryConfig.datasource;
+if(classMap[queryType] === undefined) {
+console.error(`Queries of type ${queryType} are not supported currently`);
+throw new Error(`Queries of type ${queryType} are not supported currently`);
+} else {
+return new classMap[queryType](datasource);
+}
+}

src/tsdb-kit/index.ts (9)

@@ -8,16 +8,17 @@ import * as _ from 'lodash';
 // TODO: these `const`s should be CLI arguments
 const PROMETHEUS_URL = 'http://localhost:9090';
 const QUERY = '100-(avg by (instance) (irate(node_cpu_seconds_total{job="nvicta-ai-node-exporter",mode="idle"}[5m])) * 100)';
-const FROM = 1660307430000; // ms
-const TO = 1660307437000; // ms
+const FROM = 1660670020000; // ms
+const TO = 1660670026000; // ms
 const datasource = {
 type: DatasourceType.PROMETHEUS,
+// TODO: remove PROMETHEUS_URL from here
 url: `${PROMETHEUS_URL}/api/v1/query_range?query=${QUERY}&start=1543411320&end=1543432950&step=30`
 }
 const targets = [];
-const queryConfig = new QueryConfig(datasource, targets);
-queryByConfig(queryConfig, PROMETHEUS_URL, FROM, TO, QueryType.DIRECT)
+const queryConfig = new QueryConfig(QueryType.DIRECT, datasource, targets);
+queryByConfig(queryConfig, PROMETHEUS_URL, FROM, TO)
 .then(res => {
 console.log(res);
 })
