diff --git a/package.json b/package.json
index f77b977..d44c232 100644
--- a/package.json
+++ b/package.json
@@ -20,6 +20,7 @@
     "nodemon": "^2.0.20",
     "ts-loader": "^9.4.2",
     "typescript": "^4.9.4",
+    "uuid": "^9.0.0",
     "webpack": "^5.75.0",
     "webpack-cli": "^5.0.1"
   }
diff --git a/src/models/target.ts b/src/models/target.ts
new file mode 100644
index 0000000..ca24bdf
--- /dev/null
+++ b/src/models/target.ts
@@ -0,0 +1,8 @@
+import { DataSourceSettings, PanelModel } from '../types';
+
+export class Target {
+  constructor(
+    public panel: PanelModel,
+    public datasource: DataSourceSettings,
+  ) {}
+}
diff --git a/src/routes/tasks.ts b/src/routes/tasks.ts
index c8d782b..c03ef20 100644
--- a/src/routes/tasks.ts
+++ b/src/routes/tasks.ts
@@ -1,26 +1,24 @@
-import { Target } from '../types/target';
 import { exporterFactory } from '../services/exporter.factory';
 import { EXPORTED_PATH } from '../config';
-import { Task, TaskTableRowConfig } from '../types';
+import { ExportTask } from '../types';
 
 import * as express from 'express';
+import * as _ from 'lodash';
+
 import * as path from 'path';
 import * as fs from 'fs';
 
 type TRequest = {
   body: {
-    from: string,
-    to: string,
-    username: string,
-    tasks: Task[],
+    task: ExportTask,
     url: string,
   },
 };
 
 async function getTasks(req, res) {
-  const resp: TaskTableRowConfig[] = [];
+  const resp: ExportTask[] = [];
   fs.readdir(EXPORTED_PATH, (err, items) => {
     if(err) {
       console.error(err);
@@ -34,15 +32,7 @@ async function getTasks(req, res) {
       // TODO: read async
       let data = fs.readFileSync(path.join(EXPORTED_PATH, item), 'utf8');
       let status = JSON.parse(data);
-      resp.push({
-        timestamp: status.time,
-        username: status.username,
-        datasourceRef: status.datasourceRef,
-        rowsCount: status.exportedRows,
-        progress: status.progress,
-        status: status.status,
-        filename: file.name,
-      });
+      resp.push(status);
     }
 
     res.status(200).send(resp);
@@ -51,38 +41,36 @@
 }
 
 async function addTask(req: TRequest, res) {
-
   const body = req.body;
   const clientUrl = body.url;
-  const from = parseInt(body.from);
-  const to = parseInt(body.to);
-  const username = body.username;
-  const tasks = body.tasks;
+  if (_.isEmpty(clientUrl)) {
+    res.status(400).send('"url" field is required');
+  }
+  const task = body.task;
+  if (_.isEmpty(task)) {
+    res.status(400).send('"task" field is required');
+  }
 
   const datasourceUrl = `${new URL(clientUrl).origin}/api/ds/query`;
 
-  if(isNaN(from) || isNaN(to)) {
+  const from = +task.timeRange.from;
+  const to = +task.timeRange.to;
+  if (isNaN(from) || isNaN(to)) {
     res.status(400).send('Range error: please fill both "from" and "to" fields');
-  } else if(from >= to) {
+  } else if (from >= to) {
     res.status(400).send('Range error: "from" should be less than "to"');
-  } else {
-    const names = tasks.map(item => item.datasource.name).join(', ');
-
-    const targets = tasks.map((task: Task) => new Target(
-      task.panel,
-      task.datasource,
-    ));
-    const exporter = exporterFactory.getExporter();
-    exporter.export(targets, datasourceUrl, username, from, to);
-    res.status(200).send(`Exporting ${names} data from ${new Date(from).toLocaleString()} to ${new Date(to).toLocaleString()}`);
   }
+
+  const exporter = exporterFactory.getExporter();
+  exporter.export(task, datasourceUrl);
+  res.status(200).send(`Export process started`);
 }
 
 async function deleteTask(req, res) {
-  let filename = req.body.filename;
-  let csvFilePath = path.join(EXPORTED_PATH, `${filename}.csv`);
-  let jsonFilePath = path.join(EXPORTED_PATH, `${filename}.json`);
+  let taskId = req.body.taskId;
+  let csvFilePath = path.join(EXPORTED_PATH, `${taskId}.csv`);
+  let jsonFilePath = path.join(EXPORTED_PATH, `${taskId}.json`);
 
   if(fs.existsSync(csvFilePath)) {
     fs.unlink(csvFilePath, err => console.error(err));
diff --git a/src/services/exporter.ts b/src/services/exporter.ts
index 8505ce2..b790de1 100644
--- a/src/services/exporter.ts
+++ b/src/services/exporter.ts
@@ -1,14 +1,15 @@
-import { Target } from '../types/target';
+import { Target } from '../models/target';
 import { URL } from 'url';
 
 import { apiKeys } from '../config';
 import { promisify } from '../utils';
-import { ExportStatus } from '../types/export-status';
-import { DataSourceRef } from '../types';
+import { DashboardQuery, ExportProgress, ExportStatus, ExportTask } from '../types';
 
 import { QueryConfig, queryByConfig } from '@corpglory/tsdb-kit';
 // TODO: export QueryType directly from @corpglory/tsdb-kit
 import { QueryType } from '@corpglory/tsdb-kit/lib/connectors';
+import { v4 as uuidv4 } from 'uuid';
+
 import * as moment from 'moment';
 import * as csv from 'fast-csv';
 import * as fs from 'fs';
@@ -17,67 +18,32 @@ import * as _ from 'lodash';
 
 const MS_IN_DAY = 24 * 60 * 60 * 1000;
 
+const DEFAULT_PROGRESS = {
+  exportedRowsCount: 0,
+  progress: 0,
+  status: ExportStatus.EXPORTING,
+};
+
 export class Exporter {
-  private exportedRows = 0;
-  private createdTimestamp: number;
-  private username: string;
-  private datasourceRef: DataSourceRef;
-  private from: number;
-  private to: number;
-
-  private initCsvStream() {
-    const csvStream = csv.createWriteStream({ headers: true })
-      .on('error', error => console.error(error));
+  private _task: ExportTask;
 
-    const writableStream = fs.createWriteStream(this.getFilePath('csv'));
+  public async export(task: ExportTask, datasourceUrl: string) {
+    this._task = _.cloneDeep(task);
+    this._task.id = uuidv4();
+    this._task.progress = _.cloneDeep(DEFAULT_PROGRESS);
 
-    csvStream.pipe(writableStream);
-    writableStream.on('finish', async () => {
-      console.log(`Everything is written to ${this.getFilename('csv')}`);
-      await this.updateStatus(ExportStatus.FINISHED, 1);
-    })
+    const targets = task.queries.map((query: DashboardQuery) => new Target(
+      query.panel,
+      query.datasource,
+    ));
 
-    return csvStream;
-  }
+    this._validateTargets(datasourceUrl, targets);
 
-  public async updateStatus(status: string, progress: number, error?: string) {
-    try {
-      let time = moment().valueOf();
-      let data = {
-        time,
-        progress,
-        status,
-        error,
-        username: this.username,
-        exportedRows: this.exportedRows,
-        datasourceRef: this.datasourceRef,
-        from: this.from,
-        to: this.to,
-      };
+    await this._updateProgress();
 
-      await promisify(fs.writeFile, this.getFilePath('json'), JSON.stringify(data), 'utf8')
-    } catch(err) {
-      console.error(err);
-      throw new Error('Can`t write file');
-    }
-  }
-
-  // TODO: rename `data` to `targets` or `queries`
-  public async export(data: Target[], datasourceUrl: string, username: string, from: number, to: number) {
-    this.username = username;
-    this.from = from;
-    this.to = to;
-
-    this.validateTargets(datasourceUrl, data);
-
-    const targets = data.map(target => {
-      console.log('target', {
-        ...target.datasource,
-        url: datasourceUrl
-      });
-      return {
-        ...target,
-        metric: new QueryConfig(
+    const queryConfigs = targets.map(
+      target =>
+        new QueryConfig(
           QueryType.GRAFANA,
           {
             ...target.datasource,
@@ -85,57 +51,87 @@
           },
           target.panel.targets
         )
-      }
-    });
+    );
 
-    const datasource = data[0].datasource;
-    this.datasourceRef = { uid: datasource.uid, type: datasource.type };
-
-    await this.updateStatus(ExportStatus.EXPORTING, 0);
-
-    const stream = this.initCsvStream();
+    let from = +this._task.timeRange.from;
+    let to = +this._task.timeRange.to;
 
     const days = Math.ceil((to - from) / MS_IN_DAY);
     console.log(`Total days: ${days}`);
 
-    for(let day = 0; day < days; day++) {
-      to = from + MS_IN_DAY;
+    const stream = this._initCsvStream();
+    try {
+      for(let day = 0; day < days; day++) {
+        to = from + MS_IN_DAY;
 
-      console.log(`${day} day: ${from}ms -> ${to}ms`);
+        console.log(`${day} day: ${from}ms -> ${to}ms`);
 
-      let columns = [];
-      let values = [];
+        let columns = [];
+        let values = [];
 
-      for(const [index, target] of targets.entries()) {
-        const host = new URL(datasourceUrl).origin;
-        const apiKey = apiKeys[host];
+        for(const queryConfig of queryConfigs) {
+          const host = new URL(datasourceUrl).origin;
+          const apiKey = apiKeys[host];
 
-        try {
-          const datasourceMetrics = await queryByConfig(target.metric, datasourceUrl, from, to, apiKey);
+          const datasourceMetrics = await queryByConfig(queryConfig, datasourceUrl, from, to, apiKey);
           columns = datasourceMetrics.columns;
           values = datasourceMetrics.values;
-        } catch (e) {
-          await this.updateStatus(ExportStatus.ERROR, (day + 1) / days, e.message);
-          break;
         }
+
+        if(columns.length > 0) {
+          console.log('values', values);
+          this._writeCsv(stream, {
+            columns,
+            values,
+          });
+        }
+        await this._updateProgress({ status: ExportStatus.EXPORTING, progress: (day + 1) / days });
+
+        from += MS_IN_DAY;
       }
+    } catch (e) {
+      await this._updateProgress({ status: ExportStatus.ERROR, errorMessage: e.message });
+    }
+    stream.end();
+  }
+
+  private _initCsvStream() {
+    const csvStream = csv.createWriteStream({ headers: true })
+      .on('error', async e => await this._updateProgress({ status: ExportStatus.ERROR, errorMessage: e.message }));
 
-      if(columns.length > 0) {
-        console.log('values', values);
-        this.writeCsv(stream, {
-          columns,
-          values,
-        });
+    const writableStream = fs.createWriteStream(this._getFilePath('csv'));
+
+    csvStream.pipe(writableStream);
+    writableStream.on('finish', async () => {
+      if(this._task.progress.status !== ExportStatus.ERROR) {
+        console.log(`Everything is written to ${this._getFilename('csv')}`);
+        await this._updateProgress({ status: ExportStatus.FINISHED, progress: 1 });
+      } else {
+        console.log(`${this._getFilename('csv')} export is finished with error`);
      }
-      await this.updateStatus(ExportStatus.EXPORTING, (day + 1) / days);
     })
 
-      from += MS_IN_DAY;
+    return csvStream;
+  }
+
+  private async _updateProgress(progress?: Partial<ExportProgress>) {
+    try {
+      let time = moment().valueOf();
+
+      const data = {
+        ...this._task,
+        progress: _.assign(this._task.progress, progress, { time }),
+      };
+
+      await promisify(fs.writeFile, this._getFilePath('json'), JSON.stringify(data), 'utf8');
+    } catch(err) {
+      console.error(err);
+      throw new Error('Can`t write file');
    }
-    stream.end();
   }
 
-  private validateTargets(datasourceUrl: string, targets: Target[]) {
+  private _validateTargets(datasourceUrl: string, targets: Target[]) {
     if(!targets || !Array.isArray(targets)) {
       throw new Error('Incorrect targets format');
     }
@@ -152,7 +148,7 @@ export class Exporter {
     }
   }
 
-  private writeCsv(stream, series) {
+  private _writeCsv(stream, series) {
     for(let row of series.values) {
       const isEmpty = _.every(
         _.slice(row, 1),
@@ -164,20 +160,17 @@ export class Exporter {
         }
         stream.write(csvRow);
-        this.exportedRows++;
+        this._task.progress.exportedRowsCount++;
       }
     }
   }
 
-  private getFilename(extension) {
-    if(this.createdTimestamp === undefined) {
-      this.createdTimestamp = moment().valueOf();
-    }
-    return `${this.createdTimestamp}.${this.datasourceRef.uid}.${extension}`;
+  private _getFilename(extension: string): string {
+    return `${this._task.id}.${extension}`;
   }
 
-  private getFilePath(extension) {
-    let filename = this.getFilename(extension);
+  private _getFilePath(extension: string): string {
+    let filename = this._getFilename(extension);
     return path.join(__dirname, `../exported/${filename}`);
   }
 }
diff --git a/src/types/index.ts b/src/types.ts
similarity index 81%
rename from src/types/index.ts
rename to src/types.ts
index ca9a1b5..05aa249 100644
--- a/src/types/index.ts
+++ b/src/types.ts
@@ -80,19 +80,33 @@ export interface PanelModel {
   alert?: any;
 }
 
-// TODO: rename to query
-export type Task = Omit & {
-  selected: boolean;
-  panel: PanelModel;
-  datasource: DataSourceSettings;
+export enum ExportStatus {
+  EXPORTING = 'exporting',
+  FINISHED = 'finished',
+  ERROR = 'error',
+}
+
+export type ExportProgress = {
+  time: number;
+  exportedRowsCount: number;
+  progress: number;
+  status: ExportStatus;
+  errorMessage?: string;
 };
 
-export type TaskTableRowConfig = {
-  timestamp: number;
+export type ExportTask = {
   username: string;
-  datasourceRef: DataSourceRef;
-  rowsCount: number;
-  progress: number;
-  status: string;
-  filename?: string;
+  queries: DashboardQuery[];
+  timeRange: {
+    from: number;
+    to: number;
+  };
+  progress?: ExportProgress;
+  id?: string;
+};
+
+export type DashboardQuery = DataQuery & {
+  selected: boolean;
+  panel: PanelModel;
+  datasource: DataSourceSettings;
 };
diff --git a/src/types/export-status.ts b/src/types/export-status.ts
deleted file mode 100644
index a12b8d7..0000000
--- a/src/types/export-status.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export enum ExportStatus {
-  EXPORTING = 'exporting',
-  FINISHED = 'finished',
-  ERROR = 'error',
-}
diff --git a/src/types/target.ts b/src/types/target.ts
deleted file mode 100644
index d6639b3..0000000
--- a/src/types/target.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { DataSourceSettings, PanelModel } from '.';
-
-export class Target {
-  constructor(
-    public panel: PanelModel,
-    public datasource: DataSourceSettings,
-  ) {}
-}
diff --git a/yarn.lock b/yarn.lock
index 5251a87..5462164 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1555,6 +1555,11 @@ utils-merge@1.0.1:
   resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
   integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
 
+uuid@^9.0.0:
+  version "9.0.0"
+  resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5"
+  integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==
+
 vary@~1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"