@@ -70,17 +70,15 @@ export class Exporter {
       console.log(`${day} day: ${from}ms -> ${to}ms`);
 
       let columns = [];
-      let rows = [];
+      let rowsDict = {};
 
       for(const queryConfig of queryConfigs) {
         const datasourceMetrics: DataTable = await queryByConfig(queryConfig, datasourceUrl, from, to, apiKey);
         columns = this._updateColumns(columns, datasourceMetrics.columns);
-        console.log("columns", columns)
-        rows = this._updateRows(rows, datasourceMetrics.values);
-        console.log("rows", rows.slice(0, 10));
+        rowsDict = this._updateRows(rowsDict, datasourceMetrics.values, datasourceMetrics.columns);
       }
 
-      rows = rows.map((row: number[]) => [toIsoString(row[0], timeZoneName), ...row.slice(1)]);
-
+      const rows = this._getRowsFromDict(rowsDict, columns);
+      // const formattedRows = rows.map((row: number[]) => [toIsoString(row[0], timeZoneName), ...row.slice(1)]);
       if(columns.length > 0) {
         this._writeCsv(stream, { columns, rows });
@@ -100,20 +98,31 @@ export class Exporter {
     return _.concat(columnsToUpdate, queriedColumns.slice(1));
   }
 
-  private _updateRows(rowsToUpdate: (number | null)[][], queriedRows: (number | null)[][]): (number | null)[][] {
-    if(_.isEmpty(rowsToUpdate)) {
-      return queriedRows;
-    }
-    if(rowsToUpdate.length !== queriedRows.length) {
-      throw new Error(`All queries should return rows of the same lengths`);
-    }
-    for(const rowIdx in rowsToUpdate) {
-      if(queriedRows[rowIdx][0] !== rowsToUpdate[rowIdx][0]) {
-        throw new Error('Queries should return the same timestamps');
-      }
-      rowsToUpdate[rowIdx] = _.concat(rowsToUpdate[rowIdx], queriedRows[rowIdx].slice(1));
-    }
-    return rowsToUpdate;
+  private _updateRows(dict, queriedRows: (number | null)[][], queriedColumns: string[]): any {
+    const columns = queriedColumns.slice(1);
+    for(const rowIdx in queriedRows) {
+      const key = queriedRows[rowIdx][0];
+      const values = queriedRows[rowIdx].slice(1);
+      dict[key] = dict[key] || {};
+      for(const valueIdx in values) {
+        dict[key][columns[valueIdx]] = values[valueIdx];
+      }
+    }
+    return dict;
   }
 
+  private _getRowsFromDict(dict: any, columns: string[]): (number | null)[][] {
+    let keyList = _.orderBy(_.keys(dict));
+    let rows = [];
+    for(const keyIdx in keyList) {
+      const key = keyList[keyIdx];
+      rows[keyIdx] = [key];
+      for(const column of columns) {
+        const value = dict[key][column] || null;
+        rows[keyIdx].push(value)
+      }
+    }
+    return rows;
+  }
+
   private _initCsvStream() {
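For readers skimming the diff: the new `_updateRows` / `_getRowsFromDict` pair replaces positional concatenation, which required every query to return the same timestamps in the same order, with a merge keyed by timestamp. The sketch below reproduces that strategy in isolation; the standalone `updateRows` / `getRowsFromDict` helpers, the simplified `DataTable` shape, and the sample data are illustrative assumptions, not part of the exporter.

```ts
// Standalone sketch of the timestamp-keyed merge used by the new _updateRows/_getRowsFromDict.
// Assumed simplified DataTable shape: the first column is the timestamp.
import * as _ from 'lodash';

type DataTable = { columns: string[]; values: (number | null)[][] };

type RowsDict = Record<string, Record<string, number | null>>;

function updateRows(dict: RowsDict, queried: DataTable): RowsDict {
  const columns = queried.columns.slice(1);   // drop the shared timestamp column
  for(const row of queried.values) {
    const key = String(row[0]);               // timestamp becomes the dictionary key
    const values = row.slice(1);
    dict[key] = dict[key] || {};
    for(let i = 0; i < values.length; i++) {
      dict[key][columns[i]] = values[i];
    }
  }
  return dict;
}

function getRowsFromDict(dict: RowsDict, columns: string[]): (string | number | null)[][] {
  // Keys are strings here (as in the diff), so _.orderBy sorts them lexicographically;
  // converting them back to numbers would be needed for a strict chronological sort.
  const keys = _.orderBy(_.keys(dict));
  // `?? null` keeps stored zeros; the diff itself uses `|| null`, which would drop them.
  return keys.map(key => [key, ...columns.map(column => dict[key][column] ?? null)]);
}

// Two queries with overlapping but different timestamps:
let dict: RowsDict = {};
dict = updateRows(dict, { columns: ['time', 'cpu'], values: [[100, 0.5], [200, 0.7]] });
dict = updateRows(dict, { columns: ['time', 'mem'], values: [[200, 42], [300, 43]] });

console.log(getRowsFromDict(dict, ['cpu', 'mem']));
// [ [ '100', 0.5, null ], [ '200', 0.7, 42 ], [ '300', null, 43 ] ]
```

With this approach each query contributes whatever timestamps it has, and a column that is missing at a given timestamp comes back as `null`, which is why the old "All queries should return rows of the same lengths" and "Queries should return the same timestamps" checks can be dropped.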