Browse Source

Abstract class for db connector #906 (#908)

* db connector

* fix imports

* minor fixes

* todos

* minor codestyle fix

* todo

* rm extra logs

* abstract class -> interface

* singletons

Co-authored-by: corpglory-dev <dev@corpglory.com>
pull/1/head
rozetko 4 years ago committed by GitHub
parent
commit
65f77676c2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 3
      server/src/models/analytic_unit_cache_model.ts
  2. 3
      server/src/models/analytic_units/db.ts
  3. 3
      server/src/models/detection_model.ts
  4. 3
      server/src/models/segment_model.ts
  5. 8
      server/src/services/data_service/collection.ts
  6. 39
      server/src/services/data_service/db_connector/factory.ts
  7. 8
      server/src/services/data_service/db_connector/index.ts
  8. 69
      server/src/services/data_service/db_connector/mongodb_connector.ts
  9. 67
      server/src/services/data_service/db_connector/nedb_connector.ts
  10. 98
      server/src/services/data_service/index.ts
  11. 3
      server/src/services/data_service/migrations.ts

3
server/src/models/analytic_unit_cache_model.ts

@ -1,5 +1,6 @@
import { AnalyticUnitId, AnalyticUnit } from './analytic_units';
import { Collection, makeDBQ } from '../services/data_service';
import { Collection } from '../services/data_service/collection';
import { makeDBQ } from '../services/data_service';
import * as _ from 'lodash';

3
server/src/models/analytic_units/db.ts

@ -1,7 +1,8 @@
import { createAnalyticUnitFromObject } from './utils';
import { AnalyticUnit } from './analytic_unit_model';
import { AnalyticUnitId, FindManyQuery } from './types';
import { Collection, makeDBQ, SortingOrder } from '../../services/data_service';
import { Collection } from '../../services/data_service/collection';
import { makeDBQ, SortingOrder } from '../../services/data_service';
import { Metric } from 'grafana-datasource-kit';

3
server/src/models/detection_model.ts

@ -1,5 +1,6 @@
import { AnalyticUnitId } from './analytic_units';
import { Collection, makeDBQ } from '../services/data_service';
import { Collection } from '../services/data_service/collection';
import { makeDBQ } from '../services/data_service';
import * as _ from 'lodash';

3
server/src/models/segment_model.ts

@ -1,7 +1,8 @@
import { AnalyticUnitId } from './analytic_units';
import * as AnalyticUnit from '../models/analytic_units';
import * as AnalyticUnitCache from '../models/analytic_unit_cache_model';
import { Collection, makeDBQ } from '../services/data_service';
import { Collection } from '../services/data_service/collection';
import { makeDBQ } from '../services/data_service';
import * as _ from 'lodash';

8
server/src/services/data_service/collection.ts

@ -0,0 +1,8 @@
// Logical database collections used across Hastic models.
// Backends map each member to a concrete store:
//   NeDB     -> a datastore file (see NedbConnector.COLLECTION_TO_CONFIG_MAPPING)
//   MongoDB  -> a named collection (see MongodbConnector.COLLECTION_TO_NAME_MAPPING)
export enum Collection {
  ANALYTIC_UNITS,
  ANALYTIC_UNIT_CACHES,
  SEGMENTS,
  THRESHOLD,
  DETECTION_SPANS,
  DB_META
};

39
server/src/services/data_service/db_connector/factory.ts

@ -0,0 +1,39 @@
import { DBType } from '../../data_layer';
import { DbConnector } from './index';
import { MongodbConnector } from './mongodb_connector';
import { NedbConnector } from './nedb_connector';
import * as config from '../../../config';
/**
 * Process-wide provider of the single DbConnector instance,
 * selected by config.HASTIC_DB_CONNECTION_TYPE ("nedb" | "mongodb").
 *
 * The in-flight initialization promise is cached (rather than the resolved
 * connector) so that concurrent callers never run `connector.init()` twice.
 * A failed init clears the cache, so the next call retries — matching the
 * behavior of retrying until a connector is successfully initialized.
 */
export class DbConnectorFactory {
  private static _connectorPromise?: Promise<DbConnector>;

  // Static-only class: not meant to be instantiated.
  private constructor() { }

  /**
   * Returns the shared, initialized DbConnector.
   * @throws Error when HASTIC_DB_CONNECTION_TYPE names an unsupported backend.
   */
  public static getDbConnector(): Promise<DbConnector> {
    if(this._connectorPromise === undefined) {
      this._connectorPromise = this._createDbConnector().catch(err => {
        // Don't poison future attempts with a rejected cached promise.
        this._connectorPromise = undefined;
        throw err;
      });
    }
    return this._connectorPromise;
  }

  // Picks the backend from config and awaits its initialization.
  private static async _createDbConnector(): Promise<DbConnector> {
    let connector: DbConnector;
    switch(config.HASTIC_DB_CONNECTION_TYPE) {
      case DBType.nedb:
        connector = NedbConnector.instance;
        break;
      case DBType.mongodb:
        connector = MongodbConnector.instance;
        break;
      default:
        throw new Error(
          `"${config.HASTIC_DB_CONNECTION_TYPE}" HASTIC_DB_CONNECTION_TYPE is not supported. Possible values: "nedb", "mongodb"`
        );
    }
    await connector.init();
    return connector;
  }
}

8
server/src/services/data_service/db_connector/index.ts

@ -0,0 +1,8 @@
import { Collection } from '../collection';
import { dbCollection } from '../../data_layer';
/**
 * Common contract for database backends (NeDB, MongoDB).
 * Obtain instances through DbConnectorFactory.getDbConnector().
 */
export interface DbConnector {
  // Maps each logical Collection to the backend-specific collection handle.
  db: Map<Collection, dbCollection>;
  // Opens the connection / datastores; must resolve before `db` is used.
  init(): Promise<void>;
  // TODO: static instance? (interfaces can't declare statics in TS —
  // the singleton getter lives on each concrete connector class instead)
}

69
server/src/services/data_service/db_connector/mongodb_connector.ts

@ -0,0 +1,69 @@
import { Collection } from '../collection';
import { DbConnector } from './index';
import { dbCollection } from '../../data_layer';
import * as config from '../../../config';
import * as mongodb from 'mongodb';
/**
 * DbConnector backed by MongoDB. Singleton: use `MongodbConnector.instance`.
 * Connection parameters come from config.HASTIC_DB_CONFIG.
 */
export class MongodbConnector implements DbConnector {
  private static _instance: MongodbConnector;

  private _db = new Map<Collection, dbCollection>();

  // Logical collection -> MongoDB collection name.
  private static COLLECTION_TO_NAME_MAPPING = new Map<Collection, string>([
    [Collection.ANALYTIC_UNITS, 'analytic_units'],
    [Collection.ANALYTIC_UNIT_CACHES, 'analytic_unit_caches'],
    [Collection.SEGMENTS, 'segments'],
    [Collection.THRESHOLD, 'threshold'],
    [Collection.DETECTION_SPANS, 'detection_spans'],
    [Collection.DB_META, 'db_meta']
  ]);

  private _client: mongodb.MongoClient;

  // Private: use `MongodbConnector.instance`.
  private constructor() { }

  /**
   * Connects to MongoDB and populates `db` with one collection handle
   * per Collection member.
   * @throws the underlying driver error when the connection fails.
   */
  async init(): Promise<void> {
    const dbConfig = config.HASTIC_DB_CONFIG;
    // Credentials must be percent-encoded: characters such as '@', ':' or '/'
    // in the user/password would otherwise corrupt the connection URI.
    const user = encodeURIComponent(dbConfig.user);
    const password = encodeURIComponent(dbConfig.password);
    const uri = `mongodb://${user}:${password}@${dbConfig.url}`;
    const auth = {
      user: dbConfig.user,
      password: dbConfig.password
    };
    this._client = new mongodb.MongoClient(uri, {
      useNewUrlParser: true,
      auth,
      autoReconnect: true,
      useUnifiedTopology: true,
      // TODO: it should be configurable
      authMechanism: 'SCRAM-SHA-1',
      authSource: dbConfig.dbName
    });
    try {
      const client: mongodb.MongoClient = await this._client.connect();
      const hasticDb: mongodb.Db = client.db(dbConfig.dbName);
      MongodbConnector.COLLECTION_TO_NAME_MAPPING.forEach(
        (name: string, collection: Collection) => {
          this._db.set(collection, hasticDb.collection(name));
        }
      );
    } catch(err) {
      // Errors belong on stderr, not stdout.
      console.error(`got error while connecting to MongoDB: ${err}`);
      // TODO: throw a better error, e.g.: ServiceInitializationError
      throw err;
    }
  }

  get db(): Map<Collection, dbCollection> {
    return this._db;
  }

  // Lazily-created singleton accessor.
  static get instance(): MongodbConnector {
    if(this._instance === undefined) {
      this._instance = new this();
    }
    return this._instance;
  }
}

67
server/src/services/data_service/db_connector/nedb_connector.ts

@ -0,0 +1,67 @@
import { Collection } from '../collection';
import { DbConnector } from './index';
import { dbCollection } from '../../data_layer';
import * as config from '../../../config';
import * as nedb from 'nedb';
import * as fs from 'fs';
// Options forwarded to the `nedb` Datastore constructor for one collection:
// the backing file and whether createdAt/updatedAt timestamps are maintained.
type NedbCollectionConfig = {
  filename: string,
  timestampData?: boolean
};
// Ensure `path` exists as a directory, creating it (and logging) when absent.
function maybeCreateDir(path: string): void {
  if(!fs.existsSync(path)) {
    console.log('data service: mkdir: ' + path);
    fs.mkdirSync(path);
  }
}
// Create every data folder Hastic writes to (currently only DATA_PATH).
function checkDataFolders(): void {
  const requiredDirs = [
    config.DATA_PATH
  ];
  for(const dir of requiredDirs) {
    maybeCreateDir(dir);
  }
}
/**
 * DbConnector backed by NeDB datastores (file-based or in-memory).
 * Singleton: use `NedbConnector.instance`.
 */
export class NedbConnector implements DbConnector {
  private static _instance: NedbConnector;

  private _db = new Map<Collection, dbCollection>();

  // Logical collection -> datastore options (file location, timestamping).
  private static COLLECTION_TO_CONFIG_MAPPING = new Map<Collection, NedbCollectionConfig>([
    [Collection.ANALYTIC_UNITS, { filename: config.ANALYTIC_UNITS_DATABASE_PATH, timestampData: true }],
    [Collection.ANALYTIC_UNIT_CACHES, { filename: config.ANALYTIC_UNIT_CACHES_DATABASE_PATH }],
    [Collection.SEGMENTS, { filename: config.SEGMENTS_DATABASE_PATH }],
    [Collection.THRESHOLD, { filename: config.THRESHOLD_DATABASE_PATH }],
    [Collection.DETECTION_SPANS, { filename: config.DETECTION_SPANS_DATABASE_PATH }],
    [Collection.DB_META, { filename: config.DB_META_PATH }],
  ]);

  // Private: use `NedbConnector.instance` (keeps the class a true singleton,
  // consistent with MongodbConnector).
  private constructor() { }

  /**
   * Creates the data folders and opens one NeDB datastore per Collection.
   * HASTIC_DB_IN_MEMORY switches all datastores to in-memory mode.
   */
  async init(): Promise<void> {
    checkDataFolders();
    const inMemoryOnly = config.HASTIC_DB_IN_MEMORY;
    // TODO: `autoload: true` can fail internally; consider calling
    // `loadDatabase` explicitly so load errors are surfaced to the caller.
    // Note: the callback parameter is named `collectionConfig` (not `config`)
    // to avoid shadowing the imported config module.
    NedbConnector.COLLECTION_TO_CONFIG_MAPPING.forEach(
      (collectionConfig: NedbCollectionConfig, collection: Collection) => {
        this._db.set(collection, new nedb({ ...collectionConfig, autoload: true, inMemoryOnly }));
      }
    );
  }

  get db(): Map<Collection, dbCollection> {
    return this._db;
  }

  // Lazily-created singleton accessor.
  static get instance(): NedbConnector {
    if (this._instance === undefined) {
      this._instance = new this();
    }
    return this._instance;
  }
}

98
server/src/services/data_service/index.ts

@ -1,30 +1,11 @@
import { getDbQueryWrapper, dbCollection, DBType } from '../data_layer';
import * as config from '../../config';
import { Collection } from './collection';
import { getDbQueryWrapper, dbCollection } from '../data_layer';
import { DbConnector } from './db_connector';
import { DbConnectorFactory } from './db_connector/factory';
import * as nedb from 'nedb';
import * as fs from 'fs';
import * as mongodb from 'mongodb';
import * as deasync from 'deasync';
export enum Collection {
ANALYTIC_UNITS,
ANALYTIC_UNIT_CACHES,
SEGMENTS,
THRESHOLD,
DETECTION_SPANS,
DB_META
};
const COLLECTION_TO_NAME_MAPPING = new Map<Collection, string>([
[Collection.ANALYTIC_UNITS, 'analytic_units'],
[Collection.ANALYTIC_UNIT_CACHES, 'analytic_unit_caches'],
[Collection.SEGMENTS, 'segments'],
[Collection.THRESHOLD, 'threshold'],
[Collection.DETECTION_SPANS, 'detection_spans'],
[Collection.DB_META, 'db_meta']
]);
export enum SortingOrder { ASCENDING = 1, DESCENDING = -1 };
/**
@ -44,8 +25,7 @@ export type DBQ = {
}
const queryWrapper = getDbQueryWrapper();
const db = new Map<Collection, dbCollection>();
let mongoClient: mongodb.MongoClient;
let db: Map<Collection, dbCollection>;
function dbCollectionFromCollection(collection: Collection): dbCollection {
let dbCollection = db.get(collection);
@ -69,74 +49,10 @@ export function makeDBQ(collection: Collection): DBQ {
}
function maybeCreateDir(path: string): void {
if(fs.existsSync(path)) {
return;
}
console.log('data service: mkdir: ' + path);
fs.mkdirSync(path);
}
function checkDataFolders(): void {
[
config.DATA_PATH
].forEach(maybeCreateDir);
}
async function connectToDb() {
if(config.HASTIC_DB_CONNECTION_TYPE === DBType.nedb) {
checkDataFolders();
const inMemoryOnly = config.HASTIC_DB_IN_MEMORY;
console.log('NeDB is used as the storage');
// TODO: it's better if models request db which we create if it`s needed
db.set(Collection.ANALYTIC_UNITS, new nedb({ filename: config.ANALYTIC_UNITS_DATABASE_PATH, autoload: true, timestampData: true, inMemoryOnly}));
db.set(Collection.ANALYTIC_UNIT_CACHES, new nedb({ filename: config.ANALYTIC_UNIT_CACHES_DATABASE_PATH, autoload: true, inMemoryOnly}));
db.set(Collection.SEGMENTS, new nedb({ filename: config.SEGMENTS_DATABASE_PATH, autoload: true, inMemoryOnly}));
db.set(Collection.THRESHOLD, new nedb({ filename: config.THRESHOLD_DATABASE_PATH, autoload: true, inMemoryOnly}));
db.set(Collection.DETECTION_SPANS, new nedb({ filename: config.DETECTION_SPANS_DATABASE_PATH, autoload: true, inMemoryOnly}));
db.set(Collection.DB_META, new nedb({ filename: config.DB_META_PATH, autoload: true, inMemoryOnly}));
} else if(config.HASTIC_DB_CONNECTION_TYPE === DBType.mongodb) {
console.log('MongoDB is used as the storage');
const dbConfig = config.HASTIC_DB_CONFIG;
const uri = `mongodb://${dbConfig.user}:${dbConfig.password}@${dbConfig.url}`;
const auth = {
user: dbConfig.user,
password: dbConfig.password
};
mongoClient = new mongodb.MongoClient(uri, {
useNewUrlParser: true,
auth,
autoReconnect: true,
useUnifiedTopology: true,
authMechanism: 'SCRAM-SHA-1',
authSource: dbConfig.dbName
});
try {
const client: mongodb.MongoClient = await mongoClient.connect();
const hasticDb: mongodb.Db = client.db(dbConfig.dbName);
COLLECTION_TO_NAME_MAPPING.forEach((name, collection) => {
db.set(collection, hasticDb.collection(name));
});
} catch(err) {
console.log(`got error while connect to MongoDB ${err}`);
throw err;
}
} else {
throw new Error(
`"${config.HASTIC_DB_CONNECTION_TYPE}" HASTIC_DB_CONNECTION_TYPE is not supported. Possible values: "nedb", "mongodb"`
);
}
}
export async function closeDb() {
if(mongoClient !== undefined && mongoClient.isConnected) {
await mongoClient.close();
}
}
let done = false;
connectToDb().then(() => {
DbConnectorFactory.getDbConnector().then((connector: DbConnector) => {
done = true;
db = connector.db;
}).catch((err) => {
console.log(`data service got error while connect to data base ${err}`);
//TODO: choose best practice for error handling

3
server/src/services/data_service/migrations.ts

@ -7,7 +7,8 @@
Note: do not import code from other modules here because it can be changed
*/
import { Collection, makeDBQ } from './index';
import { Collection } from './collection';
import { makeDBQ } from './index';
import * as _ from 'lodash';

Loading…
Cancel
Save