From 65f77676c202d43e7d9da6f50ff52269f03823a7 Mon Sep 17 00:00:00 2001 From: rozetko Date: Wed, 3 Jun 2020 14:24:09 +0300 Subject: [PATCH] Abstract class for db connector #906 (#908) * db connector * fix imports * minor fixes * todos * minor codestyle fix * todo * rm extra logs * abstract class -> interface * singletons Co-authored-by: corpglory-dev --- .../src/models/analytic_unit_cache_model.ts | 3 +- server/src/models/analytic_units/db.ts | 3 +- server/src/models/detection_model.ts | 3 +- server/src/models/segment_model.ts | 3 +- .../src/services/data_service/collection.ts | 8 ++ .../data_service/db_connector/factory.ts | 39 ++++++++ .../data_service/db_connector/index.ts | 8 ++ .../db_connector/mongodb_connector.ts | 69 +++++++++++++ .../db_connector/nedb_connector.ts | 67 +++++++++++++ server/src/services/data_service/index.ts | 98 ++----------------- .../src/services/data_service/migrations.ts | 3 +- 11 files changed, 208 insertions(+), 96 deletions(-) create mode 100644 server/src/services/data_service/collection.ts create mode 100644 server/src/services/data_service/db_connector/factory.ts create mode 100644 server/src/services/data_service/db_connector/index.ts create mode 100644 server/src/services/data_service/db_connector/mongodb_connector.ts create mode 100644 server/src/services/data_service/db_connector/nedb_connector.ts diff --git a/server/src/models/analytic_unit_cache_model.ts b/server/src/models/analytic_unit_cache_model.ts index 97dbc1e..f5e5ad0 100644 --- a/server/src/models/analytic_unit_cache_model.ts +++ b/server/src/models/analytic_unit_cache_model.ts @@ -1,5 +1,6 @@ import { AnalyticUnitId, AnalyticUnit } from './analytic_units'; -import { Collection, makeDBQ } from '../services/data_service'; +import { Collection } from '../services/data_service/collection'; +import { makeDBQ } from '../services/data_service'; import * as _ from 'lodash'; diff --git a/server/src/models/analytic_units/db.ts b/server/src/models/analytic_units/db.ts index 
3463994..a56720c 100644 --- a/server/src/models/analytic_units/db.ts +++ b/server/src/models/analytic_units/db.ts @@ -1,7 +1,8 @@ import { createAnalyticUnitFromObject } from './utils'; import { AnalyticUnit } from './analytic_unit_model'; import { AnalyticUnitId, FindManyQuery } from './types'; -import { Collection, makeDBQ, SortingOrder } from '../../services/data_service'; +import { Collection } from '../../services/data_service/collection'; +import { makeDBQ, SortingOrder } from '../../services/data_service'; import { Metric } from 'grafana-datasource-kit'; diff --git a/server/src/models/detection_model.ts b/server/src/models/detection_model.ts index 483890f..cdb3cf6 100644 --- a/server/src/models/detection_model.ts +++ b/server/src/models/detection_model.ts @@ -1,5 +1,6 @@ import { AnalyticUnitId } from './analytic_units'; -import { Collection, makeDBQ } from '../services/data_service'; +import { Collection } from '../services/data_service/collection'; +import { makeDBQ } from '../services/data_service'; import * as _ from 'lodash'; diff --git a/server/src/models/segment_model.ts b/server/src/models/segment_model.ts index 0598c74..362c633 100644 --- a/server/src/models/segment_model.ts +++ b/server/src/models/segment_model.ts @@ -1,7 +1,8 @@ import { AnalyticUnitId } from './analytic_units'; import * as AnalyticUnit from '../models/analytic_units'; import * as AnalyticUnitCache from '../models/analytic_unit_cache_model'; -import { Collection, makeDBQ } from '../services/data_service'; +import { Collection } from '../services/data_service/collection'; +import { makeDBQ } from '../services/data_service'; import * as _ from 'lodash'; diff --git a/server/src/services/data_service/collection.ts b/server/src/services/data_service/collection.ts new file mode 100644 index 0000000..fa935ad --- /dev/null +++ b/server/src/services/data_service/collection.ts @@ -0,0 +1,8 @@ +export enum Collection { + ANALYTIC_UNITS, + ANALYTIC_UNIT_CACHES, + SEGMENTS, + THRESHOLD, + 
DETECTION_SPANS, + DB_META +}; diff --git a/server/src/services/data_service/db_connector/factory.ts b/server/src/services/data_service/db_connector/factory.ts new file mode 100644 index 0000000..d57da89 --- /dev/null +++ b/server/src/services/data_service/db_connector/factory.ts @@ -0,0 +1,39 @@ +import { DBType } from '../../data_layer'; +import { DbConnector } from './index'; +import { MongodbConnector } from './mongodb_connector'; +import { NedbConnector } from './nedb_connector'; + +import * as config from '../../../config'; + + +export class DbConnectorFactory { + private static _connector: DbConnector; + + private constructor() { } + + public static async getDbConnector(): Promise<DbConnector> { + if(this._connector !== undefined) { + return this._connector; + } + + let connector: DbConnector; + switch(config.HASTIC_DB_CONNECTION_TYPE) { + case DBType.nedb: + connector = NedbConnector.instance; + break; + + case DBType.mongodb: + connector = MongodbConnector.instance; + break; + + default: + throw new Error( + `"${config.HASTIC_DB_CONNECTION_TYPE}" HASTIC_DB_CONNECTION_TYPE is not supported. Possible values: "nedb", "mongodb"` + ); + } + + await connector.init(); + this._connector = connector; + return this._connector; + } +} diff --git a/server/src/services/data_service/db_connector/index.ts b/server/src/services/data_service/db_connector/index.ts new file mode 100644 index 0000000..1898df5 --- /dev/null +++ b/server/src/services/data_service/db_connector/index.ts @@ -0,0 +1,8 @@ +import { Collection } from '../collection'; +import { dbCollection } from '../../data_layer'; + +export interface DbConnector { + db: Map<Collection, dbCollection>; + init(): Promise<void>; + // TODO: static instance? 
+} diff --git a/server/src/services/data_service/db_connector/mongodb_connector.ts b/server/src/services/data_service/db_connector/mongodb_connector.ts new file mode 100644 index 0000000..8fb34da --- /dev/null +++ b/server/src/services/data_service/db_connector/mongodb_connector.ts @@ -0,0 +1,69 @@ +import { Collection } from '../collection'; +import { DbConnector } from './index'; +import { dbCollection } from '../../data_layer'; +import * as config from '../../../config'; + +import * as mongodb from 'mongodb'; + + +export class MongodbConnector implements DbConnector { + private static _instance: MongodbConnector; + + private _db = new Map<Collection, dbCollection>(); + + private static COLLECTION_TO_NAME_MAPPING = new Map<Collection, string>([ + [Collection.ANALYTIC_UNITS, 'analytic_units'], + [Collection.ANALYTIC_UNIT_CACHES, 'analytic_unit_caches'], + [Collection.SEGMENTS, 'segments'], + [Collection.THRESHOLD, 'threshold'], + [Collection.DETECTION_SPANS, 'detection_spans'], + [Collection.DB_META, 'db_meta'] + ]); + + private _client: mongodb.MongoClient; + + private constructor() { } + + async init(): Promise<void> { + const dbConfig = config.HASTIC_DB_CONFIG; + const uri = `mongodb://${dbConfig.user}:${dbConfig.password}@${dbConfig.url}`; + const auth = { + user: dbConfig.user, + password: dbConfig.password + }; + this._client = new mongodb.MongoClient(uri, { + useNewUrlParser: true, + auth, + autoReconnect: true, + useUnifiedTopology: true, + // TODO: it should be configurable + authMechanism: 'SCRAM-SHA-1', + authSource: dbConfig.dbName + }); + + try { + const client: mongodb.MongoClient = await this._client.connect(); + const hasticDb: mongodb.Db = client.db(dbConfig.dbName); + MongodbConnector.COLLECTION_TO_NAME_MAPPING.forEach( + (name: string, collection: Collection) => { + this._db.set(collection, hasticDb.collection(name)); + } + ); + } catch(err) { + console.log(`got error while connecting to MongoDB: ${err}`); + // TODO: throw a better error, e.g.: ServiceInitializationError + throw err; + } + } + + 
get db(): Map<Collection, dbCollection> { + return this._db; + } + + static get instance(): MongodbConnector { + if(this._instance === undefined) { + this._instance = new this(); + } + return this._instance; + } +} diff --git a/server/src/services/data_service/db_connector/nedb_connector.ts b/server/src/services/data_service/db_connector/nedb_connector.ts new file mode 100644 index 0000000..efd6e95 --- /dev/null +++ b/server/src/services/data_service/db_connector/nedb_connector.ts @@ -0,0 +1,67 @@ +import { Collection } from '../collection'; +import { DbConnector } from './index'; +import { dbCollection } from '../../data_layer'; +import * as config from '../../../config'; + +import * as nedb from 'nedb'; +import * as fs from 'fs'; + + +type NedbCollectionConfig = { + filename: string, + timestampData?: boolean +}; + +function maybeCreateDir(path: string): void { + if (fs.existsSync(path)) { + return; + } + console.log('data service: mkdir: ' + path); + fs.mkdirSync(path); +} + +function checkDataFolders(): void { + [ + config.DATA_PATH + ].forEach(maybeCreateDir); +} + +export class NedbConnector implements DbConnector { + private static _instance: NedbConnector; + + private _db = new Map<Collection, dbCollection>(); + + private static COLLECTION_TO_CONFIG_MAPPING = new Map<Collection, NedbCollectionConfig>([ + [Collection.ANALYTIC_UNITS, { filename: config.ANALYTIC_UNITS_DATABASE_PATH, timestampData: true }], + [Collection.ANALYTIC_UNIT_CACHES, { filename: config.ANALYTIC_UNIT_CACHES_DATABASE_PATH }], + [Collection.SEGMENTS, { filename: config.SEGMENTS_DATABASE_PATH }], + [Collection.THRESHOLD, { filename: config.THRESHOLD_DATABASE_PATH }], + [Collection.DETECTION_SPANS, { filename: config.DETECTION_SPANS_DATABASE_PATH }], + [Collection.DB_META, { filename: config.DB_META_PATH }], + ]); + + constructor() { } + + async init(): Promise<void> { + checkDataFolders(); + + const inMemoryOnly = config.HASTIC_DB_IN_MEMORY; + // TODO: it can throw an error, so we should catch it + NedbConnector.COLLECTION_TO_CONFIG_MAPPING.forEach( + (config: 
NedbCollectionConfig, collection: Collection) => { + this._db.set(collection, new nedb({ ...config, autoload: true, inMemoryOnly })); + } + ); + } + + get db(): Map<Collection, dbCollection> { + return this._db; + } + + static get instance(): NedbConnector { + if (this._instance === undefined) { + this._instance = new this(); + } + return this._instance; + } +} diff --git a/server/src/services/data_service/index.ts b/server/src/services/data_service/index.ts index d0a36f9..6ca683a 100644 --- a/server/src/services/data_service/index.ts +++ b/server/src/services/data_service/index.ts @@ -1,30 +1,11 @@ -import { getDbQueryWrapper, dbCollection, DBType } from '../data_layer'; -import * as config from '../../config'; +import { Collection } from './collection'; +import { getDbQueryWrapper, dbCollection } from '../data_layer'; +import { DbConnector } from './db_connector'; +import { DbConnectorFactory } from './db_connector/factory'; -import * as nedb from 'nedb'; -import * as fs from 'fs'; -import * as mongodb from 'mongodb'; import * as deasync from 'deasync'; -export enum Collection { - ANALYTIC_UNITS, - ANALYTIC_UNIT_CACHES, - SEGMENTS, - THRESHOLD, - DETECTION_SPANS, - DB_META -}; - -const COLLECTION_TO_NAME_MAPPING = new Map<Collection, string>([ - [Collection.ANALYTIC_UNITS, 'analytic_units'], - [Collection.ANALYTIC_UNIT_CACHES, 'analytic_unit_caches'], - [Collection.SEGMENTS, 'segments'], - [Collection.THRESHOLD, 'threshold'], - [Collection.DETECTION_SPANS, 'detection_spans'], - [Collection.DB_META, 'db_meta'] -]); - export enum SortingOrder { ASCENDING = 1, DESCENDING = -1 }; /** @@ -44,8 +25,7 @@ export type DBQ = { } const queryWrapper = getDbQueryWrapper(); -const db = new Map<Collection, dbCollection>(); -let mongoClient: mongodb.MongoClient; +let db: Map<Collection, dbCollection>; function dbCollectionFromCollection(collection: Collection): dbCollection { let dbCollection = db.get(collection); @@ -69,74 +49,10 @@ export function makeDBQ(collection: Collection): DBQ { } -function maybeCreateDir(path: string): void { - if(fs.existsSync(path)) { - return; - 
} - console.log('data service: mkdir: ' + path); - fs.mkdirSync(path); -} - -function checkDataFolders(): void { - [ - config.DATA_PATH - ].forEach(maybeCreateDir); -} - -async function connectToDb() { - if(config.HASTIC_DB_CONNECTION_TYPE === DBType.nedb) { - checkDataFolders(); - const inMemoryOnly = config.HASTIC_DB_IN_MEMORY; - console.log('NeDB is used as the storage'); - // TODO: it's better if models request db which we create if it`s needed - db.set(Collection.ANALYTIC_UNITS, new nedb({ filename: config.ANALYTIC_UNITS_DATABASE_PATH, autoload: true, timestampData: true, inMemoryOnly})); - db.set(Collection.ANALYTIC_UNIT_CACHES, new nedb({ filename: config.ANALYTIC_UNIT_CACHES_DATABASE_PATH, autoload: true, inMemoryOnly})); - db.set(Collection.SEGMENTS, new nedb({ filename: config.SEGMENTS_DATABASE_PATH, autoload: true, inMemoryOnly})); - db.set(Collection.THRESHOLD, new nedb({ filename: config.THRESHOLD_DATABASE_PATH, autoload: true, inMemoryOnly})); - db.set(Collection.DETECTION_SPANS, new nedb({ filename: config.DETECTION_SPANS_DATABASE_PATH, autoload: true, inMemoryOnly})); - db.set(Collection.DB_META, new nedb({ filename: config.DB_META_PATH, autoload: true, inMemoryOnly})); - } else if(config.HASTIC_DB_CONNECTION_TYPE === DBType.mongodb) { - console.log('MongoDB is used as the storage'); - const dbConfig = config.HASTIC_DB_CONFIG; - const uri = `mongodb://${dbConfig.user}:${dbConfig.password}@${dbConfig.url}`; - const auth = { - user: dbConfig.user, - password: dbConfig.password - }; - mongoClient = new mongodb.MongoClient(uri, { - useNewUrlParser: true, - auth, - autoReconnect: true, - useUnifiedTopology: true, - authMechanism: 'SCRAM-SHA-1', - authSource: dbConfig.dbName - }); - try { - const client: mongodb.MongoClient = await mongoClient.connect(); - const hasticDb: mongodb.Db = client.db(dbConfig.dbName); - COLLECTION_TO_NAME_MAPPING.forEach((name, collection) => { - db.set(collection, hasticDb.collection(name)); - }); - } catch(err) { - 
console.log(`got error while connect to MongoDB ${err}`); - throw err; - } - } else { - throw new Error( - `"${config.HASTIC_DB_CONNECTION_TYPE}" HASTIC_DB_CONNECTION_TYPE is not supported. Possible values: "nedb", "mongodb"` - ); - } -} - -export async function closeDb() { - if(mongoClient !== undefined && mongoClient.isConnected) { - await mongoClient.close(); - } -} - let done = false; -connectToDb().then(() => { +DbConnectorFactory.getDbConnector().then((connector: DbConnector) => { done = true; + db = connector.db; }).catch((err) => { console.log(`data service got error while connect to data base ${err}`); //TODO: choose best practice for error handling diff --git a/server/src/services/data_service/migrations.ts b/server/src/services/data_service/migrations.ts index 806f179..3ff822b 100644 --- a/server/src/services/data_service/migrations.ts +++ b/server/src/services/data_service/migrations.ts @@ -7,7 +7,8 @@ Note: do not import code from other modules here because it can be changed */ -import { Collection, makeDBQ } from './index'; +import { Collection } from './collection'; +import { makeDBQ } from './index'; import * as _ from 'lodash';