
Merge branch 'master' of github.com:hastic/hastic-server

amper43 committed 6 years ago · parent commit d2d3702b41 · branch pull/1/head
14 changed files (changed line counts in parentheses):

1. Dockerfile (29)
2. analytics/.dockerignore (5)
3. analytics/Dockerfile (20)
4. analytics/config.py (2)
5. analytics/models/drop_model.py (2)
6. analytics/models/general_model.py (30)
7. analytics/models/jump_model.py (2)
8. analytics/models/peak_model.py (2)
9. analytics/models/trough_model.py (2)
10. docker-compose.yml (19)
11. server/.dockerignore (4)
12. server/Dockerfile (26)
13. server/src/config.ts (1)
14. server/src/services/analytics_service.ts (17)

Dockerfile (29)

@@ -1,29 +0,0 @@
-FROM python:3.6.6
-EXPOSE 8000
-VOLUME [ "/var/www/data" ]
-COPY . /var/www
-WORKDIR /var/www/analytics
-RUN pip install -r requirements.txt
-RUN apt-get update && apt-get install -y \
-    apt-utils \
-    gnupg \
-    curl \
-    python \
-    make \
-    g++ \
-    git
-RUN curl -sL https://deb.nodesource.com/setup_8.x | bash -
-RUN apt-get update && apt-get install -y nodejs
-WORKDIR /var/www/server
-RUN npm install && npm run build
-CMD ["npm", "start"]

analytics/.dockerignore (5)

@@ -0,0 +1,5 @@
+.git
+npm-debug
+node_modules
+__pycache__
+.vscode

analytics/Dockerfile (20)

@@ -0,0 +1,20 @@
+FROM python:3.6.6
+
+WORKDIR /var/www/analytics
+
+COPY ./requirements.txt /var/www/analytics
+RUN pip install -r requirements.txt \
+    && apt-get update && apt-get install -y \
+    apt-utils \
+    gnupg \
+    curl \
+    make \
+    g++ \
+    git
+
+VOLUME [ "/var/www/data" ]
+
+COPY . /var/www/analytics/
+
+CMD ["python", "server.py"]

analytics/config.py (2)

@@ -25,4 +25,4 @@ def get_config_field(field, default_val = None):
     raise Exception('Please configure {}'.format(field))

 ZMQ_DEV_PORT = get_config_field('ZMQ_DEV_PORT', '8002')
-ZMQ_CONNECTION_STRING = get_config_field('ZMQ_CONNECTION_STRING', 'tcp://*:%s' % ZMQ_DEV_PORT)
+ZMQ_CONNECTION_STRING = get_config_field('ZMQ_CONNECTION_STRING', 'tcp://0.0.0.0:%s' % ZMQ_DEV_PORT)
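
The default moves from the ZeroMQ wildcard bind address to an explicit 0.0.0.0, so the socket listens on all interfaces of the analytics container and the server container can reach it over the Docker network. For illustration, a minimal sketch of how such a config helper typically resolves a field, assuming a plain environment-variable lookup (the real helper may also consult a config file):

```python
import os

def get_config_field(field, default_val=None):
    # An explicitly set environment variable wins; otherwise fall back
    # to the provided default, and fail loudly if there is neither.
    value = os.environ.get(field)
    if value is not None:
        return value
    if default_val is not None:
        return default_val
    raise Exception('Please configure {}'.format(field))

ZMQ_DEV_PORT = get_config_field('ZMQ_DEV_PORT', '8002')
# 'tcp://0.0.0.0:8002' binds on all interfaces inside the container.
ZMQ_CONNECTION_STRING = get_config_field('ZMQ_CONNECTION_STRING', 'tcp://0.0.0.0:%s' % ZMQ_DEV_PORT)
```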

analytics/models/drop_model.py (2)

@@ -172,7 +172,7 @@ class DropModel(Model):
             conv = scipy.signal.fftconvolve(convol_data, pattern_data)
             if conv[self.state['WINDOW_SIZE']*2] > self.state['convolve_max'] * 1.2 or conv[self.state['WINDOW_SIZE']*2] < self.state['convolve_min'] * 0.8:
                 delete_list.append(segment)
-            if max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
+            elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
                 delete_list.append(segment)
         else:
             delete_list.append(segment)
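
The one-line fix turns a second independent `if` into an `elif`: previously, a segment whose convolution both fell outside the learned min/max band and landed inside the deleted-pattern band was appended to delete_list twice, and the later `segments.remove(item)` pass could then fail on the duplicate. The same fix recurs in the jump, peak, and trough models below. A standalone sketch of the corrected decision, with hypothetical names (the real models differ in which convolution statistic the first test uses and in the band multipliers):

```python
def should_delete(conv_stat, state):
    # Too far outside the band learned from confirmed patterns -> delete.
    if conv_stat > state['convolve_max'] * 1.2 or conv_stat < state['convolve_min'] * 0.8:
        return True
    # elif, not a second if: each segment must be counted at most once.
    # Close to the band learned from user-deleted patterns -> also delete.
    elif state['conv_del_min'] * 0.98 < conv_stat < state['conv_del_max'] * 1.02:
        return True
    return False
```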

analytics/models/general_model.py (30)

@@ -23,6 +23,8 @@ class GeneralModel(Model):
             'convolve_max': 240,
             'convolve_min': 200,
             'WINDOW_SIZE': 240,
+            'conv_del_min': 100,
+            'conv_del_max': 120,
         }
         self.all_conv = []
@@ -38,7 +40,7 @@ class GeneralModel(Model):
             segment_data = data[segment_from_index: segment_to_index + 1]
             if len(segment_data) == 0:
                 continue
-            x = segment_from_index + int((segment_to_index - segment_from_index) / 2)
+            x = segment_from_index + math.ceil((segment_to_index - segment_from_index) / 2)
             self.ipats.append(x)
             segment_data = data[x - self.state['WINDOW_SIZE'] : x + self.state['WINDOW_SIZE']]
             segment_min = min(segment_data)
@@ -53,6 +55,20 @@ class GeneralModel(Model):
             convolve_data = scipy.signal.fftconvolve(labeled_data, self.model_gen)
             convolve_list.append(max(auto_convolve))
             convolve_list.append(max(convolve_data))
+
+        del_conv_list = []
+        for segment in segments:
+            if segment['deleted']:
+                segment_from_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['from'], unit='ms'))
+                segment_to_index = utils.timestamp_to_index(dataframe, pd.to_datetime(segment['to'], unit='ms'))
+                segment_data = data[segment_from_index: segment_to_index + 1]
+                if len(segment_data) == 0:
+                    continue
+                del_mid_index = segment_from_index + math.ceil((segment_to_index - segment_from_index) / 2)
+                deleted_pat = data[del_mid_index - self.state['WINDOW_SIZE']: del_mid_index + self.state['WINDOW_SIZE'] + 1]
+                deleted_pat = deleted_pat - min(deleted_pat)
+                del_conv_pat = scipy.signal.fftconvolve(deleted_pat, self.model_gen)
+                del_conv_list.append(max(del_conv_pat))

         if len(convolve_list) > 0:
             self.state['convolve_max'] = float(max(convolve_list))
@@ -63,6 +79,16 @@ class GeneralModel(Model):
             self.state['convolve_min'] = float(min(convolve_list))
         else:
             self.state['convolve_min'] = self.state['WINDOW_SIZE'] / 3
+        if len(del_conv_list) > 0:
+            self.state['conv_del_min'] = float(min(del_conv_list))
+        else:
+            self.state['conv_del_min'] = self.state['WINDOW_SIZE']
+        if len(del_conv_list) > 0:
+            self.state['conv_del_max'] = float(max(del_conv_list))
+        else:
+            self.state['conv_del_max'] = self.state['WINDOW_SIZE']

     def do_predict(self, dataframe: pd.DataFrame) -> list:
         data = dataframe['value']
@@ -88,6 +114,8 @@ class GeneralModel(Model):
         for val in segments:
             if self.all_conv[val] < self.state['convolve_min'] * 0.8:
                 delete_list.append(val)
+            elif (self.all_conv[val] < self.state['conv_del_max'] * 1.02 and self.all_conv[val] > self.state['conv_del_min'] * 0.98):
+                delete_list.append(val)

         for item in delete_list:
             segments.remove(item)
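
Taken together, do_fit now learns a second band, [conv_del_min, conv_del_max], from the convolution maxima of segments the user marked as deleted, and do_predict discards candidates that fall inside it. A minimal sketch of the learning step, assuming the deleted windows are already extracted as NumPy arrays (the real code slices them out of the dataframe around each segment's midpoint):

```python
import numpy as np
import scipy.signal

def learn_deleted_band(deleted_windows, pattern, window_size):
    """Learn the [conv_del_min, conv_del_max] band from user-deleted windows."""
    del_conv_list = []
    for win in deleted_windows:
        win = win - np.min(win)  # shift each window to a zero baseline
        conv = scipy.signal.fftconvolve(win, pattern)
        del_conv_list.append(np.max(conv))
    if del_conv_list:
        return float(min(del_conv_list)), float(max(del_conv_list))
    # No deleted segments labeled yet: fall back to WINDOW_SIZE, as the model does.
    return float(window_size), float(window_size)
```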

analytics/models/jump_model.py (2)

@@ -173,7 +173,7 @@ class JumpModel(Model):
             conv = scipy.signal.fftconvolve(convol_data, pattern_data)
             if max(conv) > self.state['convolve_max'] * 1.2 or max(conv) < self.state['convolve_min'] * 0.8:
                 delete_list.append(segment)
-            if max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
+            elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
                 delete_list.append(segment)
         else:
             delete_list.append(segment)

analytics/models/peak_model.py (2)

@@ -134,7 +134,7 @@ class PeakModel(Model):
             conv = scipy.signal.fftconvolve(convol_data, pattern_data)
             if max(conv) > self.state['convolve_max'] * 1.05 or max(conv) < self.state['convolve_min'] * 0.95:
                 delete_list.append(segment)
-            if max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
+            elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
                 delete_list.append(segment)
         else:
             delete_list.append(segment)

analytics/models/trough_model.py (2)

@@ -136,7 +136,7 @@ class TroughModel(Model):
             conv = scipy.signal.fftconvolve(convol_data, pattern_data)
             if max(conv) > self.state['convolve_max'] * 1.1 or max(conv) < self.state['convolve_min'] * 0.9:
                 delete_list.append(segment)
-            if max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
+            elif max(conv) < self.state['conv_del_max'] * 1.02 and max(conv) > self.state['conv_del_min'] * 0.98:
                 delete_list.append(segment)
         else:
             delete_list.append(segment)

docker-compose.yml (19)

@@ -0,0 +1,19 @@
+version: '2'
+
+services:
+  server:
+    image: hastic/server:latest
+    build: server
+    environment:
+      HASTIC_API_KEY: ${HASTIC_API_KEY}
+      ZMQ_CONNECTION_STRING: tcp://analytics:8002
+    ports:
+      - 8000:8000
+    volumes:
+      - data-volume:/var/www/data
+  analytics:
+    image: hastic/analytics:latest
+    build: analytics
+
+volumes:
+  data-volume:
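
With this file, `docker-compose up` builds both images and starts the containers on a shared network; Compose resolves service names via its internal DNS, which is why the server's ZMQ_CONNECTION_STRING points at tcp://analytics:8002, and the named data-volume keeps the server's data across container restarts.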

server/.dockerignore (4)

@@ -0,0 +1,4 @@
+.git
+node_modules
+npm-debug
+.vscode

server/Dockerfile (26)

@@ -0,0 +1,26 @@
+FROM python:3.6.6
+
+RUN apt-get install curl \
+    bash \
+    gnupg \
+    make \
+    g++ \
+    && curl -sL https://deb.nodesource.com/setup_8.x | bash - \
+    && apt-get update \
+    && apt-get install nodejs
+
+VOLUME [ "/var/www/data" ]
+
+WORKDIR /var/www/server
+
+COPY package.json /var/www/server
+RUN npm install
+
+COPY . /var/www/server
+RUN npm run build
+
+ENV INSIDE_DOCKER true
+
+CMD ["npm", "start"]

server/src/config.ts (1)

@@ -20,6 +20,7 @@ export const HASTIC_PORT = getConfigField('HASTIC_PORT', '8000');
 export const ZMQ_CONNECTION_STRING = getConfigField('ZMQ_CONNECTION_STRING', null);
 export const ZMQ_IPC_PATH = getConfigField('ZMQ_IPC_PATH', path.join(os.tmpdir(), 'hastic'));
 export const ZMQ_DEV_PORT = getConfigField('ZMQ_DEV_PORT', '8002');
+export const ZMQ_HOST = getConfigField('ZMQ_HOST', '127.0.0.1');
 export const HASTIC_API_KEY = getConfigField('HASTIC_API_KEY');

 export const ANLYTICS_PING_INTERVAL = 500; // ms

server/src/services/analytics_service.ts (17)

@@ -18,8 +18,12 @@ export class AnalyticsService {
   private _ipcPath: string = null;
   private _analyticsPinger: NodeJS.Timer = null;
   private _isClosed = false;
+  private _productionMode = false;
+  private _inDocker = false;

   constructor(private _onMessage: (message: AnalyticsMessage) => void) {
+    this._productionMode = process.env.NODE_ENV !== 'development';
+    this._inDocker = process.env.INSIDE_DOCKER !== undefined;
     this._init();
   }
@@ -67,10 +71,12 @@ export class AnalyticsService {
   private async _init() {
     this._requester = zmq.socket('pair');
-    let productionMode = process.env.NODE_ENV !== 'development';

-    this._zmqConnectionString = `tcp://127.0.0.1:${config.ZMQ_DEV_PORT}`; // debug mode
-    if(productionMode) {
+    this._zmqConnectionString = `tcp://${config.ZMQ_HOST}:${config.ZMQ_DEV_PORT}`; // debug mode
+    if(this._inDocker) {
+      this._zmqConnectionString = config.ZMQ_CONNECTION_STRING;
+    } else if(this._productionMode && !this._inDocker) {
       this._zmqConnectionString = config.ZMQ_CONNECTION_STRING;
       if(this._zmqConnectionString === null) {
         var createResult = await AnalyticsService.createIPCAddress();
@@ -84,7 +90,7 @@ export class AnalyticsService {
     this._requester.on("message", this._onAnalyticsMessage.bind(this));
     console.log('Ok');

-    if(productionMode) {
+    if(this._productionMode && !this._inDocker) {
       console.log('Creating analytics process...');
       try {
         var cp = await AnalyticsService._runAnalyticsProcess(this._zmqConnectionString);
@@ -165,12 +171,13 @@ export class AnalyticsService {
   private async _onAnalyticsDown() {
     console.log('Analytics is down');
-    if(process.env.NODE_ENV !== 'development') {
+    if(this._productionMode && !this._inDocker) {
       await AnalyticsService._runAnalyticsProcess(this._zmqConnectionString);
     }
   }

   private _onAnalyticsMessage(data: any) {
     let text = data.toString();

     if(text === 'PONG') {
       this._pingResponded = true;
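
Net effect of these changes: inside Docker (INSIDE_DOCKER is set by the server Dockerfile) the service always connects to the address in ZMQ_CONNECTION_STRING and never spawns a local analytics process, since analytics runs in its own container; outside Docker, production mode keeps the old behavior of spawning the process and falling back to an IPC address when no connection string is configured, while development mode connects to ZMQ_HOST:ZMQ_DEV_PORT.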
