
data_service, renamings and detectors imports

Alexey Velikiy, 7 years ago · commit aba8f97a8d
9 changed files (lines changed in parentheses):

  1. analytics/analytic_unit_worker.py (7)
  2. analytics/detectors/__init__.py (5)
  3. analytics/detectors/general_detector.py (4)
  4. analytics/detectors/jump_detector.py (0, renamed)
  5. analytics/detectors/pattern_detection_model.py (4)
  6. analytics/grafana_data_provider.py (2, renamed)
  7. analytics/server.py (21)
  8. analytics/services/__init__.py (2)
  9. analytics/services/data_service.py (9)

analytics/analytic_unit_worker.py

@@ -1,6 +1,5 @@
 import config
-from detectors.general_detector import GeneralDetector
-from detectors.pattern_detection_model import PatternDetectionModel
+import detectors
 import json
 import logging
 import sys
@@ -80,8 +79,8 @@ class AnalyticUnitWorker(object):
     def get_model(self, analytic_unit_id, pattern_type):
         if analytic_unit_id not in self.models_cache:
             if pattern_type == 'general':
-                model = GeneralDetector(analytic_unit_id)
+                model = detectors.GeneralDetector(analytic_unit_id)
             else:
-                model = PatternDetectionModel(analytic_unit_id, pattern_type)
+                model = detectors.PatternDetectionModel(analytic_unit_id, pattern_type)
             self.models_cache[analytic_unit_id] = model
         return self.models_cache[analytic_unit_id]
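For context on the hunk above: get_model builds a detector lazily and memoizes it per analytic unit id, so repeated tasks for the same unit reuse one instance. A runnable sketch of the same caching pattern with stand-in detector classes (the stubs are illustrative, not the repository's code):

class GeneralDetector:
    def __init__(self, unit_id):
        self.unit_id = unit_id

class PatternDetectionModel:
    def __init__(self, unit_id, pattern_type):
        self.unit_id = unit_id
        self.pattern_type = pattern_type

models_cache = {}

def get_model(analytic_unit_id, pattern_type):
    # Construct the detector on first request, then reuse it.
    if analytic_unit_id not in models_cache:
        if pattern_type == 'general':
            model = GeneralDetector(analytic_unit_id)
        else:
            model = PatternDetectionModel(analytic_unit_id, pattern_type)
        models_cache[analytic_unit_id] = model
    return models_cache[analytic_unit_id]

# Same id twice yields the same cached instance.
assert get_model('u1', 'general') is get_model('u1', 'general')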

analytics/detectors/__init__.py

@@ -0,0 +1,5 @@
+from detectors.general_detector import GeneralDetector
+from detectors.pattern_detection_model import PatternDetectionModel
+from detectors.peaks_detector import PeaksDetector
+from detectors.step_detector import StepDetector
+from detectors.jump_detector import Jumpdetector
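These five re-exports make detectors a package facade: analytic_unit_worker.py can now do a single `import detectors` and reach every class through the package, without knowing which module defines what (note the commit exports the name Jumpdetector exactly as spelled). Assuming this commit's layout, caller-side usage looks like the following; the constructor arguments are placeholders, not values from the repo:

import detectors

# One import exposes every detector class re-exported by the package.
general = detectors.GeneralDetector('unit-1')
pattern = detectors.PatternDetectionModel('unit-2', 'peaks')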

analytics/detectors/general_detector.py

@@ -1,4 +1,4 @@
-from data_provider import DataProvider
+from grafana_data_provider import GrafanaDataProvider
 from data_preprocessor import data_preprocessor
 import pandas as pd
 import logging
@@ -40,7 +40,7 @@ class GeneralDetector:
         with open(target_filename, 'r') as file:
             target = json.load(file)
-        self.data_prov = DataProvider(datasource, target, dataset_filename)
+        self.data_prov = GrafanaDataProvider(datasource, target, dataset_filename)
         self.preprocessor = data_preprocessor(self.data_prov, augmented_path)
         self.model = None

analytics/jump_detector.py → analytics/detectors/jump_detector.py (renamed, 0 lines changed)

analytics/detectors/pattern_detection_model.py

@@ -1,7 +1,7 @@
 from detectors.step_detector import StepDetector
 from detectors.peaks_detector import PeaksDetector
-from data_provider import DataProvider
+from grafana_data_provider import GrafanaDataProvider
 import logging
 from urllib.parse import urlparse
@@ -47,7 +47,7 @@ class PatternDetectionModel:
         with open(target_filename, 'r') as file:
             target = json.load(file)
-        self.data_prov = DataProvider(datasource, target, dataset_filename)
+        self.data_prov = GrafanaDataProvider(datasource, target, dataset_filename)
         self.model = None
         self.__load_model(pattern_type)

analytics/data_provider.py → analytics/grafana_data_provider.py

@@ -10,7 +10,7 @@ from config import HASTIC_API_KEY
 MS_IN_WEEK = 604800000

-class DataProvider:
+class GrafanaDataProvider:
     chunk_size = 50000

     def __init__(self, datasource, target, data_filename):
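The provider rename is mechanical but clarifies intent: this class talks to Grafana specifically, and the surrounding constants (MS_IN_WEEK, chunk_size) suggest it pages queries over millisecond time ranges. A hypothetical sketch of the kind of time-range slicing such a provider performs; the helper name is an assumption, not the repository's API:

MS_IN_WEEK = 604800000  # milliseconds in one week, as in the module above

def week_chunks(start_ms, end_ms):
    # Yield (start, end) windows no longer than one week each,
    # the sort of slicing a chunked Grafana provider would do.
    cur = start_ms
    while cur < end_ms:
        nxt = min(cur + MS_IN_WEEK, end_ms)
        yield cur, nxt
        cur = nxt

# Example: two and a half weeks starting at 0 -> three windows.
print(list(week_chunks(0, int(2.5 * MS_IN_WEEK))))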

analytics/server.py

@@ -3,17 +3,18 @@ import json
 import logging
 import sys
 import asyncio
-import services.server_service
+import services

 from analytic_unit_worker import AnalyticUnitWorker

 root = logging.getLogger()
 logger = logging.getLogger('SERVER')

 worker = None
 server_service = None
+data_service = None

 root.setLevel(logging.DEBUG)
@@ -24,7 +25,6 @@ ch.setFormatter(formatter)
 root.addHandler(ch)

 async def handle_task(text):
     try:
         task = json.loads(text)
@@ -44,12 +44,21 @@ async def handle_task(text):
     except Exception as e:
         logger.error("Exception: '%s'" % str(e))

+def init_services():
+    logger.info("Starting services...")
+    logger.info("Server...")
+    server_service = services.ServerService(handle_task)
+    logger.info("Ok")
+    logger.info("Data service...")
+    data_service = services.DataService(server_service)
+    logger.info("Ok")
+    return server_service, data_service
+
 if __name__ == "__main__":
     loop = asyncio.get_event_loop()
     logger.info("Starting worker...")
     worker = AnalyticUnitWorker()
     logger.info("Ok")
-    logger.info("Starting server...")
-    server_service = services.server_service.ServerService(handle_task)
-    logger.info("Ok")
+    server_service, data_service = init_services()
     loop.run_until_complete(server_service.handle_loop())
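The net effect in server.py: service construction moves out of the __main__ block into init_services, which wires DataService on top of ServerService and returns both. A runnable sketch of the same startup shape with stub services (the stubs stand in for the real services package):

import asyncio
import logging

logger = logging.getLogger('SERVER')

class ServerService:
    # Stand-in for services.ServerService: remembers the task callback.
    def __init__(self, on_task):
        self.on_task = on_task

    async def handle_loop(self):
        # The real service would read tasks off a socket; we fake one task.
        await self.on_task('{"type": "ping"}')

class DataService:
    # Stand-in for services.DataService: layered on the server service.
    def __init__(self, server_service):
        self.server_service = server_service

async def handle_task(text):
    logger.info("got task: %s", text)

def init_services():
    server_service = ServerService(handle_task)
    data_service = DataService(server_service)
    return server_service, data_service

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    server_service, data_service = init_services()
    asyncio.get_event_loop().run_until_complete(server_service.handle_loop())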

analytics/services/__init__.py

@@ -0,0 +1,2 @@
+from services.server_service import ServerService
+from services.data_service import DataService

analytics/services/data_service.py

@@ -0,0 +1,9 @@
+class DataService:
+    def __init__(self, server_service):
+        self.server_service = server_service
+
+    async def safe_file(filename, content):
+        pass
+
+    async def load_file(filename, content):
+        pass
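DataService starts life as a stub: it keeps a reference to the server service and both coroutines just pass (as committed they also omit self, and "safe_file" reads like a typo for "save_file"). Purely as an assumption about where this might go, one way to fill in the file stubs without blocking the event loop; nothing below is in the commit, and load_file's signature drops the unused content argument:

import asyncio

class DataService:
    def __init__(self, server_service):
        self.server_service = server_service

    async def safe_file(self, filename, content):
        # Push blocking file I/O onto a worker thread so the loop stays free.
        loop = asyncio.get_event_loop()
        await loop.run_in_executor(None, self._write, filename, content)

    async def load_file(self, filename):
        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, self._read, filename)

    def _write(self, filename, content):
        with open(filename, 'w') as f:
            f.write(content)

    def _read(self, filename):
        with open(filename) as f:
            return f.read()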