diff --git a/analytics/analytic_unit_worker.py b/analytics/analytic_unit_worker.py
index 4c6b528..60ae17c 100644
--- a/analytics/analytic_unit_worker.py
+++ b/analytics/analytic_unit_worker.py
@@ -1,6 +1,5 @@
 import config
-from detectors.general_detector import GeneralDetector
-from detectors.pattern_detection_model import PatternDetectionModel
+import detectors
 import json
 import logging
 import sys
@@ -80,8 +79,8 @@ class AnalyticUnitWorker(object):
     def get_model(self, analytic_unit_id, pattern_type):
         if analytic_unit_id not in self.models_cache:
             if pattern_type == 'general':
-                model = GeneralDetector(analytic_unit_id)
+                model = detectors.GeneralDetector(analytic_unit_id)
             else:
-                model = PatternDetectionModel(analytic_unit_id, pattern_type)
+                model = detectors.PatternDetectionModel(analytic_unit_id, pattern_type)
             self.models_cache[analytic_unit_id] = model
         return self.models_cache[analytic_unit_id]
diff --git a/analytics/detectors/__init__.py b/analytics/detectors/__init__.py
index e69de29..a7ce05f 100644
--- a/analytics/detectors/__init__.py
+++ b/analytics/detectors/__init__.py
@@ -0,0 +1,5 @@
+from detectors.general_detector import GeneralDetector
+from detectors.pattern_detection_model import PatternDetectionModel
+from detectors.peaks_detector import PeaksDetector
+from detectors.step_detector import StepDetector
+from detectors.jump_detector import Jumpdetector
diff --git a/analytics/detectors/general_detector.py b/analytics/detectors/general_detector.py
index e3c7c01..84bf3e4 100644
--- a/analytics/detectors/general_detector.py
+++ b/analytics/detectors/general_detector.py
@@ -1,4 +1,4 @@
-from data_provider import DataProvider
+from grafana_data_provider import GrafanaDataProvider
 from data_preprocessor import data_preprocessor
 import pandas as pd
 import logging
@@ -40,7 +40,7 @@ class GeneralDetector:
         with open(target_filename, 'r') as file:
             target = json.load(file)
 
-        self.data_prov = DataProvider(datasource, target, dataset_filename)
+        self.data_prov = GrafanaDataProvider(datasource, target, dataset_filename)
         self.preprocessor = data_preprocessor(self.data_prov, augmented_path)
         self.model = None
 
diff --git a/analytics/jump_detector.py b/analytics/detectors/jump_detector.py
similarity index 100%
rename from analytics/jump_detector.py
rename to analytics/detectors/jump_detector.py
diff --git a/analytics/detectors/pattern_detection_model.py b/analytics/detectors/pattern_detection_model.py
index c4aa9d8..9bbfce5 100644
--- a/analytics/detectors/pattern_detection_model.py
+++ b/analytics/detectors/pattern_detection_model.py
@@ -1,7 +1,7 @@
 from detectors.step_detector import StepDetector
 from detectors.peaks_detector import PeaksDetector
 
-from data_provider import DataProvider
+from grafana_data_provider import GrafanaDataProvider
 
 import logging
 from urllib.parse import urlparse
@@ -47,7 +47,7 @@ class PatternDetectionModel:
         with open(target_filename, 'r') as file:
             target = json.load(file)
 
-        self.data_prov = DataProvider(datasource, target, dataset_filename)
+        self.data_prov = GrafanaDataProvider(datasource, target, dataset_filename)
         self.model = None
         self.__load_model(pattern_type)
 
diff --git a/analytics/data_provider.py b/analytics/grafana_data_provider.py
similarity index 99%
rename from analytics/data_provider.py
rename to analytics/grafana_data_provider.py
index bbe4491..afd31e7 100644
--- a/analytics/data_provider.py
+++ b/analytics/grafana_data_provider.py
@@ -10,7 +10,7 @@ from config import HASTIC_API_KEY
 
 MS_IN_WEEK = 604800000
 
-class DataProvider:
+class GrafanaDataProvider:
     chunk_size = 50000
 
     def __init__(self, datasource, target, data_filename):
diff --git a/analytics/server.py b/analytics/server.py
index e14b271..059b5d4 100644
--- a/analytics/server.py
+++ b/analytics/server.py
@@ -3,17 +3,18 @@ import json
 import logging
 import sys
 import asyncio
-import services.server_service
+import services
 
 from analytic_unit_worker import AnalyticUnitWorker
 
 
+
 root = logging.getLogger()
 logger = logging.getLogger('SERVER')
 
 worker = None
 server_service = None
-
+data_service = None
 
 
 root.setLevel(logging.DEBUG)
@@ -24,7 +25,6 @@
 ch.setFormatter(formatter)
 root.addHandler(ch)
 
-
 async def handle_task(text):
     try:
         task = json.loads(text)
@@ -44,12 +44,21 @@ async def handle_task(text):
     except Exception as e:
         logger.error("Exception: '%s'" % str(e))
 
+def init_services():
+    logger.info("Starting services...")
+    logger.info("Server...")
+    server_service = services.ServerService(handle_task)
+    logger.info("Ok")
+    logger.info("Data service...")
+    data_service = services.DataService(server_service)
+    logger.info("Ok")
+
+    return server_service, data_service
+
 if __name__ == "__main__":
     loop = asyncio.get_event_loop()
     logger.info("Starting worker...")
     worker = AnalyticUnitWorker()
     logger.info("Ok")
-    logger.info("Starting server...")
-    server_service = services.server_service.ServerService(handle_task)
-    logger.info("Ok")
+    server_service, data_service = init_services()
     loop.run_until_complete(server_service.handle_loop())
diff --git a/analytics/services/__init__.py b/analytics/services/__init__.py
new file mode 100644
index 0000000..5cf4c4c
--- /dev/null
+++ b/analytics/services/__init__.py
@@ -0,0 +1,2 @@
+from services.server_service import ServerService
+from services.data_service import DataService
diff --git a/analytics/services/data_service.py b/analytics/services/data_service.py
new file mode 100644
index 0000000..1310418
--- /dev/null
+++ b/analytics/services/data_service.py
@@ -0,0 +1,9 @@
+class DataService:
+    def __init__(self, server_service):
+        self.server_service = server_service
+
+    async def save_file(self, filename, content):
+        pass
+
+    async def load_file(self, filename):
+        pass
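
Note on the new DataService: both methods land here as empty stubs. A minimal sketch of what the save/load pair could grow into, assuming plain-text payloads and nothing beyond the standard library; the _write and _read helpers are hypothetical and not part of this change:

    import asyncio


    class DataService:
        def __init__(self, server_service):
            self.server_service = server_service

        async def save_file(self, filename, content):
            # Push the blocking write onto the default executor so the
            # event loop keeps serving other tasks during disk I/O.
            loop = asyncio.get_event_loop()
            await loop.run_in_executor(None, self._write, filename, content)

        async def load_file(self, filename):
            # Same pattern for reads: delegate to a thread, await the result.
            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(None, self._read, filename)

        def _write(self, filename, content):
            with open(filename, 'w') as f:
                f.write(content)

        def _read(self, filename):
            with open(filename, 'r') as f:
                return f.read()

Any coroutine running on the same loop could then do, for example, await data_service.save_file('some_file.json', payload) without stalling the server_service.handle_loop() started in __main__.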