Compare commits

15 Commits

Files changed (13):

  1. client/src/components/pods/pattern_pod.ts (1 changed line)
  2. client/src/views/Home.vue (1 changed line)
  3. docker-compose.yml (1 changed line)
  4. server/config.example.toml (17 changed lines)
  5. server/src/config.rs (27 changed lines)
  6. server/src/main.rs (7 changed lines)
  7. server/src/services/analytic_service/analytic_service.rs (15 changed lines)
  8. server/src/services/analytic_service/detection_runner.rs (10 changed lines)
  9. server/src/services/analytic_service/types.rs (1 changed line)
  10. server/src/services/analytic_unit_service.rs (15 changed lines)
  11. server/src/services/data_service.rs (23 changed lines)
  12. server/src/services/mod.rs (1 changed line)
  13. server/src/services/segments_service.rs (16 changed lines)

client/src/components/pods/pattern_pod.ts (1 changed line)

@@ -79,7 +79,6 @@ export class PatternPod extends HasticPod<UpdateDataCallback> {
} else {
console.log('took from range from default');
}
console.log(from + " ---- " + to);
this.udc({ from, to })
.then(resp => {

client/src/views/Home.vue (1 changed line)

@@ -86,7 +86,6 @@ export default defineComponent({
// Pattern
correlationScoreChange(e) {
console.log('change correlationScoreChange');
let cfg = _.clone(this.analyticUnitConfig);
cfg.correlation_score = parseFloat(e.target.value);
this.$store.dispatch('patchConfig', { Pattern: cfg });

docker-compose.yml (1 changed line)

@@ -3,7 +3,6 @@ version: '3'
services:
app:
image: hastic/hastic:latest
network_mode: host
restart: always
environment:
HASTIC_PORT: "4347"

server/config.example.toml (17 changed lines)

@@ -1,8 +1,10 @@
port = 4347
[prometheus]
url = "http://localhost:9090"
query = "rate(go_memstats_alloc_bytes_total[5m])"
# one of datasource sections (prometheus / influx) should be uncommented and edited corresponding to your environment
# [prometheus]
# url = "http://localhost:9090"
# query = "rate(go_memstats_alloc_bytes_total[5m])"
# [influx]
@@ -16,8 +18,7 @@ query = "rate(go_memstats_alloc_bytes_total[5m])"
# |> yield(name: "mean")
# """
[alerting]
type = "webhook"
interval = 10 # in seconds
endpoint = "http://localhost:9092"
# [alerting]
# type = "webhook"
# interval = 10 # in seconds
# endpoint = "http://localhost:9092"

server/src/config.rs (27 changed lines)

@@ -27,6 +27,7 @@ pub struct Config {
fn resolve_datasource(config: &config::Config) -> anyhow::Result<DatasourceConfig> {
if config.get::<String>("prometheus.url").is_ok() {
println!("using Prometheus");
return Ok(DatasourceConfig::Prometheus(PrometheusConfig {
url: config.get("prometheus.url")?,
query: config.get("prometheus.query")?,
@@ -34,6 +35,7 @@ fn resolve_datasource(config: &config::Config) -> anyhow::Result<DatasourceConfi
}
if config.get::<String>("influx.url").is_ok() {
println!("using Influx");
return Ok(DatasourceConfig::Influx(InfluxConfig {
url: config.get("influx.url")?,
org_id: config.get("influx.org_id")?,
@@ -42,7 +44,7 @@ fn resolve_datasource(config: &config::Config) -> anyhow::Result<DatasourceConfi
}));
}
return Err(anyhow::format_err!("no datasource found"));
return Err(anyhow::format_err!("please configure a datasource"));
}
fn resolve_alerting(config: &config::Config) -> anyhow::Result<Option<AlertingConfig>> {
@@ -80,24 +82,38 @@ fn resolve_alerting(config: &config::Config) -> anyhow::Result<Option<AlertingCo
// config::Environment doesn't support nested configs, e.g. `alerting.type`,
// so I've copied this from:
// https://github.com/rust-lang/mdBook/blob/f3e5fce6bf5e290c713f4015947dc0f0ad172d20/src/config.rs#L132
// so that `__` can be used in env variables instead of `.`,
// e.g. `HASTIC_ALERTING__TYPE` -> alerting.type
pub fn update_from_env(config: &mut config::Config) {
let overrides =
env::vars().filter_map(|(key, value)| parse_env(&key).map(|index| (index, value)));
for (key, value) in overrides {
println!("{} => {}", key, value);
config.set(&key, value).unwrap();
}
}
pub fn print_config(config: config::Config) {
// TODO: support any nesting level
let sections = config.to_owned().cache.into_table().unwrap();
for (section_name, values) in sections {
match values.clone().into_table() {
Err(_) => println!("{} => {}", section_name, values),
Ok(section) => {
for (key, value) in section {
println!("{}.{} => {}", section_name, key, value);
}
}
}
}
}
fn parse_env(key: &str) -> Option<String> {
const PREFIX: &str = "HASTIC_";
if key.starts_with(PREFIX) {
let key = &key[PREFIX.len()..];
Some(key.to_lowercase().replace("__", ".").replace("_", "-"))
Some(key.to_lowercase().replace("__", "."))
} else {
None
}
@@ -117,7 +133,8 @@ impl Config {
config.set("port", 4347).unwrap();
}
// TODO: print resulted config (perfectly, it needs adding `derive(Debug)` in `subbeat`'s `DatasourceConfig`)
print_config(config.clone());
Ok(Config {
port: config.get::<u16>("port").unwrap(),
datasource_config: resolve_datasource(&config)?,
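
For reference, here is a minimal, self-contained sketch of how the updated `parse_env` maps `HASTIC_`-prefixed environment variables onto nested config keys. The function body is taken from the hunk above; the variable names in `main` are only illustrative examples.

fn parse_env(key: &str) -> Option<String> {
    const PREFIX: &str = "HASTIC_";
    if key.starts_with(PREFIX) {
        let key = &key[PREFIX.len()..];
        // `__` becomes `.`; a single `_` is now left untouched, so keys such as
        // `org_id` survive (the old `_` -> `-` replacement would have mangled them).
        Some(key.to_lowercase().replace("__", "."))
    } else {
        None
    }
}

fn main() {
    // HASTIC_ALERTING__TYPE=webhook -> alerting.type
    assert_eq!(parse_env("HASTIC_ALERTING__TYPE"), Some("alerting.type".to_string()));
    // HASTIC_PROMETHEUS__URL=http://localhost:9090 -> prometheus.url
    assert_eq!(parse_env("HASTIC_PROMETHEUS__URL"), Some("prometheus.url".to_string()));
    // HASTIC_INFLUX__ORG_ID=... -> influx.org_id (inner underscore preserved)
    assert_eq!(parse_env("HASTIC_INFLUX__ORG_ID"), Some("influx.org_id".to_string()));
    // Variables without the HASTIC_ prefix are ignored.
    assert_eq!(parse_env("PATH"), None);
}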

server/src/main.rs (7 changed lines)

@@ -1,6 +1,6 @@
mod api;
use hastic::services::{analytic_service, analytic_unit_service, metric_service, segments_service};
use hastic::services::{analytic_service, analytic_unit_service, metric_service, segments_service, data_service};
use anyhow;
@@ -9,9 +9,10 @@ async fn main() -> anyhow::Result<()> {
let config = hastic::config::Config::new()?;
let cfg_clone = config.clone();
let analytic_unit_service = analytic_unit_service::AnalyticUnitService::new()?;
let data_service = data_service::DataService::new()?;
let analytic_unit_service = analytic_unit_service::AnalyticUnitService::new(&data_service)?;
let metric_service = metric_service::MetricService::new(&config.datasource_config);
let segments_service = segments_service::SegmentsService::new()?;
let segments_service = segments_service::SegmentsService::new(&data_service)?;
let mut analytic_service = analytic_service::AnalyticService::new(
analytic_unit_service.clone(),

server/src/services/analytic_service/analytic_service.rs (15 changed lines)

@@ -245,6 +245,9 @@ impl AnalyticService {
.set_last_detection(id, timestamp)
.unwrap();
}
ResponseType::DetectionRunnerDetection(from, to) => {
println!("detection: {} {}", from, to);
}
ResponseType::LearningStarted => {
self.analytic_unit_learning_status = LearningStatus::Learning
}
@@ -299,8 +302,9 @@ impl AnalyticService {
self.consume_request(RequestType::RunLearning);
match tx.send(()) {
Ok(_) => {}
Err(_e) => {
Err(e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
return;
@@ -312,8 +316,9 @@ impl AnalyticService {
au.unwrap().write().await.set_config(cfg);
match tx.send(()) {
Ok(_) => {}
Err(_e) => {
Err(e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
}
@@ -323,8 +328,9 @@ impl AnalyticService {
// TODO: check if we need this else
match tx.send(()) {
Ok(_) => {}
Err(_e) => {
Err(e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
}
@@ -337,8 +343,9 @@ impl AnalyticService {
self.consume_request(RequestType::RunLearning);
match tx.send(()) {
Ok(_) => {}
Err(_e) => {
Err(e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
}

server/src/services/analytic_service/detection_runner.rs (10 changed lines)

@@ -69,7 +69,15 @@ impl DetectionRunner {
let detections = a.detect(ms.clone(), t_from, t_to).await.unwrap();
for d in detections {
println!("detection: {} {}", d.0, d.1);
match tx
.send(AnalyticServiceMessage::Response(Ok(
ResponseType::DetectionRunnerDetection(d.0, d.1),
)))
.await
{
Ok(_) => {}
Err(_e) => println!("Fail to send detection runner detection notification"),
}
}
// TODO: send info about detections to tx
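
Read together with the `analytic_service.rs` hunks above, the detection runner now reports each detection back over the service channel instead of only printing it. The following is a rough, self-contained sketch of that flow, assuming a tokio mpsc channel; the two enums are simplified stand-ins for the real `AnalyticServiceMessage` and `ResponseType`, which carry more variants and use `anyhow` for the error type.

use tokio::sync::mpsc;

// Simplified stand-ins for the service's message types.
#[derive(Debug)]
enum ResponseType {
    DetectionRunnerDetection(u64, u64), // (from, to) timestamps
}

#[derive(Debug)]
enum AnalyticServiceMessage {
    Response(Result<ResponseType, String>),
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel::<AnalyticServiceMessage>(32);

    // Detection runner side: send the detected segment to the analytic service.
    tokio::spawn(async move {
        let msg = AnalyticServiceMessage::Response(Ok(
            ResponseType::DetectionRunnerDetection(1_640_000_000, 1_640_000_060),
        ));
        if tx.send(msg).await.is_err() {
            println!("Fail to send detection runner detection notification");
        }
    });

    // Analytic service side: the new match arm just logs the detection for now.
    while let Some(AnalyticServiceMessage::Response(Ok(resp))) = rx.recv().await {
        match resp {
            ResponseType::DetectionRunnerDetection(from, to) => {
                println!("detection: {} {}", from, to);
            }
        }
    }
}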

server/src/services/analytic_service/types.rs (1 changed line)

@@ -45,6 +45,7 @@ impl Default for LearningTrain {
pub enum ResponseType {
DetectionRunnerStarted(u64),
DetectionRunnerUpdate(String, u64), // analytic_unit id and timestamp
DetectionRunnerDetection(u64, u64), // TODO: add more into about analytic unit and more
LearningStarted,
LearningFinished(Box<dyn AnalyticUnit + Send + Sync>),
LearningFinishedEmpty,

server/src/services/analytic_unit_service.rs (15 changed lines)

@@ -1,5 +1,3 @@
use serde::{Deserialize, Serialize};
use serde_json::{Result, Value};
use std::sync::{Arc, Mutex};
use rusqlite::{params, Connection};
@@ -11,19 +9,20 @@ use super::analytic_service::analytic_unit::{
types::{self, AnalyticUnitConfig},
};
use super::data_service::DataService;
#[derive(Clone)]
pub struct AnalyticUnitService {
connection: Arc<Mutex<Connection>>,
}
// TODO: get DataService
impl AnalyticUnitService {
pub fn new() -> anyhow::Result<AnalyticUnitService> {
// TODO: remove repetitoin with segment_service
std::fs::create_dir_all("./data").unwrap();
let conn = Connection::open("./data/analytic_units.db")?;
pub fn new(ds: &DataService) -> anyhow::Result<AnalyticUnitService> {
let conn = ds.analytic_units_connection.clone();
// TODO: add learning results field
conn.execute(
conn.lock().unwrap().execute(
"CREATE TABLE IF NOT EXISTS analytic_unit (
id TEXT PRIMARY KEY,
last_detection INTEGER,
@@ -35,7 +34,7 @@ impl AnalyticUnitService {
)?;
Ok(AnalyticUnitService {
connection: Arc::new(Mutex::new(conn)),
connection: conn
})
}

server/src/services/data_service.rs (23 changed lines, new file)

@@ -0,0 +1,23 @@
use std::sync::{Arc, Mutex};
use rusqlite::{Connection};
pub struct DataService {
pub analytic_units_connection: Arc<Mutex<Connection>>,
pub segments_connection: Arc<Mutex<Connection>>
}
impl DataService {
pub fn new() -> anyhow::Result<DataService> {
std::fs::create_dir_all("./data").unwrap();
let analytic_units_connection = Connection::open("./data/analytic_units.db")?;
let segments_connection = Connection::open("./data/segments.db")?;
Ok(DataService {
analytic_units_connection: Arc::new(Mutex::new(analytic_units_connection)),
segments_connection: Arc::new(Mutex::new(segments_connection))
})
}
}
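
The new `DataService` centralizes the SQLite handles that `AnalyticUnitService` and `SegmentsService` previously opened themselves (including the `./data` directory creation). Below is a rough sketch of the pattern a consumer service follows after this change; `ExampleService` and its table are hypothetical, but the construction mirrors the `SegmentsService::new(&data_service)` call in `main.rs` above and the `SegmentsService` changes below. As written, such a module would sit alongside the others in `server/src/services/`.

use std::sync::{Arc, Mutex};

use rusqlite::{params, Connection};

use super::data_service::DataService;

// Hypothetical service that borrows a shared connection instead of opening ./data/*.db itself.
#[derive(Clone)]
pub struct ExampleService {
    connection: Arc<Mutex<Connection>>,
}

impl ExampleService {
    pub fn new(ds: &DataService) -> anyhow::Result<ExampleService> {
        // Clone the Arc handle owned by DataService; the Mutex serializes access to the connection.
        let conn = ds.segments_connection.clone();
        conn.lock().unwrap().execute(
            "CREATE TABLE IF NOT EXISTS example (id TEXT PRIMARY KEY)",
            params![],
        )?;
        Ok(ExampleService { connection: conn })
    }
}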

server/src/services/mod.rs (1 changed line)

@@ -1,4 +1,5 @@
pub mod analytic_service;
pub mod data_service;
pub mod analytic_unit_service;
pub mod metric_service;
pub mod segments_service;

server/src/services/segments_service.rs (16 changed lines)

@@ -6,9 +6,13 @@ use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};
use super::data_service::DataService;
pub const ID_LENGTH: usize = 20;
pub type SegmentId = String;
// TODO: make logic with this enum shorter
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)]
pub enum SegmentType {
@@ -58,19 +62,19 @@ impl Segment {
}
}
// TODO: get DataService
#[derive(Clone)]
pub struct SegmentsService {
connection: Arc<Mutex<Connection>>,
}
impl SegmentsService {
pub fn new() -> anyhow::Result<SegmentsService> {
// TODO: move it to data service
std::fs::create_dir_all("./data").unwrap();
pub fn new(ds: &DataService) -> anyhow::Result<SegmentsService> {
// TODO: add unilytic_unit id as a new field
let conn = Connection::open("./data/segments.db")?;
conn.execute(
let conn = ds.segments_connection.clone();
conn.lock().unwrap().execute(
"CREATE TABLE IF NOT EXISTS segment (
id TEXT PRIMARY KEY,
start INTEGER NOT NULL,
@@ -81,7 +85,7 @@ impl SegmentsService {
)?;
Ok(SegmentsService {
connection: Arc::new(Mutex::new(conn)),
connection: conn
})
}
