Compare commits

No commits in common. 'main' and 'env-vars-docs-#90' have entirely different histories.

  1. Dockerfile (27 changes)
  2. client/src/components/pods/pattern_pod.ts (1 change)
  3. client/src/views/Home.vue (16 changes)
  4. docker-compose.yml (12 changes)
  5. server/config.example.toml (17 changes)
  6. server/src/config.rs (27 changes)
  7. server/src/main.rs (7 changes)
  8. server/src/services/analytic_service/analytic_service.rs (17 changes)
  9. server/src/services/analytic_service/detection_runner.rs (20 changes)
  10. server/src/services/analytic_service/types.rs (1 change)
  11. server/src/services/analytic_unit_service.rs (15 changes)
  12. server/src/services/data_service.rs (23 changes)
  13. server/src/services/mod.rs (1 change)
  14. server/src/services/segments_service.rs (16 changes)

Dockerfile (27 changes)

@@ -1,27 +0,0 @@
FROM rust:1.57.0-bullseye as builder
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash -
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
nodejs \
gcc \
g++ \
make \
musl-tools \
&& rm -rf /var/lib/apt/lists/*
RUN npm install --global yarn
RUN rustup target add x86_64-unknown-linux-musl
ADD . ./
RUN make
FROM debian:bullseye-slim
COPY --from=builder /release/hastic /hastic
COPY --from=builder /release/config.toml /config.toml
COPY --from=builder /release/public /public
CMD ["./hastic"]

client/src/components/pods/pattern_pod.ts (1 change)

@@ -79,6 +79,7 @@ export class PatternPod extends HasticPod<UpdateDataCallback> {
} else {
console.log('took from range from default');
}
console.log(from + " ---- " + to);
this.udc({ from, to })
.then(resp => {

client/src/views/Home.vue (16 changes)

@@ -16,7 +16,7 @@
<div id="controls">
<div v-if="analyticUnitType == analyticUnitTypes[0]">
Threshold:
<input v-model="analyticUnitConfig.threshold" @change="thresholdChange" /> <br/><br/>
<input :value="analyticUnitConfig.threshold" @change="thresholdChange" /> <br/><br/>
</div>
<div v-if="analyticUnitType == analyticUnitTypes[1]">
Hold <pre>S</pre> to label patterns;
@@ -25,13 +25,13 @@
<br/>
<hr/>
Correlation score:
<input v-model="analyticUnitConfig.correlation_score" @change="correlationScoreChange" /> <br/>
<input :value="analyticUnitConfig.correlation_score" @change="correlationScoreChange" /> <br/>
Anti correlation score:
<input v-model="analyticUnitConfig.anti_correlation_score" @change="antiCorrelationScoreChange" /> <br/>
<input :value="analyticUnitConfig.anti_correlation_score" @change="antiCorrelationScoreChange" /> <br/>
Model score:
<input v-model="analyticUnitConfig.model_score" @change="modelScoreChange" /> <br/>
<input :value="analyticUnitConfig.model_score" @change="modelScoreChange" /> <br/>
Threshold score:
<input v-model="analyticUnitConfig.threshold_score" @change="thresholdScoreChange" /> <br/><br/>
<input :value="analyticUnitConfig.threshold_score" @change="thresholdScoreChange" /> <br/><br/>
<button @click="clearAllLabeling"> clear all labeling </button>
</div>
<div v-if="analyticUnitType == analyticUnitTypes[2]">
@@ -40,11 +40,11 @@
<!-- Alpha:
<input :value="analyticUnitConfig.alpha" @change="alphaChange" /> <br/> -->
Confidence:
<input v-model="analyticUnitConfig.confidence" @change="confidenceChange" /> <br/>
<input :value="analyticUnitConfig.confidence" @change="confidenceChange" /> <br/>
Seasonality:
<input v-model="analyticUnitConfig.seasonality" @change="seasonalityChange" /> <br/>
<input :value="analyticUnitConfig.seasonality" @change="seasonalityChange" /> <br/>
Seasonality iterations:
<input v-model="analyticUnitConfig.seasonality_iterations" @change="seasonalityIterationsChange" /> <br/>
<input :value="analyticUnitConfig.seasonality_iterations" @change="seasonalityIterationsChange" /> <br/>
<br/>
</div>
</div>

docker-compose.yml (12 changes)

@@ -1,12 +0,0 @@
version: '3'
services:
app:
image: hastic/hastic:latest
restart: always
environment:
HASTIC_PORT: "4347"
HASTIC_PROMETHEUS__URL: "http://demo.robustperception.io:9090"
HASTIC_PROMETHEUS__QUERY: "rate(go_memstats_alloc_bytes_total[1m])"
ports:
- "4347:4347"

server/config.example.toml (17 changes)

@@ -1,10 +1,8 @@
port = 4347
# one of datasource sections (prometheus / influx) should be uncommented and edited corresponding to your environment
# [prometheus]
# url = "http://localhost:9090"
# query = "rate(go_memstats_alloc_bytes_total[5m])"
[prometheus]
url = "http://localhost:9090"
query = "rate(go_memstats_alloc_bytes_total[5m])"
# [influx]
@@ -18,7 +16,8 @@ port = 4347
# |> yield(name: "mean")
# """
# [alerting]
# type = "webhook"
# interval = 10 # in seconds
# endpoint = "http://localhost:9092"
[alerting]
type = "webhook"
interval = 10 # in seconds
endpoint = "http://localhost:9092"

server/src/config.rs (27 changes)

@@ -27,7 +27,6 @@ pub struct Config {
fn resolve_datasource(config: &config::Config) -> anyhow::Result<DatasourceConfig> {
if config.get::<String>("prometheus.url").is_ok() {
println!("using Prometheus");
return Ok(DatasourceConfig::Prometheus(PrometheusConfig {
url: config.get("prometheus.url")?,
query: config.get("prometheus.query")?,
@@ -35,7 +34,6 @@ fn resolve_datasource(config: &config::Config) -> anyhow::Result<DatasourceConfi
}
if config.get::<String>("influx.url").is_ok() {
println!("using Influx");
return Ok(DatasourceConfig::Influx(InfluxConfig {
url: config.get("influx.url")?,
org_id: config.get("influx.org_id")?,
@@ -44,7 +42,7 @@ fn resolve_datasource(config: &config::Config) -> anyhow::Result<DatasourceConfi
}));
}
return Err(anyhow::format_err!("please configure a datasource"));
return Err(anyhow::format_err!("no datasource found"));
}
fn resolve_alerting(config: &config::Config) -> anyhow::Result<Option<AlertingConfig>> {
@@ -82,38 +80,24 @@ fn resolve_alerting(config: &config::Config) -> anyhow::Result<Option<AlertingCo
// config::Environment doesn't support nested configs, e.g. `alerting.type`,
// so I've copied this from:
// https://github.com/rust-lang/mdBook/blob/f3e5fce6bf5e290c713f4015947dc0f0ad172d20/src/config.rs#L132
// so that `__` can be used in env variables instead of `.`,
// so that `__` can be used in env variables instead of `.`,
// e.g. `HASTIC_ALERTING__TYPE` -> alerting.type
pub fn update_from_env(config: &mut config::Config) {
let overrides =
env::vars().filter_map(|(key, value)| parse_env(&key).map(|index| (index, value)));
for (key, value) in overrides {
println!("{} => {}", key, value);
config.set(&key, value).unwrap();
}
}
pub fn print_config(config: config::Config) {
// TODO: support any nesting level
let sections = config.to_owned().cache.into_table().unwrap();
for (section_name, values) in sections {
match values.clone().into_table() {
Err(_) => println!("{} => {}", section_name, values),
Ok(section) => {
for (key, value) in section {
println!("{}.{} => {}", section_name, key, value);
}
}
}
}
}
fn parse_env(key: &str) -> Option<String> {
const PREFIX: &str = "HASTIC_";
if key.starts_with(PREFIX) {
let key = &key[PREFIX.len()..];
Some(key.to_lowercase().replace("__", "."))
Some(key.to_lowercase().replace("__", ".").replace("_", "-"))
} else {
None
}
@@ -133,8 +117,7 @@ impl Config {
config.set("port", 4347).unwrap();
}
print_config(config.clone());
// TODO: print resulted config (perfectly, it needs adding `derive(Debug)` in `subbeat`'s `DatasourceConfig`)
Ok(Config {
port: config.get::<u16>("port").unwrap(),
datasource_config: resolve_datasource(&config)?,
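
The `parse_env` change in this hunk is the core of the `HASTIC_*` environment-variable override described in the comment block: the `HASTIC_` prefix is stripped, the remainder is lowercased, `__` becomes `.` for nesting, and the longer variant additionally turns `_` into `-`. Below is a minimal standalone sketch of that mapping, not the project's actual code; the assertion values are illustrative.

// Sketch of the HASTIC_* env-var key mapping (assumption-based, simplified).
fn parse_env(key: &str) -> Option<String> {
    const PREFIX: &str = "HASTIC_";
    // Strip the prefix, lowercase, turn `__` into `.` (nesting) and `_` into `-`.
    key.strip_prefix(PREFIX)
        .map(|rest| rest.to_lowercase().replace("__", ".").replace('_', "-"))
}

fn main() {
    // The variables used in docker-compose.yml above map as follows:
    assert_eq!(parse_env("HASTIC_PORT").as_deref(), Some("port"));
    assert_eq!(parse_env("HASTIC_PROMETHEUS__URL").as_deref(), Some("prometheus.url"));
    assert_eq!(parse_env("HASTIC_ALERTING__TYPE").as_deref(), Some("alerting.type"));
    assert_eq!(parse_env("NOT_HASTIC"), None);
}

The shorter variant in the hunk omits the final `_` to `-` replacement; the example keys above map the same way under either variant because no single underscores remain after the prefix is stripped.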

server/src/main.rs (7 changes)

@@ -1,6 +1,6 @@
mod api;
use hastic::services::{analytic_service, analytic_unit_service, metric_service, segments_service, data_service};
use hastic::services::{analytic_service, analytic_unit_service, metric_service, segments_service};
use anyhow;
@@ -9,10 +9,9 @@ async fn main() -> anyhow::Result<()> {
let config = hastic::config::Config::new()?;
let cfg_clone = config.clone();
let data_service = data_service::DataService::new()?;
let analytic_unit_service = analytic_unit_service::AnalyticUnitService::new(&data_service)?;
let analytic_unit_service = analytic_unit_service::AnalyticUnitService::new()?;
let metric_service = metric_service::MetricService::new(&config.datasource_config);
let segments_service = segments_service::SegmentsService::new(&data_service)?;
let segments_service = segments_service::SegmentsService::new()?;
let mut analytic_service = analytic_service::AnalyticService::new(
analytic_unit_service.clone(),

server/src/services/analytic_service/analytic_service.rs (17 changes)

@@ -137,7 +137,7 @@ impl AnalyticService {
};
let tx = self.tx.clone();
let au = self.analytic_unit.as_ref().unwrap().clone();
let dr = DetectionRunner::new(tx,self.metric_service.clone(), drcfg, au);
let dr = DetectionRunner::new(self.metric_service.clone(), tx, drcfg, au);
self.detection_runner = Some(dr);
self.detection_runner.as_mut().unwrap().run(from);
@@ -245,9 +245,6 @@ impl AnalyticService {
.set_last_detection(id, timestamp)
.unwrap();
}
ResponseType::DetectionRunnerDetection(from, to) => {
println!("detection: {} {}", from, to);
}
ResponseType::LearningStarted => {
self.analytic_unit_learning_status = LearningStatus::Learning
}
@@ -302,9 +299,8 @@ impl AnalyticService {
self.consume_request(RequestType::RunLearning);
match tx.send(()) {
Ok(_) => {}
Err(e) => {
Err(_e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
return;
@@ -316,9 +312,8 @@ impl AnalyticService {
au.unwrap().write().await.set_config(cfg);
match tx.send(()) {
Ok(_) => {}
Err(e) => {
Err(_e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
}
@@ -328,9 +323,8 @@ impl AnalyticService {
// TODO: check if we need this else
match tx.send(()) {
Ok(_) => {}
Err(e) => {
Err(_e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
}
@@ -343,9 +337,8 @@ impl AnalyticService {
self.consume_request(RequestType::RunLearning);
match tx.send(()) {
Ok(_) => {}
Err(e) => {
Err(_e) => {
println!("Can`t send patch config notification");
println!("{:?}", e);
}
}
}

server/src/services/analytic_service/detection_runner.rs (20 changes)

@@ -9,8 +9,8 @@ use tokio::time::{sleep, Duration};
pub struct DetectionRunner {
tx: mpsc::Sender<AnalyticServiceMessage>,
metric_service: MetricService,
tx: mpsc::Sender<AnalyticServiceMessage>,
config: DetectionRunnerConfig,
analytic_unit: AnalyticUnitRF,
running_handler: Option<tokio::task::JoinHandle<()>>,
@@ -18,8 +18,8 @@ pub struct DetectionRunner {
impl DetectionRunner {
pub fn new(
tx: mpsc::Sender<AnalyticServiceMessage>,
metric_service: MetricService,
tx: mpsc::Sender<AnalyticServiceMessage>,
config: DetectionRunnerConfig,
analytic_unit: AnalyticUnitRF,
) -> DetectionRunner {
@@ -33,6 +33,7 @@ impl DetectionRunner {
}
pub fn run(&mut self, from: u64) {
// TODO: get last detection timestamp from persistance
// TODO: set last detection from "now"
if self.running_handler.is_some() {
self.running_handler.as_mut().unwrap().abort();
@@ -45,9 +46,8 @@ impl DetectionRunner {
async move {
// TODO: run detection "from" for big timespan
// TODO: parse detections to webhooks
// TODO: define window for detection
// TODO: handle case when detection is in the end and continues after "now"
// it's better to make an issue on github
// TODO: find place to update analytic unit model
let window_size = au.as_ref().read().await.get_detection_window();
let detection_step = ms.get_detection_step();
@@ -69,18 +69,10 @@ impl DetectionRunner {
let detections = a.detect(ms.clone(), t_from, t_to).await.unwrap();
for d in detections {
match tx
.send(AnalyticServiceMessage::Response(Ok(
ResponseType::DetectionRunnerDetection(d.0, d.1),
)))
.await
{
Ok(_) => {}
Err(_e) => println!("Fail to send detection runner detection notification"),
}
println!("detection: {} {}", d.0, d.1);
}
// TODO: send info about detections to tx
// TODO: set info about detections to tx
match tx
.send(AnalyticServiceMessage::Response(Ok(
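
Piecing the fragments of this hunk together: the runner spawns a task that repeatedly detects over a sliding time window and reports each result back over the service channel. The following is a rough, hypothetical sketch of that loop shape; the names, types, and the sleep/step policy are illustrative assumptions, not the project's API.

use tokio::sync::mpsc;
use tokio::time::{sleep, Duration};

// Illustrative stand-in for the messages sent back to the analytic service.
#[derive(Debug)]
enum RunnerMessage {
    Detection(u64, u64), // (from, to) timestamps of a detected segment
}

// Hypothetical loop: detect over [t_from, t_from + window], report results,
// then advance by the detection step and wait before the next pass.
async fn detection_loop(tx: mpsc::Sender<RunnerMessage>, mut t_from: u64, window: u64, step: u64) {
    loop {
        let t_to = t_from + window;
        // ... here the real runner would call the analytic unit's detect(t_from, t_to) ...
        let detections: Vec<(u64, u64)> = vec![(t_from, t_to)]; // placeholder result
        for (from, to) in detections {
            if tx.send(RunnerMessage::Detection(from, to)).await.is_err() {
                println!("Fail to send detection runner detection notification");
                return;
            }
        }
        t_from += step;
        sleep(Duration::from_secs(step)).await;
    }
}

In the diff itself, one side wraps each detection in AnalyticServiceMessage::Response(Ok(ResponseType::DetectionRunnerDetection(from, to))) and sends it over the channel, while the other side only prints it.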

server/src/services/analytic_service/types.rs (1 change)

@@ -45,7 +45,6 @@ impl Default for LearningTrain {
pub enum ResponseType {
DetectionRunnerStarted(u64),
DetectionRunnerUpdate(String, u64), // analytic_unit id and timestamp
DetectionRunnerDetection(u64, u64), // TODO: add more into about analytic unit and more
LearningStarted,
LearningFinished(Box<dyn AnalyticUnit + Send + Sync>),
LearningFinishedEmpty,

server/src/services/analytic_unit_service.rs (15 changes)

@@ -1,3 +1,5 @@
use serde::{Deserialize, Serialize};
use serde_json::{Result, Value};
use std::sync::{Arc, Mutex};
use rusqlite::{params, Connection};
@@ -9,20 +11,19 @@ use super::analytic_service::analytic_unit::{
types::{self, AnalyticUnitConfig},
};
use super::data_service::DataService;
#[derive(Clone)]
pub struct AnalyticUnitService {
connection: Arc<Mutex<Connection>>,
}
// TODO: get DataService
impl AnalyticUnitService {
pub fn new(ds: &DataService) -> anyhow::Result<AnalyticUnitService> {
let conn = ds.analytic_units_connection.clone();
pub fn new() -> anyhow::Result<AnalyticUnitService> {
// TODO: remove repetitoin with segment_service
std::fs::create_dir_all("./data").unwrap();
let conn = Connection::open("./data/analytic_units.db")?;
// TODO: add learning results field
conn.lock().unwrap().execute(
conn.execute(
"CREATE TABLE IF NOT EXISTS analytic_unit (
id TEXT PRIMARY KEY,
last_detection INTEGER,
@@ -34,7 +35,7 @@ impl AnalyticUnitService {
)?;
Ok(AnalyticUnitService {
connection: conn
connection: Arc::new(Mutex::new(conn)),
})
}

server/src/services/data_service.rs (23 changes)

@@ -1,23 +0,0 @@
use std::sync::{Arc, Mutex};
use rusqlite::{Connection};
pub struct DataService {
pub analytic_units_connection: Arc<Mutex<Connection>>,
pub segments_connection: Arc<Mutex<Connection>>
}
impl DataService {
pub fn new() -> anyhow::Result<DataService> {
std::fs::create_dir_all("./data").unwrap();
let analytic_units_connection = Connection::open("./data/analytic_units.db")?;
let segments_connection = Connection::open("./data/segments.db")?;
Ok(DataService {
analytic_units_connection: Arc::new(Mutex::new(analytic_units_connection)),
segments_connection: Arc::new(Mutex::new(segments_connection))
})
}
}

server/src/services/mod.rs (1 change)

@@ -1,5 +1,4 @@
pub mod analytic_service;
pub mod data_service;
pub mod analytic_unit_service;
pub mod metric_service;
pub mod segments_service;

server/src/services/segments_service.rs (16 changes)

@@ -6,13 +6,9 @@ use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};
use super::data_service::DataService;
pub const ID_LENGTH: usize = 20;
pub type SegmentId = String;
// TODO: make logic with this enum shorter
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)]
pub enum SegmentType {
@@ -62,19 +58,19 @@ impl Segment {
}
}
// TODO: get DataService
#[derive(Clone)]
pub struct SegmentsService {
connection: Arc<Mutex<Connection>>,
}
impl SegmentsService {
pub fn new(ds: &DataService) -> anyhow::Result<SegmentsService> {
pub fn new() -> anyhow::Result<SegmentsService> {
// TODO: move it to data service
std::fs::create_dir_all("./data").unwrap();
// TODO: add unilytic_unit id as a new field
let conn = ds.segments_connection.clone();
conn.lock().unwrap().execute(
let conn = Connection::open("./data/segments.db")?;
conn.execute(
"CREATE TABLE IF NOT EXISTS segment (
id TEXT PRIMARY KEY,
start INTEGER NOT NULL,
@@ -85,7 +81,7 @@ impl SegmentsService {
)?;
Ok(SegmentsService {
connection: conn
connection: Arc::new(Mutex::new(conn)),
})
}
