feat: launch from configuration via env vars only
parent fa31c4fac0
commit c1c55ec7f3
4 changed files with 115 additions and 11 deletions
Makefile | 20 ++++++++++++++++++++

@@ -41,6 +41,26 @@ default: ## Debug build
 	$(call cache_bust)
 	cargo build
 
+env.serve:
+	STARCHART__LOG=info \
+	STARCHART__SOURCE_CODE="https://github.com/forgeflux-org/starchart" \
+	STARCHART__ALLOW_NEW_INDEX=true \
+	STARCHART__ADMIN_EMAIL=realaravinth@batsense.net \
+	STARCHART__SERVER__IP=0.0.0.0 \
+	STARCHART__SERVER__PORT=7000 \
+	STARCHART__SERVER__DOMAIN=localhost \
+	STARCHART__SERVER__PROXY_HAS_TLS=false \
+	STARCHART__SERVER__COOKIE_SECRET=7514316e58bfdb2eb2d71bf4af40827a \
+	STARCHART__DATABASE__POOL=5 STARCHART__DATABASE__TYPE=sqlite \
+	STARCHART__CRAWLER__TTL=3600 \
+	STARCHART__CRAWLER__WAIT_BEFORE_NEXT_API_CALL=2 \
+	STARCHART__CRAWLER__CLIENT_TIMEOUT=60 \
+	STARCHART__CRAWLER__ITEMS_PER_API_CALL=20 \
+	STARCHART__INTRODUCER__PUBLIC_URL="http://localhost:7000" \
+	STARCHART__INTRODUCER__NODES=http://localhost:7001,http://localhost:7002 \
+	STARCHART__REPOSITORY__ROOT=/tmp/starchart.forgeflux.org \
+	cargo run
+
 cache-bust: ## Run cache buster on static assets
 	$(call cache_bust)
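The new env.serve target launches the server with every setting supplied through STARCHART__-prefixed environment variables, no configuration file required. The double underscore acts as a nesting separator, so STARCHART__SERVER__PORT lands on the server.port key. A minimal sketch of that mapping, assuming config 0.13-era APIs and simplified struct names that may differ from the project's actual Settings:

// Sketch: how config-rs maps STARCHART__SERVER__PORT onto a nested
// struct field. Struct names here are illustrative, not the project's own.
use config::{Config, Environment};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Server {
    port: u32,
    domain: String,
}

#[derive(Debug, Deserialize)]
struct Settings {
    server: Server,
}

fn main() {
    // Simulate what `make env.serve` exports before `cargo run`.
    std::env::set_var("STARCHART__SERVER__PORT", "7000");
    std::env::set_var("STARCHART__SERVER__DOMAIN", "localhost");

    let cfg = Config::builder()
        .add_source(
            Environment::with_prefix("STARCHART")
                .separator("__")
                .try_parsing(true), // parse "7000" into u32
        )
        .build()
        .unwrap();

    let settings: Settings = cfg.try_deserialize().unwrap();
    assert_eq!(settings.server.port, 7000);
    assert_eq!(settings.server.domain, "localhost");
}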
(Ctx module; file name not captured)

@@ -30,7 +30,7 @@ use db_core::prelude::*;
 
 use crate::ctx::Ctx;
 use crate::pages::chart::home::{OptionalPage, Page};
-use crate::{errors::*, WebDB};
+use crate::{errors::*, WebCtx, WebDB};
 
 const LIMIT: u32 = 50;
 
@@ -141,11 +141,11 @@ impl Ctx {
     }
 
     pub async fn spawn_bootstrap(
-        self,
+        ctx: WebCtx,
         db: Box<dyn SCDatabase>,
     ) -> ServiceResult<(Sender<bool>, impl Future)> {
         let (tx, mut rx) = oneshot::channel();
-        let fut = async {
+        let fut = async move {
             loop {
                 let shutdown = match rx.try_recv() {
                     // The channel is currently empty
@@ -163,7 +163,7 @@ impl Ctx {
                     break;
                 }
 
-                let _ = self.bootstrap(db).await;
+                let _ = ctx.bootstrap(&db).await;
             }
         };
 
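spawn_bootstrap now takes an owned WebCtx argument instead of self, and the inner future is async move, so the loop owns everything it captures and the returned future is free of borrows from the caller. The shutdown handshake is a oneshot channel polled with try_recv on every iteration. A self-contained sketch of that pattern, assuming tokio's oneshot; the worker name and one-second cadence are illustrative:

// Sketch of the shutdown-aware loop behind spawn_bootstrap.
use std::time::Duration;

use tokio::sync::oneshot;
use tokio::sync::oneshot::error::TryRecvError;

async fn worker(mut rx: oneshot::Receiver<bool>) {
    loop {
        let shutdown = match rx.try_recv() {
            Ok(_) => true,                     // kill signal received
            Err(TryRecvError::Empty) => false, // channel currently empty: keep going
            Err(TryRecvError::Closed) => true, // sender dropped: stop as well
        };
        if shutdown {
            break;
        }
        // ...one round of bootstrap work would run here...
        tokio::time::sleep(Duration::from_secs(1)).await;
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = oneshot::channel();
    let handle = tokio::spawn(worker(rx));
    tokio::time::sleep(Duration::from_secs(3)).await;
    tx.send(true).unwrap(); // ask the loop to exit
    handle.await.unwrap();
}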
src/main.rs | 10

@@ -86,11 +86,11 @@ async fn main() {
 
     let crawler_fut = tokio::spawn(spider::Crawler::start(crawler.clone()));
     let ctx = WebCtx::new(ctx);
-    let (kill_introducer, introducer_fut) = ctx
-        .clone()
-        .spawn_bootstrap(db.as_ref().clone())
-        .await
-        .unwrap();
+    let c = ctx.clone();
+    let d = db.clone();
+    let (kill_introducer, introducer_fut) =
+        Ctx::spawn_bootstrap(c, d.as_ref().clone()).await.unwrap();
 
     let c = ctx.clone();
     let d = db.clone();
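With self replaced by an explicit ctx: WebCtx parameter, spawn_bootstrap is a plain associated function, so main.rs must call it as Ctx::spawn_bootstrap(c, ...) rather than with method syntax. A hypothetical miniature of the distinction, assuming WebCtx is an Arc-like wrapper (which the clone-then-pass pattern in the diff suggests):

// Hypothetical miniature: once the receiver is an ordinary parameter,
// only the fully qualified call form compiles.
use std::sync::Arc;

struct Ctx;
type WebCtx = Arc<Ctx>; // stand-in for the project's WebCtx wrapper

impl Ctx {
    // No `self`: an associated function taking the context explicitly.
    fn bootstrap_label(ctx: WebCtx) -> String {
        let _ = ctx; // the context would drive the bootstrap here
        "bootstrap".to_string()
    }
}

fn main() {
    let ctx: WebCtx = Arc::new(Ctx);
    // ctx.bootstrap_label();  // would not compile: not a method
    let label = Ctx::bootstrap_label(ctx.clone());
    assert_eq!(label, "bootstrap");
}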
(Settings module; file name not captured)

@@ -18,13 +18,17 @@
 use std::path::Path;
 use std::{env, fs};
 
-use config::{Config, ConfigError, Environment, File};
+use config::{builder::DefaultState, Config, ConfigBuilder, ConfigError, Environment, File};
 use derive_more::Display;
+use log::info;
 use log::warn;
 use serde::{Deserialize, Serialize};
 use url::Url;
 use validator::Validate;
 
+const PREFIX: &str = "STARCHART";
+const SEPARATOR: &str = "__";
+
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct Server {
     pub port: u32,
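The new PREFIX and SEPARATOR constants replace string literals repeated throughout the settings module, so every env-var name is composed the same way. A trivial check (not project code) of what they produce:

const PREFIX: &str = "STARCHART";
const SEPARATOR: &str = "__";

fn main() {
    let var = format!("{PREFIX}{SEPARATOR}SERVER{SEPARATOR}PORT");
    assert_eq!(var, "STARCHART__SERVER__PORT");
}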
@@ -193,7 +197,14 @@ impl Settings {
             log::warn!("configuration file not found");
         }
 
-        s = s.add_source(Environment::with_prefix("STARCHART").separator("__"));
+        s = s.add_source(
+            Environment::with_prefix(PREFIX)
+                .separator(SEPARATOR)
+                .list_separator(",")
+                .try_parsing(true)
+                .with_list_parse_key("introducer.nodes"),
+        );
+        s = set_separator_field(s);
 
         match env::var("PORT") {
             Ok(val) => s = s.set_override("server.port", val).unwrap(),
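The rebuilt Environment source enables try_parsing and declares introducer.nodes as a list key, so the comma-separated STARCHART__INTRODUCER__NODES value from the Makefile deserializes into a Vec. A sketch under config 0.13-era APIs, with an illustrative struct in place of the project's Settings:

// Sketch: parsing a comma-separated env var into Vec<String>
// via with_list_parse_key. Struct names are illustrative.
use config::{Config, Environment};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Introducer {
    public_url: String,
    nodes: Vec<String>,
}

#[derive(Debug, Deserialize)]
struct Settings {
    introducer: Introducer,
}

fn main() {
    std::env::set_var("STARCHART__INTRODUCER__PUBLIC_URL", "http://localhost:7000");
    std::env::set_var(
        "STARCHART__INTRODUCER__NODES",
        "http://localhost:7001,http://localhost:7002",
    );

    let cfg = Config::builder()
        .add_source(
            Environment::with_prefix("STARCHART")
                .separator("__")
                .list_separator(",")
                .try_parsing(true)
                .with_list_parse_key("introducer.nodes"),
        )
        .build()
        .unwrap();

    let settings: Settings = cfg.try_deserialize().unwrap();
    assert_eq!(settings.introducer.nodes.len(), 2);
}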
@@ -227,6 +238,79 @@ impl Settings {
     }
 }
 
+#[cfg(not(tarpaulin_include))]
+fn set_separator_field(mut s: ConfigBuilder<DefaultState>) -> ConfigBuilder<DefaultState> {
+    // ref: https://github.com/mehcode/config-rs/issues/391
+
+    fn from_env(
+        s: ConfigBuilder<DefaultState>,
+        env_name: &str,
+        config_name: &str,
+    ) -> ConfigBuilder<DefaultState> {
+        if let Ok(val) = env::var(env_name) {
+            info!("Overriding {config_name} with data from env var {env_name}");
+            s.set_override(config_name, val)
+                .unwrap_or_else(|_| panic!("Couldn't set {config_name} from env var {env_name}"))
+        } else {
+            s
+        }
+    }
+
+    s = from_env(s, &format!("{PREFIX}{SEPARATOR}SOURCE_CODE"), "source_code");
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}ALLOW_NEW_INDEX"),
+        "allow_new_index",
+    );
+    s = from_env(s, &format!("{PREFIX}{SEPARATOR}ADMIN_EMAIL"), "admin_email");
+
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}DATABASE{SEPARATOR}TYPE"),
+        "database.database_type",
+    );
+
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}SERVER{SEPARATOR}PROXY_HAS_TLS"),
+        "server.proxy_has_tls",
+    );
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}SERVER{SEPARATOR}COOKIE_SECRET"),
+        "server.cookie_secret",
+    );
+
+    // s = from_env(
+    //     s,
+    //     &format!("{PREFIX}{SEPARATOR}INTRODUCER{SEPARATOR}NODES"),
+    //     "introducer.nodes",
+    // );
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}INTRODUCER{SEPARATOR}PUBLIC_URL"),
+        "introducer.public_url",
+    );
+
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}ITEMS_PER_API_CALL"),
+        "crawler.items_per_api_call",
+    );
+
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}CLIENT_TIMEOUT"),
+        "crawler.client_timeout",
+    );
+    s = from_env(
+        s,
+        &format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}WAIT_BEFORE_NEXT_API_CALL"),
+        "crawler.wait_before_next_api_call",
+    );
+    s
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
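set_separator_field works around a config-rs limitation (the issue #391 referenced in the code) by overriding selected keys by hand: with the "__" separator, STARCHART__DATABASE__TYPE would land on database.type, while the actual field is database.database_type, and several other keys apparently need the same explicit treatment. The commented-out introducer.nodes block is the one key now handled by with_list_parse_key instead. A self-contained sketch of the override pattern, with simplified names rather than the project's exact code:

// Sketch of the manual override: copy one env var onto an explicit
// config key, panicking loudly if the override fails.
use config::{builder::DefaultState, Config, ConfigBuilder};

fn from_env(
    s: ConfigBuilder<DefaultState>,
    env_name: &str,
    config_name: &str,
) -> ConfigBuilder<DefaultState> {
    if let Ok(val) = std::env::var(env_name) {
        s.set_override(config_name, val)
            .unwrap_or_else(|_| panic!("Couldn't set {config_name} from env var {env_name}"))
    } else {
        s // env var absent: leave the builder untouched
    }
}

fn main() {
    std::env::set_var("STARCHART__DATABASE__TYPE", "sqlite");
    // The separator would yield "database.type"; the field is "database_type".
    let s = from_env(
        Config::builder(),
        "STARCHART__DATABASE__TYPE",
        "database.database_type",
    );
    let cfg = s.build().unwrap();
    assert_eq!(cfg.get_string("database.database_type").unwrap(), "sqlite");
}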