diff --git a/Makefile b/Makefile index 581f891..7290d06 100644 --- a/Makefile +++ b/Makefile @@ -41,6 +41,26 @@ default: ## Debug build $(call cache_bust) cargo build +env.serve: + STARCHART__LOG=info \ + STARCHART__SOURCE_CODE="https://github.com/forgeflux-org/starchart" \ + STARCHART__ALLOW_NEW_INDEX=true \ + STARCHART__ADMIN_EMAIL=realaravinth@batsense.net \ + STARCHART__SERVER__IP=0.0.0.0 \ + STARCHART__SERVER__PORT=7000 \ + STARCHART__SERVER__DOMAIN=localhost \ + STARCHART__SERVER__PROXY_HAS_TLS=false \ + STARCHART__SERVER__COOKIE_SECRET=7514316e58bfdb2eb2d71bf4af40827a \ + STARCHART__DATABASE__POOL=5 STARCHART__DATABASE__TYPE=sqlite \ + STARCHART__CRAWLER__TTL=3600 \ + STARCHART__CRAWLER__WAIT_BEFORE_NEXT_API_CALL=2 \ + STARCHART__CRAWLER__CLIENT_TIMEOUT=60 \ + STARCHART__CRAWLER__ITEMS_PER_API_CALL=20 \ + STARCHART__INTRODUCER__PUBLIC_URL="http://localhost:7000" \ + STARCHART__INTRODUCER__NODES=http://localhost:7001,http://localhost:7002 \ + STARCHART__REPOSITORY__ROOT=/tmp/starchart.forgeflux.org \ + cargo run + cache-bust: ## Run cache buster on static assets $(call cache_bust) diff --git a/src/introduce.rs b/src/introduce.rs index 58a0d85..dfa446a 100644 --- a/src/introduce.rs +++ b/src/introduce.rs @@ -30,7 +30,7 @@ use db_core::prelude::*; use crate::ctx::Ctx; use crate::pages::chart::home::{OptionalPage, Page}; -use crate::{errors::*, WebDB}; +use crate::{errors::*, WebCtx, WebDB}; const LIMIT: u32 = 50; @@ -141,11 +141,11 @@ impl Ctx { } pub async fn spawn_bootstrap( - self, + ctx: WebCtx, db: Box<dyn SCDatabase>, ) -> ServiceResult<(Sender<bool>, impl Future)> { let (tx, mut rx) = oneshot::channel(); - let fut = async { + let fut = async move { loop { let shutdown = match rx.try_recv() { // The channel is currently empty @@ -163,7 +163,7 @@ impl Ctx { break; } - let _ = self.bootstrap(db).await; + let _ = ctx.bootstrap(&db).await; } }; diff --git a/src/main.rs b/src/main.rs index 7ecd651..45a8975 100644 --- a/src/main.rs +++ b/src/main.rs @@ -86,11 +86,11 @@ async fn 
main() { let crawler_fut = tokio::spawn(spider::Crawler::start(crawler.clone())); let ctx = WebCtx::new(ctx); - let (kill_introducer, introducer_fut) = ctx - .clone() - .spawn_bootstrap(db.as_ref().clone()) - .await - .unwrap(); + + let c = ctx.clone(); + let d = db.clone(); + let (kill_introducer, introducer_fut) = + Ctx::spawn_bootstrap(c, d.as_ref().clone()).await.unwrap(); let c = ctx.clone(); let d = db.clone(); diff --git a/src/settings.rs b/src/settings.rs index 3616832..9367fbe 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -18,13 +18,17 @@ use std::path::Path; use std::{env, fs}; -use config::{Config, ConfigError, Environment, File}; +use config::{builder::DefaultState, Config, ConfigBuilder, ConfigError, Environment, File}; use derive_more::Display; +use log::info; use log::warn; use serde::{Deserialize, Serialize}; use url::Url; use validator::Validate; +const PREFIX: &str = "STARCHART"; +const SEPARATOR: &str = "__"; + #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct Server { pub port: u32, @@ -193,7 +197,14 @@ impl Settings { log::warn!("configuration file not found"); } - s = s.add_source(Environment::with_prefix("STARCHART").separator("__")); + s = s.add_source( + Environment::with_prefix(PREFIX) + .separator(SEPARATOR) + .list_separator(",") + .try_parsing(true) + .with_list_parse_key("introducer.nodes"), + ); + s = set_separator_field(s); match env::var("PORT") { Ok(val) => s = s.set_override("server.port", val).unwrap(), @@ -227,6 +238,79 @@ impl Settings { } } +#[cfg(not(tarpaulin_include))] +fn set_separator_field(mut s: ConfigBuilder<DefaultState>) -> ConfigBuilder<DefaultState> { + // ref: https://github.com/mehcode/config-rs/issues/391 + + fn from_env( + s: ConfigBuilder<DefaultState>, + env_name: &str, + config_name: &str, + ) -> ConfigBuilder<DefaultState> { + if let Ok(val) = env::var(env_name) { + info!("Overriding {config_name} with data from env var {env_name}"); + s.set_override(config_name, val) + .unwrap_or_else(|_| panic!("Couldn't set {config_name} from 
env var {env_name}")) + } else { + s + } + } + s = from_env(s, &format!("{PREFIX}{SEPARATOR}SOURCE_CODE"), "source_code"); + + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}ALLOW_NEW_INDEX"), + "allow_new_index", + ); + s = from_env(s, &format!("{PREFIX}{SEPARATOR}ADMIN_EMAIL"), "admin_email"); + + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}DATABASE{SEPARATOR}TYPE"), + "database.database_type", + ); + + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}SERVER{SEPARATOR}PROXY_HAS_TLS"), + "server.proxy_has_tls", + ); + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}SERVER{SEPARATOR}COOKIE_SECRET"), + "server.cookie_secret", + ); + + // s = from_env( + // s, + // &format!("{PREFIX}{SEPARATOR}INTRODUCER{SEPARATOR}NODES"), + // "introducer.nodes", + // ); + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}INTRODUCER{SEPARATOR}PUBLIC_URL"), + "introducer.public_url", + ); + + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}ITEMS_PER_API_CALL"), + "crawler.items_per_api_call", + ); + + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}CLIENT_TIMEOUT"), + "crawler.client_timeout", + ); + s = from_env( + s, + &format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}WAIT_BEFORE_NEXT_API_CALL"), + "crawler.wait_before_next_api_call", + ); + s +} + #[cfg(test)] mod tests { use super::*;