feat: init and load db

parent 086c4f5911
commit 8150aa9ca1

6 changed files with 234 additions and 8 deletions
@@ -1,3 +1,4 @@
+debug = true
 # source code of your copy of pages server.
 source_code = "https://github.com/realaravinth/pages"
 
@@ -20,3 +21,21 @@ ip= "0.0.0.0"
 # with one also.
 workers = 2
 domain = "demo.librepages.org"
+
+
+[database]
+# This section deals with the database location and how to access it
+# Please note that at the moment, we have support for only postgresql.
+# Example, if you are Batman, your config would be:
+# hostname = "batcave.org"
+# port = "5432"
+# username = "batman"
+# password = "somereallycomplicatedBatmanpassword"
+
+hostname = "localhost"
+port = "5432"
+username = "postgres"
+password = "password"
+name = "postgres"
+pool = 4
+database_type = "postgres" # "postgres"
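For context, the settings loader (see the src/settings changes below) layers an Environment source with prefix PAGES and separator "__" on top of this file, so any key in the [database] table can be overridden without editing the config. A minimal sketch, assuming the loader is reachable as crate::settings::Settings; the function name and values are hypothetical:

use crate::settings::Settings;

// Sketch, not part of this commit: PAGES__DATABASE__<KEY> overrides the
// [database] table above; the overridden values feed the assembled
// database.url during Settings::new().
fn load_with_env_overrides() -> Settings {
    std::env::set_var("PAGES__DATABASE__HOSTNAME", "db.example.org");
    std::env::set_var("PAGES__DATABASE__PASSWORD", "not-the-default");
    Settings::new().unwrap()
}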
@@ -16,15 +16,20 @@
  */
 use std::sync::Arc;
 
+use crate::db::*;
 use crate::settings::Settings;
 
+pub type ArcCtx = Arc<Ctx>;
+
 #[derive(Clone)]
 pub struct Ctx {
     pub settings: Settings,
+    pub db: Database,
 }
 
 impl Ctx {
-    pub fn new(settings: Settings) -> Arc<Self> {
-        Arc::new(Self { settings })
+    pub async fn new(settings: Settings) -> Arc<Self> {
+        let db = get_db(&settings).await;
+        Arc::new(Self { settings, db })
     }
 }
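Because Ctx::new is now async (it awaits the database connection), call sites need an async runtime. A minimal sketch of an assumed call site, not taken from this commit; the module paths are hypothetical:

use crate::ctx::Ctx;
use crate::settings::Settings;

// Sketch of an assumed call site: the context is built once at startup
// and now carries a live database pool in its db field.
#[actix_rt::main]
async fn main() {
    let settings = Settings::new().unwrap();
    let ctx = Ctx::new(settings).await;
    assert!(ctx.db.ping().await);
}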
src/db.rs (new file, 131 lines)
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+use std::str::FromStr;
+
+use sqlx::postgres::PgPoolOptions;
+use sqlx::types::time::OffsetDateTime;
+//use sqlx::types::Json;
+use sqlx::ConnectOptions;
+use sqlx::PgPool;
+
+use crate::errors::*;
+
+/// Connect to database
+pub enum ConnectionOptions {
+    /// fresh connection
+    Fresh(Fresh),
+    /// existing connection
+    Existing(Conn),
+}
+
+/// Use an existing database pool
+pub struct Conn(pub PgPool);
+
+pub struct Fresh {
+    pub pool_options: PgPoolOptions,
+    pub disable_logging: bool,
+    pub url: String,
+}
+
+impl ConnectionOptions {
+    async fn connect(self) -> ServiceResult<Database> {
+        let pool = match self {
+            Self::Fresh(fresh) => {
+                let mut connect_options =
+                    sqlx::postgres::PgConnectOptions::from_str(&fresh.url).unwrap();
+                if fresh.disable_logging {
+                    connect_options.disable_statement_logging();
+                }
+                sqlx::postgres::PgConnectOptions::from_str(&fresh.url)
+                    .unwrap()
+                    .disable_statement_logging();
+                fresh
+                    .pool_options
+                    .connect_with(connect_options)
+                    .await
+                    .unwrap()
+                //.map_err(|e| DBError::DBError(Box::new(e)))?
+            }
+
+            Self::Existing(conn) => conn.0,
+        };
+        Ok(Database { pool })
+    }
+}
+
+#[derive(Clone)]
+pub struct Database {
+    pub pool: PgPool,
+}
+
+impl Database {
+    pub async fn migrate(&self) -> ServiceResult<()> {
+        sqlx::migrate!("./migrations/")
+            .run(&self.pool)
+            .await
+            .unwrap();
+        //.map_err(|e| DBError::DBError(Box::new(e)))?;
+        Ok(())
+    }
+
+    pub async fn ping(&self) -> bool {
+        use sqlx::Connection;
+
+        if let Ok(mut con) = self.pool.acquire().await {
+            con.ping().await.is_ok()
+        } else {
+            false
+        }
+    }
+}
+
+fn now_unix_time_stamp() -> OffsetDateTime {
+    OffsetDateTime::now_utc()
+}
+
+pub async fn get_db(settings: &crate::settings::Settings) -> Database {
+    let pool_options = PgPoolOptions::new().max_connections(settings.database.pool);
+    ConnectionOptions::Fresh(Fresh {
+        pool_options,
+        url: settings.database.url.clone(),
+        disable_logging: !settings.debug,
+    })
+    .connect()
+    .await
+    .unwrap()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::settings::Settings;
+
+    #[actix_rt::test]
+    async fn db_works() {
+        let settings = Settings::new().unwrap();
+        let pool_options = PgPoolOptions::new().max_connections(1);
+        let db = ConnectionOptions::Fresh(Fresh {
+            pool_options,
+            url: settings.database.url.clone(),
+            disable_logging: !settings.debug,
+        })
+        .connect()
+        .await
+        .unwrap();
+        assert!(db.ping().await);
+    }
+}
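For orientation, a sketch of how the public surface of this module is meant to be used together. The wrapper function is hypothetical and not part of this commit; get_db, Database::migrate and Database::ping are the items defined above:

use crate::db::{get_db, Database};
use crate::settings::Settings;

// Hypothetical helper: connect using the loaded settings, apply pending
// migrations, and verify the pool with a ping before handing it out.
async fn init_db(settings: &Settings) -> Database {
    let db: Database = get_db(settings).await;
    db.migrate().await.unwrap();
    assert!(db.ping().await);
    db
}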
@@ -14,7 +14,7 @@
  * You should have received a copy of the GNU Affero General Public License
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
-//! represents all the ways a trait can fail using this crate
+//! Represents all the ways a trait can fail using this crate
 use std::convert::From;
 use std::io::Error as FSErrorInner;
 use std::sync::Arc;
@@ -18,7 +18,8 @@ use std::env;
 use std::path::Path;
 use std::sync::Arc;
 
-use config::{Config, Environment, File};
+use config::{Config, ConfigError, Environment, File};
+use derive_more::Display;
 #[cfg(not(test))]
 use log::{error, warn};
 
@@ -26,6 +27,7 @@ use log::{error, warn};
 use std::{println as warn, println as error};
 
 use serde::Deserialize;
+use serde::Serialize;
 use url::Url;
 
 use crate::errors::*;
@@ -46,11 +48,39 @@ impl Server {
     }
 }
 
+#[derive(Deserialize, Serialize, Display, Eq, PartialEq, Clone, Debug)]
+#[serde(rename_all = "lowercase")]
+pub enum DBType {
+    #[display(fmt = "postgres")]
+    Postgres,
+    // #[display(fmt = "maria")]
+    // Maria,
+}
+
+impl DBType {
+    fn from_url(url: &Url) -> Result<Self, ConfigError> {
+        match url.scheme() {
+            // "mysql" => Ok(Self::Maria),
+            "postgres" => Ok(Self::Postgres),
+            _ => Err(ConfigError::Message("Unknown database type".into())),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct Database {
+    pub url: String,
+    pub pool: u32,
+    pub database_type: DBType,
+}
+
 #[derive(Debug, Clone, Deserialize)]
 pub struct Settings {
+    pub debug: bool,
     pub server: Server,
     pub source_code: String,
     pub pages: Vec<Arc<Page>>,
+    pub database: Database,
 }
 
 #[cfg(not(tarpaulin_include))]
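The scheme check is all DBType::from_url looks at. A small sketch of its behavior; from_url is private, so this would only compile inside the settings module, and the URLs are examples:

use url::Url;

// Example inputs for DBType::from_url: only the URL scheme is inspected,
// everything else is ignored. Maria support is still commented out.
fn from_url_examples() {
    let pg = Url::parse("postgres://postgres:password@localhost:5432/postgres").unwrap();
    assert_eq!(DBType::from_url(&pg).unwrap(), DBType::Postgres);

    let other = Url::parse("mysql://user:pass@localhost:3306/db").unwrap();
    assert!(DBType::from_url(&other).is_err());
}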
@@ -84,15 +114,56 @@ impl Settings {
 
         s = s.add_source(Environment::with_prefix("PAGES").separator("__"));
 
-        let mut settings = s.build()?.try_deserialize::<Settings>()?;
-        settings.check_url();
         match env::var("PORT") {
             Ok(val) => {
-                settings.server.port = val.parse().unwrap();
+                s = s.set_override("server.port", val).unwrap();
+                //settings.server.port = val.parse().unwrap();
             }
             Err(e) => warn!("couldn't interpret PORT: {}", e),
         }
 
+        if let Ok(val) = env::var("DATABASE_URL") {
+            // match env::var("DATABASE_URL") {
+            //     Ok(val) => {
+            let url = Url::parse(&val).expect("couldn't parse Database URL");
+            s = s.set_override("database.url", url.to_string()).unwrap();
+            let database_type = DBType::from_url(&url).unwrap();
+            s = s
+                .set_override("database.database_type", database_type.to_string())
+                .unwrap();
+        }
+
+        // Err(_e) => {
+        // }
+
+        let intermediate_config = s.build_cloned().unwrap();
+
+        s = s
+            .set_override(
+                "database.url",
+                format!(
+                    r"postgres://{}:{}@{}:{}/{}",
+                    intermediate_config
+                        .get::<String>("database.username")
+                        .expect("Couldn't access database username"),
+                    intermediate_config
+                        .get::<String>("database.password")
+                        .expect("Couldn't access database password"),
+                    intermediate_config
+                        .get::<String>("database.hostname")
+                        .expect("Couldn't access database hostname"),
+                    intermediate_config
+                        .get::<String>("database.port")
+                        .expect("Couldn't access database port"),
+                    intermediate_config
+                        .get::<String>("database.name")
+                        .expect("Couldn't access database name")
+                ),
+            )
+            .expect("Couldn't set database url");
+
+        let settings = s.build()?.try_deserialize::<Settings>()?;
+        settings.check_url();
         settings.init();
 
         Ok(settings)
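Taken together: DATABASE_URL, when set, seeds database.database_type from its scheme, while the final database.url is always assembled from database.username, password, hostname, port and name in the merged configuration. A hypothetical test (not part of this commit) against the default config shown above, assuming no environment overrides:

use crate::settings::{DBType, Settings};

// Hypothetical test: with the default [database] table and no overrides,
// the loader assembles database.url from the individual fields.
#[test]
fn default_database_url_is_assembled() {
    std::env::remove_var("DATABASE_URL");
    let settings = Settings::new().unwrap();
    assert_eq!(
        settings.database.url,
        "postgres://postgres:password@localhost:5432/postgres"
    );
    assert_eq!(settings.database.database_type, DBType::Postgres);
}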
@@ -49,7 +49,7 @@ pub async fn get_data() -> (Temp, Arc<Ctx>) {
     println!("[log] Initializing settings again with test config");
     settings.init();
 
-    (tmp_dir, Ctx::new(settings))
+    (tmp_dir, Ctx::new(settings).await)
 }
 
 #[allow(dead_code, clippy::upper_case_acronyms)]