feat: publish benchmark data periodically (configurable)
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed

Aravinth Manivannan 2023-03-12 20:11:06 +05:30
parent 9411c2ba9f
commit a3f2c3632e
Signed by: realaravinth
GPG key ID: AD9F0F08E855ED88
4 changed files with 74 additions and 18 deletions

View file

@@ -36,6 +36,7 @@ pool = 4
 [publish]
 dir = "/tmp/mcaptcha-survey"
+duration = 3600
 [footer]
 about = "https://mcapthca.org/about"

View file

@@ -14,13 +14,15 @@
  * You should have received a copy of the GNU Affero General Public License
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
+use std::future::Future;
 use std::path::{Path, PathBuf};
 use serde::{Deserialize, Serialize};
 use sqlx::types::time::OffsetDateTime;
+use sqlx::types::Uuid;
 use tokio::fs;
 use tokio::io::AsyncWriteExt;
-use uuid::Uuid;
+use tokio::sync::oneshot::{self, error::TryRecvError, Sender};
 use crate::api::v1::admin::campaigns::runners::get_results;
 use crate::api::v1::admin::campaigns::SurveyResponse;
@@ -68,7 +70,7 @@ impl Archive {
 impl Archiver {
     pub fn new(s: &Settings) -> Self {
         Archiver {
-            base_path: s.archive.base_path.clone(),
+            base_path: s.publish.dir.clone(),
         }
     }
@@ -163,7 +165,7 @@ impl Archiver {
            WHERE
                survey_campaigns.ID = $1
            ",
-            &c.id
+            &Uuid::parse_str(&c.id.to_string()).unwrap()
        )
        .fetch_one(&data.db)
        .await?;
@@ -183,8 +185,15 @@ impl Archiver {
        wri.write_record(&keys).await.unwrap();
        loop {
-            let mut resp =
-                get_results(&owner.name, &c.id, data, page, limit, None).await?;
+            let mut resp = get_results(
+                &owner.name,
+                &Uuid::parse_str(&c.id.to_string()).unwrap(),
+                data,
+                page,
+                limit,
+                None,
+            )
+            .await?;
            for r in resp.drain(0..) {
                let rec = Self::extract_record(c, r);
@@ -201,6 +210,40 @@ impl Archiver {
        Ok(())
    }
+    pub async fn init_archive_job(
+        self,
+        data: AppData,
+    ) -> ServiceResult<(Sender<bool>, impl Future)> {
+        let (tx, mut rx) = oneshot::channel();
+        let job = async move {
+            loop {
+                // let rx = self.rx.as_mut().unwrap();
+                match rx.try_recv() {
+                    // The channel is currently empty
+                    Ok(_) => {
+                        log::info!("Killing archive loop: received signal");
+                        break;
+                    }
+                    Err(TryRecvError::Empty) => {
+                        let _ = self.archive(&data).await;
+                        tokio::time::sleep(std::time::Duration::new(
+                            data.settings.publish.duration,
+                            0,
+                        ))
+                        .await;
+                    }
+                    Err(TryRecvError::Closed) => break,
+                }
+                let _ = self.archive(&data).await;
+            }
+        };
+        let job_fut = tokio::spawn(job);
+        Ok((tx, job_fut))
+    }
    pub async fn archive(&self, data: &AppData) -> ServiceResult<()> {
        let mut db_campaigns = sqlx::query_as!(
            InnerCampaign,
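Note: init_archive_job above spawns a loop that archives, sleeps for publish.duration seconds, and polls a oneshot receiver to know when to stop. Below is a standalone sketch of that shutdown pattern with illustrative names (it is not the repository's API); one consequence of polling between sleeps is that a stop signal is only observed after the current sleep finishes, so shutdown can lag by up to the configured duration.

// Standalone sketch: a periodic task polls a oneshot receiver with try_recv()
// and exits when a value arrives or the sender is dropped.
use std::time::Duration;
use tokio::sync::oneshot::{self, error::TryRecvError};

#[tokio::main]
async fn main() {
    let (tx, mut rx) = oneshot::channel::<bool>();

    let job = tokio::spawn(async move {
        loop {
            match rx.try_recv() {
                // A value was sent: stop the loop.
                Ok(_) => break,
                // No signal yet: do one unit of work, then sleep.
                Err(TryRecvError::Empty) => {
                    println!("archiving...");
                    tokio::time::sleep(Duration::from_secs(1)).await;
                }
                // Sender dropped: also stop.
                Err(TryRecvError::Closed) => break,
            }
        }
    });

    tokio::time::sleep(Duration::from_millis(2500)).await;
    tx.send(true).unwrap();
    job.await.unwrap();
}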
@@ -209,8 +252,8 @@ impl Archiver {
        .fetch_all(&data.db)
        .await?;
        for c in db_campaigns.drain(0..) {
-            let archive = Archive::new(c.id.clone(), self.base_path.clone());
            let campaign: Campaign = c.into();
+            let archive = Archive::new(campaign.id.clone(), self.base_path.clone());
            self.write_campaign_file(&campaign, &archive).await?;
            self.write_benchmark_file(&campaign, &archive, data).await?;
        }
@@ -228,7 +271,7 @@ struct InnerCampaign {
 #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
 pub struct Campaign {
-    pub id: Uuid,
+    pub id: uuid::Uuid,
     pub name: String,
     pub difficulties: Vec<u32>,
     pub created_at: i64,
@@ -237,7 +280,7 @@ pub struct Campaign {
 impl From<InnerCampaign> for Campaign {
     fn from(i: InnerCampaign) -> Self {
         Self {
-            id: i.id,
+            id: uuid::Uuid::parse_str(&i.id.to_string()).unwrap(),
             name: i.name,
             difficulties: i.difficulties.iter().map(|d| *d as u32).collect(),
             created_at: i.created_at.unix_timestamp(),
@@ -263,14 +306,14 @@ mod tests {
    fn archive_path_works() {
        let mut settings = Settings::new().unwrap();
        let tmp_dir = Temp::new_dir().unwrap();
-        settings.archive.base_path = tmp_dir.join("base_path").to_str().unwrap().into();
+        settings.publish.dir = tmp_dir.join("base_path").to_str().unwrap().into();
        let uuid = Uuid::new_v4();
-        let archive = Archive::new(uuid.clone(), settings.archive.base_path.clone());
+        let archive = Archive::new(uuid.clone(), settings.publish.dir.clone());
        let archive_path = archive.archive_path_now();
        assert_eq!(
            archive_path,
-            Path::new(&settings.archive.base_path)
+            Path::new(&settings.publish.dir)
                .join(&uuid.to_string())
                .join(&archive.now.to_string())
        );
@@ -278,7 +321,7 @@ mod tests {
        let campaign_file_path = archive.campaign_file_path();
        assert_eq!(
            campaign_file_path,
-            Path::new(&settings.archive.base_path)
+            Path::new(&settings.publish.dir)
                .join(&uuid.to_string())
                .join(&archive.now.to_string())
                .join(CAMPAIGN_INFO_FILE)
@@ -287,7 +330,7 @@ mod tests {
        let benchmark_file_path = archive.benchmark_file_path();
        assert_eq!(
            benchmark_file_path,
-            Path::new(&settings.archive.base_path)
+            Path::new(&settings.publish.dir)
                .join(&uuid.to_string())
                .join(&archive.now.to_string())
                .join(BENCHMARK_FILE)
@@ -348,8 +391,10 @@ mod tests {
        .await.unwrap();
        let campaign: Campaign = db_campaign.into();
-        let archive =
-            Archive::new(campaign.id.clone(), data.settings.archive.base_path.clone());
+        let archive = Archive::new(
+            Uuid::parse_str(&campaign.id.to_string()).unwrap(),
+            data.settings.publish.dir.clone(),
+        );
        let archiver = Archiver::new(&data.settings);
        archiver.archive(&AppData::new(data.clone())).await.unwrap();
        let contents: Campaign = serde_json::from_str(
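Note: several hunks above convert between the database row's sqlx::types::Uuid and the uuid::Uuid expected elsewhere by round-tripping through the string form, presumably because the two crates pin different uuid versions and the types are therefore distinct (the version mismatch is an inference, not stated in the commit). A minimal demonstration of that round-trip, using only the uuid crate:

use uuid::Uuid;

fn main() {
    let id = Uuid::new_v4();
    // to_string() then parse_str() is lossless for any valid UUID, so it is a
    // simple (if allocation-heavy) bridge between otherwise incompatible Uuid
    // types re-exported by different crates.
    let bridged = Uuid::parse_str(&id.to_string()).unwrap();
    assert_eq!(id, bridged);
}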

View file

@@ -72,7 +72,9 @@ pub type AppData = actix_web::web::Data<Arc<crate::data::Data>>;
 #[cfg(not(tarpaulin_include))]
 #[actix_web::main]
 async fn main() -> std::io::Result<()> {
-    //env::set_var("RUST_LOG", "info");
+    if env::var("RUST_LOG").is_err() {
+        env::set_var("RUST_LOG", "info");
+    }
     pretty_env_logger::init();
@@ -87,7 +89,8 @@ async fn main() -> std::io::Result<()> {
    let data = actix_web::web::Data::new(data);
    let arch = archive::Archiver::new(&data.settings);
-    arch.archive(&data).await.unwrap();
+    let (archive_kiler, archive_job) =
+        arch.init_archive_job(data.clone()).await.unwrap();
    let ip = settings.server.get_ip();
    println!("Starting server on: http://{}", ip);
@@ -106,14 +109,19 @@ async fn main() -> std::io::Result<()> {
            .wrap(actix_middleware::NormalizePath::new(
                actix_middleware::TrailingSlash::Trim,
            ))
-            .configure(services)
            .service(Files::new("/download", &settings.publish.dir).show_files_listing())
+            .configure(services)
            .app_data(data.clone())
    })
    .bind(ip)
    .unwrap()
    .run()
    .await
+    .unwrap();
+    archive_kiler.send(true).unwrap();
+    archive_job.await;
+    Ok(())
 }
 #[cfg(not(tarpaulin_include))]
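Note: main.rs now serves the publish directory at /download and registers the static handler before .configure(services); actix-web matches routes in registration order, so this presumably keeps /download from being shadowed by routes registered inside services (the motivation is inferred, not stated in the commit). Once the HTTP server exits, the oneshot sender (archive_kiler) signals the archive loop and main awaits the spawned job. A compact sketch of just the static-file part, with a placeholder directory:

// Sketch: serve a publish directory with actix-files, mirroring the
// `/download` route above. The directory path is a placeholder.
use actix_files::Files;
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let publish_dir = "/tmp/mcaptcha-survey";
    std::fs::create_dir_all(publish_dir)?;

    HttpServer::new(move || {
        App::new()
            // Registered first so later, broader routes cannot shadow /download.
            .service(Files::new("/download", publish_dir).show_files_listing())
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}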

View file

@@ -85,8 +85,10 @@ pub struct Footer {
     pub thanks: Url,
 }
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Publish {
     pub dir: String,
+    pub duration: u64,
 }
 impl Publish {
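For completeness, a small sketch of turning the new duration field into a std::time::Duration; the interval() helper is hypothetical and not part of this commit (the archive loop above builds the value inline with std::time::Duration::new(secs, 0)):

use std::time::Duration;

#[derive(Debug, Clone)]
pub struct Publish {
    pub dir: String,
    pub duration: u64, // seconds between publish runs
}

impl Publish {
    // Hypothetical convenience helper; not part of this commit.
    pub fn interval(&self) -> Duration {
        Duration::from_secs(self.duration)
    }
}

fn main() {
    let p = Publish { dir: "/tmp/mcaptcha-survey".into(), duration: 3600 };
    assert_eq!(p.interval(), Duration::new(3600, 0));
}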