feat: implement repo API search in REST API

Author: Aravinth Manivannan 2023-02-26 23:16:25 +05:30
parent 2755e346d7
commit 5ed1ebacef
Signed by: realaravinth
GPG key ID: AD9F0F08E855ED88
6 changed files with 315 additions and 1 deletion

View file

@ -15,3 +15,6 @@ license = "AGPLv3 or later version"
[dependencies.serde]
features = ["derive"]
version = "1"
[dependencies.db-core]
path = "../db/db-core"

View file

@ -17,17 +17,33 @@
*/
use serde::{Deserialize, Serialize};

use db_core::Repository;

pub const ROUTES: Api = Api::new();

#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)]
pub struct Search {
    pub repository: &'static str,
}

impl Search {
    const fn new() -> Search {
        let repository = "/api/v1/search/repository";
        Search { repository }
    }
}

#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)]
pub struct Api {
    pub get_latest: &'static str,
    pub search: Search,
}
impl Api {
    const fn new() -> Api {
        let get_latest = "/api/v1/federated/latest";
        let search = Search::new();
        Api { get_latest, search }
    }
}
@ -35,3 +51,13 @@ impl Api {
pub struct LatestResp {
    pub latest: String,
}

#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct SearchRepositoryReq {
    pub query: String,
}

#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct SearchRepositoryResp {
    pub repositories: Vec<Repository>,
}
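For context, the request type above translates into the JSON body a client POSTs to ROUTES.search.repository ("/api/v1/search/repository"). A minimal client-side sketch; it mirrors SearchRepositoryReq locally and assumes serde_json is available to the caller, which is not part of this diff:

// Sketch only: shows the wire format implied by the types above.
use serde::Serialize;

#[derive(Serialize)]
struct SearchRepositoryReq {
    query: String,
}

fn main() {
    let req = SearchRepositoryReq {
        query: "starchart".into(),
    };
    // Body sent to POST /api/v1/search/repository
    println!("{}", serde_json::to_string(&req).unwrap()); // {"query":"starchart"}
    // The response body deserializes into SearchRepositoryResp { repositories: Vec<Repository> }.
}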

View file

@ -1,5 +1,15 @@
{
"db": "SQLite",
"069a127ffb1062321bba5a915a4ead3e0bbe4dabf6e0f684d8cc6a6d8a68ad5b": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 4
}
},
"query": "INSERT OR IGNORE INTO fts_repositories ( name, description, website, html_url ) \n VALUES ( $1, $2, $3, $4 );"
},
"0bb37cc79d5ef803285d05d06e6ef93b62c0b532c0298148fe436178761fd70a": {
"describe": {
"columns": [
@ -18,6 +28,16 @@
},
"query": "SELECT ID FROM starchart_forges WHERE hostname = $1"
},
"0f68a0cf8dbe0566dd6e0962c656382095694428dcf8990dc20d88601b56445b": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "INSERT OR IGNORE INTO fts_project_topics ( name ) VALUES ( $1 );"
},
"0fbcc736f60b14d55fbd88031a2929d04de02f5244345c2bc0f0e58d4c29cb14": {
"describe": {
"columns": [
@ -264,6 +284,16 @@
},
"query": "SELECT \n starchart_forges.hostname,\n starchart_users.username,\n starchart_repositories.name,\n starchart_repositories.description,\n starchart_repositories.html_url,\n starchart_repositories.ID,\n starchart_repositories.website,\n starchart_repositories.imported\n FROM\n starchart_repositories\n INNER JOIN\n starchart_forges\n ON\n starchart_repositories.hostname_id = starchart_forges.id\n INNER JOIN\n starchart_users\n ON\n starchart_repositories.owner_id = starchart_users.id\n ORDER BY\n starchart_repositories.ID\n LIMIT $1 OFFSET $2\n ;"
},
"989fdcfc0088ac19ffbe29bba5349d2dcade134fa2b450769ec617d91f88ffe1": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "INSERT OR IGNORE INTO fts_users ( username ) VALUES ( $1 );"
},
"9978a056397522cf1375900bb00c55bc17685dcc9cb22127b21a24b194a1e536": {
"describe": {
"columns": [],
@ -274,6 +304,24 @@
},
"query": "INSERT INTO\n starchart_forges (hostname, verified_on, forge_type, imported) \n VALUES ($1, $2, (SELECT ID FROM starchart_forge_type WHERE name = $3), $4)"
},
"a60479593819cc7dc7ec87a2d7a25a8960dc2efa1f59c5187d2084c79e26e3f8": {
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
}
],
"nullable": [
false
],
"parameters": {
"Right": 2
}
},
"query": "SELECT name FROM starchart_project_topics ORDER BY ID LIMIT $1 OFFSET $2;"
},
"a81dd4b5df666e22fac211092e7b8425d838dd9023aa2b17659352f30831944d": {
"describe": {
"columns": [
@ -292,6 +340,66 @@
},
"query": "SELECT ID FROM starchart_users WHERE username = $1 AND \n hostname_id = (SELECT ID FROM starchart_forges WHERE hostname = $2)"
},
"ae9295f46da1753fe91a633b5738533084005ad32915c19635f896c454284b6b": {
"describe": {
"columns": [
{
"name": "hostname",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "username",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "html_url",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "ID",
"ordinal": 5,
"type_info": "Int64"
},
{
"name": "website",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "imported",
"ordinal": 7,
"type_info": "Bool"
}
],
"nullable": [
false,
false,
false,
true,
false,
false,
true,
false
],
"parameters": {
"Right": 1
}
},
"query": "SELECT \n starchart_forges.hostname,\n starchart_users.username,\n starchart_repositories.name,\n starchart_repositories.description,\n starchart_repositories.html_url,\n starchart_repositories.ID,\n starchart_repositories.website,\n starchart_repositories.imported\n FROM\n starchart_repositories\n INNER JOIN\n starchart_forges\n ON\n starchart_repositories.hostname_id = starchart_forges.id\n INNER JOIN\n starchart_users\n ON\n starchart_repositories.owner_id = starchart_users.id\n WHERE starchart_repositories.html_url = $1\n ;"
},
"ca22e5f6e7065cf2d4ffdbfac0084f9871de8cd9073d470cbf7eac2de2a73c47": {
"describe": {
"columns": [],
@ -302,6 +410,24 @@
},
"query": "INSERT INTO \n starchart_repositories (\n hostname_id, owner_id, name, description, html_url, website, created,\n last_crawl, imported\n )\n VALUES (\n (SELECT ID FROM starchart_forges WHERE hostname = $1),\n (SELECT ID FROM starchart_users WHERE username = $2),\n $3, $4, $5, $6, $7, $8, $9\n );"
},
"d22d18f0c2b49a570ac95e1af5ba398b3d1c7a1a5a6780d6dc646077e466714c": {
"describe": {
"columns": [
{
"name": "username",
"ordinal": 0,
"type_info": "Text"
}
],
"nullable": [
false
],
"parameters": {
"Right": 2
}
},
"query": "SELECT username FROM starchart_users ORDER BY ID LIMIT $1 OFFSET $2"
},
"e30ccfaa6aeda8cf30a2b3e9134abd0c0420441c5ed05189c3be605b1405c8e9": {
"describe": {
"columns": [],
@ -312,6 +438,24 @@
},
"query": "\n INSERT INTO starchart_repository_topic_mapping ( topic_id, repository_id )\n VALUES (\n (SELECT ID FROM starchart_project_topics WHERE name = $1),\n (SELECT ID FROM starchart_repositories WHERE html_url = $2)\n );"
},
"e570081e9603b61c9df03d5cbcfcc4bb23b1f6ba077448ed1b743ef3e0cfbf39": {
"describe": {
"columns": [
{
"name": "html_url",
"ordinal": 0,
"type_info": "Null"
}
],
"nullable": [
true
],
"parameters": {
"Right": 1
}
},
"query": "SELECT html_url FROM fts_repositories WHERE html_url MATCH $1;"
},
"ed20e6fcac091172778119cdfc68c8e511224205a0bb7c74df192dcdfbfe63f2": {
"describe": {
"columns": [],

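The new queries against fts_repositories, fts_project_topics, and fts_users presuppose SQLite full-text search tables; the migration itself is not part of this diff. A sketch of what such tables could look like, with table and column names taken from the INSERT statements above — the helper name and the use of FTS5 are assumptions, not the repository's actual schema:

// Assumed shape of the FTS virtual tables behind the new queries; the crate's real
// migration may differ. Requires sqlx with the "sqlite" feature enabled.
async fn create_fts_tables(pool: &sqlx::SqlitePool) -> Result<(), sqlx::Error> {
    sqlx::query(
        "CREATE VIRTUAL TABLE IF NOT EXISTS fts_repositories
             USING fts5(name, description, website, html_url);",
    )
    .execute(pool)
    .await?;
    sqlx::query("CREATE VIRTUAL TABLE IF NOT EXISTS fts_project_topics USING fts5(name);")
        .execute(pool)
        .await?;
    sqlx::query("CREATE VIRTUAL TABLE IF NOT EXISTS fts_users USING fts5(username);")
        .execute(pool)
        .await?;
    Ok(())
}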
sqlx-data.json Normal file
View file

@ -0,0 +1,3 @@
{
"db": "SQLite"
}

View file

@ -22,6 +22,7 @@ use actix_web_codegen_const_routes::get;
pub use api_routes::*;
use crate::errors::*;
use crate::search;
use crate::WebFederate;
#[get(path = "ROUTES.get_latest")]
@ -32,4 +33,5 @@ pub async fn lastest(federate: WebFederate) -> ServiceResult<impl Responder> {
pub fn services(cfg: &mut web::ServiceConfig) {
    cfg.service(lastest);
    search::services(cfg);
}
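The pattern above, where each module exposes a services function and the parent forwards its ServiceConfig, composes into the actix-web App roughly as in this self-contained sketch; the handler name, path, and bind address are placeholders, not this crate's real ones:

// Minimal sketch of the nested-registration pattern used above. Everything here is
// illustrative; the real handlers live in this crate's api and search modules.
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};

#[get("/api/v1/example")]
async fn example() -> impl Responder {
    HttpResponse::Ok().finish()
}

mod search {
    use actix_web::web;

    pub fn services(_cfg: &mut web::ServiceConfig) {
        // search handlers would be registered here, e.g. _cfg.service(search_repository)
    }
}

pub fn services(cfg: &mut web::ServiceConfig) {
    cfg.service(example);
    search::services(cfg);
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().configure(services))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}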

src/search.rs Normal file
View file

@ -0,0 +1,136 @@
/*
* ForgeFlux StarChart - A federated software forge spider
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use crate::errors::*;
use actix_web::web;
use actix_web::{HttpResponse, Responder};
use actix_web_codegen_const_routes::post;
use crate::WebDB;
pub use crate::api::{SearchRepositoryReq, ROUTES};
#[post(path = "ROUTES.search.repository")]
pub async fn search_repository(
payload: web::Json<SearchRepositoryReq>,
db: WebDB,
) -> ServiceResult<impl Responder> {
let payload = payload.into_inner();
let query = if !payload.query.contains('*') {
payload.query
} else {
format!("*{}*", payload.query)
};
println!("{}", query);
let resp = db.search_repository(&query).await?;
println!("search_repository method: {:?}", resp);
Ok(HttpResponse::Ok().json(resp))
}
pub fn services(cfg: &mut web::ServiceConfig) {
    cfg.service(search_repository);
}
#[cfg(test)]
mod tests {
    use actix_web::http::StatusCode;
    use actix_web::test;
    use url::Url;

    use super::*;
    use db_core::prelude::*;

    use crate::tests::*;
    use crate::*;

    #[actix_rt::test]
    async fn search_works() {
        const URL: &str = "https://search-works-test.example.com";
        const HTML_PROFILE_URL: &str = "https://search-works-test.example.com/user1";
        const USERNAME: &str = "user1";
        const REPO_NAME: &str = "asdlkfjaldsfjaksdf";
        const HTML_REPO_URL: &str =
            "https://search-works-test.example.com/user1/asdlkfjaldsfjaksdf";
        const TAGS: [&str; 3] = ["test", "starchart", "spider"];

        let (db, ctx, federate, _tmpdir) = sqlx_sqlite::get_ctx().await;
        let app = get_app!(ctx, db, federate).await;

        let url = Url::parse(URL).unwrap();

        let create_forge_msg = CreateForge {
            url: url.clone(),
            forge_type: ForgeImplementation::Gitea,
            import: false,
        };

        let add_user_msg = AddUser {
            url: url.clone(),
            html_link: HTML_PROFILE_URL,
            profile_photo: None,
            username: USERNAME,
            import: false,
        };

        let add_repo_msg = AddRepository {
            html_link: HTML_REPO_URL,
            name: REPO_NAME,
            tags: Some(TAGS.into()),
            owner: USERNAME,
            website: None,
            description: None,
            url,
            import: false,
        };

        let _ = db.delete_forge_instance(&create_forge_msg.url).await;
        db.create_forge_instance(&create_forge_msg).await.unwrap();
        assert!(
            db.forge_exists(&create_forge_msg.url).await.unwrap(),
            "forge creation failed, forge existence check failure"
        );

        // add user
        db.add_user(&add_user_msg).await.unwrap();
        // add repository
        db.create_repository(&add_repo_msg).await.unwrap();
        // verify repo exists
        assert!(db
            .repository_exists(add_repo_msg.name, add_repo_msg.owner, &add_repo_msg.url)
            .await
            .unwrap());

        // test starts
        let payload = SearchRepositoryReq {
            query: REPO_NAME[0..REPO_NAME.len() - 4].to_string(),
        };
        let search_res_resp = test::call_service(
            &app,
            post_request!(&payload, ROUTES.search.repository).to_request(),
        )
        .await;
        assert_eq!(search_res_resp.status(), StatusCode::OK);
        let search_res: Vec<Repository> = test::read_body_json(search_res_resp).await;
        println!("{:?}", search_res);
        assert!(!search_res.is_empty());
        assert_eq!(search_res.first().as_ref().unwrap().name, REPO_NAME);
        // test ends
    }
}