feat: bootstrap introductions from values received in config
This commit is contained in:
parent
ac7c1e4066
commit
a155ab9d26
3 changed files with 61 additions and 5 deletions
|
@ -38,6 +38,7 @@ client_timeout = 60 # of HTTP client involved in crawling. In seconds.
|
|||
wait_before_next_api_call = 2 # in seconds
|
||||
|
||||
[introducer]
|
||||
#nodes = ["http://localhost:7000"]
|
||||
nodes = []
|
||||
|
||||
[repository]
|
||||
|
|
|
@ -15,19 +15,57 @@
|
|||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
use std::collections::HashSet;
|
||||
|
||||
use actix_web::web;
|
||||
use actix_web::{HttpResponse, Responder};
|
||||
use actix_web_codegen_const_routes::get;
|
||||
use url::Url;
|
||||
|
||||
pub use api_routes::*;
|
||||
use db_core::prelude::*;
|
||||
|
||||
use crate::ctx::Ctx;
|
||||
use crate::pages::chart::home::{OptionalPage, Page};
|
||||
use crate::search;
|
||||
use crate::WebFederate;
|
||||
use crate::{errors::*, WebDB};
|
||||
|
||||
const LIMIT: u32 = 50;
|
||||
|
||||
impl Ctx {
|
||||
pub async fn bootstrap(&self, db: &Box<dyn SCDatabase>) -> ServiceResult<()> {
|
||||
let mut known_starcharts = HashSet::with_capacity(self.settings.introducer.nodes.len());
|
||||
for starchart in self.settings.introducer.nodes.iter() {
|
||||
let mut page = 1;
|
||||
loop {
|
||||
let mut url = starchart.clone();
|
||||
url.set_path(ROUTES.introducer.list);
|
||||
url.set_query(Some(&format!("page={page}")));
|
||||
let mut nodes: Vec<Starchart> = self
|
||||
.client
|
||||
.get(url)
|
||||
.send()
|
||||
.await
|
||||
.unwrap()
|
||||
.json()
|
||||
.await
|
||||
.unwrap();
|
||||
if nodes.is_empty() {
|
||||
break;
|
||||
}
|
||||
for node in nodes.drain(0..) {
|
||||
let node_url = Url::parse(&node.instance_url)?;
|
||||
db.add_starchart_to_introducer(&node_url).await?;
|
||||
if known_starcharts.get(&node_url).is_none() {
|
||||
known_starcharts.insert(node_url);
|
||||
}
|
||||
}
|
||||
page += 1;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[get(path = "ROUTES.introducer.list")]
|
||||
pub async fn list_introductions(
|
||||
db: WebDB,
|
||||
|
|
23
src/main.rs
23
src/main.rs
|
@ -28,8 +28,10 @@ pub mod db;
|
|||
pub mod dns;
|
||||
pub mod errors;
|
||||
pub mod federate;
|
||||
pub mod introduce;
|
||||
pub mod pages;
|
||||
pub mod routes;
|
||||
pub mod search;
|
||||
pub mod settings;
|
||||
pub mod spider;
|
||||
pub mod static_assets;
|
||||
|
@ -82,8 +84,14 @@ async fn main() {
|
|||
|
||||
let crawler_fut = tokio::spawn(spider::Crawler::start(crawler.clone()));
|
||||
let ctx = WebCtx::new(ctx);
|
||||
ctx.bootstrap(&db).await.unwrap();
|
||||
|
||||
let c = ctx.clone();
|
||||
let d = db.clone();
|
||||
let f = federate.clone();
|
||||
|
||||
let socket_addr = settings.server.get_ip();
|
||||
HttpServer::new(move || {
|
||||
let server_fut = HttpServer::new(move || {
|
||||
App::new()
|
||||
.wrap(middleware::Logger::default())
|
||||
.wrap(middleware::Compress::default())
|
||||
|
@ -98,10 +106,19 @@ async fn main() {
|
|||
})
|
||||
.bind(&socket_addr)
|
||||
.unwrap()
|
||||
.run()
|
||||
.run();
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
let s = tokio::spawn(server_fut);
|
||||
f.import(
|
||||
url::Url::parse("http://localhost:7000").unwrap(),
|
||||
&c.client,
|
||||
&d,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
kill_crawler.send(true).unwrap();
|
||||
crawler_fut.await.unwrap().await;
|
||||
s.await.unwrap().unwrap();
|
||||
}
|
||||
|
|
Loading…
Add table
Reference in a new issue