Compare commits

..

1 commit

Author: Renovate Bot
SHA1: e59d54802a
Message: Add renovate.json
Date: 2025-01-05 23:49:07 +00:00
Some checks failed:
Lint / fmt (pull_request) Has been cancelled
Lint / clippy (pull_request) Has been cancelled
Coverage / stable - x86_64-unknown-linux-gnu (pull_request) Has been cancelled
Build / stable - x86_64-unknown-linux-gnu (pull_request) Has been cancelled
21 changed files with 362 additions and 483 deletions


@@ -2,5 +2,3 @@ export POSTGRES_DATABASE_URL="postgres://postgres:password@localhost:5432/postgr
export SQLITE_TMP="$(pwd)/db/db-sqlx-sqlite/tmp"
export SQLITE_DATABASE_URL="sqlite://$SQLITE_TMP/admin.db"
export STARCHART__CRAWLER__WAIT_BEFORE_NEXT_API_CALL=0
export FORGEJO_HOST=http://localhost:3000
export FORGEFLUX_HOST=http://localhost:7000

43 .github/workflows/clippy-fmt.yml vendored Normal file

@@ -0,0 +1,43 @@
name: Lint
on:
pull_request:
types: [opened, synchronize, reopened]
push:
branches:
- master
jobs:
fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
components: rustfmt
- name: Check with rustfmt
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
clippy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
components: clippy
override: true
- name: Check with Clippy
uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --workspace --tests --all-features

93 .github/workflows/coverage.yml vendored Normal file

@@ -0,0 +1,93 @@
name: Coverage
on:
pull_request:
types: [opened, synchronize, reopened]
push:
branches:
- master
jobs:
build_and_test:
strategy:
fail-fast: false
matrix:
version:
- stable
# services:
# postgres:
# image: postgres
# env:
# POSTGRES_PASSWORD: password
# POSTGRES_USER: postgres
# POSTGRES_DB: postgres
# options: >-
# --health-cmd pg_isready
# --health-interval 10s
# --health-timeout 5s
# --health-retries 5
# ports:
# - 5432:5432
name: ${{ matrix.version }} - x86_64-unknown-linux-gnu
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# - name: ⚡ Cache
# uses: actions/cache@v3
# with:
# path: |
# ~/.cargo/registry
# ~/.cargo/git
# target
# key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Install ${{ matrix.version }}
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ matrix.version }}-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: load env
run: |
mkdir -p db/db-sqlx-sqlite/tmp &&
source .env-sample \
&& echo "POSTGRES_DATABASE_URL=$POSTGRES_DATABASE_URL" >> $GITHUB_ENV \
&& echo "SQLITE_DATABASE_URL=$SQLITE_DATABASE_URL" >> $GITHUB_ENV
# usually run as part of `make test` but because this workflow doesn't run
# that command, `make dev-env` is used
- name: setup dev environment
run: make dev-env
env:
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
- name: run migrations
run: make migrate
env:
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
- name: Generate coverage file
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'pull_request')
uses: actions-rs/tarpaulin@v0.1
env:
# GIT_HASH is dummy value. I guess build.rs is skipped in tarpaulin
# execution so this value is required for preventing meta tests from
# panicking
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
with:
args: "--all-features --no-fail-fast --workspace=db/db-sqlx-sqlite,. -t 1200"
# args: "--all-features --no-fail-fast --workspace=database/db-sqlx-postgres,database/db-sqlx-sqlite,. -t 1200"
- name: Upload to Codecov
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'pull_request')
uses: codecov/codecov-action@v2

128 .github/workflows/linux.yml vendored Normal file

@@ -0,0 +1,128 @@
name: Build
on:
pull_request:
types: [opened, synchronize, reopened]
push:
branches:
- master
jobs:
build_and_test:
strategy:
fail-fast: false
matrix:
version:
- stable
# - nightly
name: ${{ matrix.version }} - x86_64-unknown-linux-gnu
runs-on:
ubuntu-latest
# services:
# postgres:
# image: postgres
# env:
# POSTGRES_PASSWORD: password
# POSTGRES_USER: postgres
# POSTGRES_DB: postgres
# options: >-
# --health-cmd pg_isready
# --health-interval 10s
# --health-timeout 5s
# --health-retries 5
# ports:
# - 5432:5432
#
steps:
- uses: actions/checkout@v2
# - name: ⚡ Cache
# uses: actions/cache@v3
# with:
# path: |
# /var/lib/docker
# ~/.cargo/registry
# ~/.cargo/git
# target
# key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Cache
uses: Swatinem/rust-cache@v1
- name: Login to DockerHub
if: (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Install ${{ matrix.version }}
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ matrix.version }}-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: load env
run: |
mkdir -p db/db-sqlx-sqlite/tmp &&
source .env-sample \
&& echo "POSTGRES_DATABASE_URL=$POSTGRES_DATABASE_URL" >> $GITHUB_ENV \
&& echo "SQLITE_DATABASE_URL=$SQLITE_DATABASE_URL" >> $GITHUB_ENV
- name: run migrations
run: make migrate
env:
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
- name: build
run:
make
env:
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
- name: build docker images
if: matrix.version == 'stable'
run: make docker
- name: publish docker images
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
run: make docker-publish
- name: run tests
timeout-minutes: 40
run:
make test
env:
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
- name: generate documentation
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
run:
make doc
env:
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
- name: Deploy to GitHub Pages
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
branch: gh-pages
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
FOLDER:
./target/doc/
# - name: deploy
# if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'realaravinth/realaravinth' }}
# run: >-
# curl --location --request POST "https://deploy.batsense.net/api/v1/update" --header 'Content-Type: application/json' --data-raw "{ \"secret\": \"${{ secrets.DEPLOY_TOKEN }}\", \"branch\": \"gh-pages\" }"


@@ -1,111 +0,0 @@
steps:
setup:
image: python
when:
event: [push, pull_request, tag, deployment]
environment:
POSTGRES_DATABASE_URL: postgres://postgres:password@database:5432/postgres
commands:
- pip install requests
- sed -i 's/localhost\:3000/forgejo/' scripts/gitea.py
- python ./scripts/gitea.py
# - curl -vv http://forgejo/api/v1/repos/bot/repository_58
# - curl -vv http://forge_forgeflux/forgejo/bot/repository_58
# - curl -vv http://forge_forgeflux/forgejo/bot/repository_01
test:
image: rust
when:
event: [push, pull_request, tag, deployment]
environment:
POSTGRES_DATABASE_URL: postgres://postgres:password@database:5432/postgres
SQLITE_DATABASE_URL: sqlite:///tmp/admin.db
FORGEJO_HOST: http://forgejo
FORGEFLUX_HOST: http://forge_forgeflux
commands:
# - curl -fsSL https://deb.nodesource.com/setup_16.x | bash - &&\
# - apt update && apt-get -y --no-install-recommends install nodejs tar gpg curl wget
# - rustup toolchain install nightly
# - rustup override set nightly
- rustup component add rustfmt
- rustup component add clippy
# - export FORGEFLUX_server_COOKIE_SECRET=d05604b56ffd0f57200be49313a9e51436ca8f1678b9ffdf8d2602d5e9839f05
- make migrate
- make
# - make lint
- make test.workspaces
build_docker_img:
image: plugins/docker
when:
event: [pull_request]
settings:
dry_run: true
repo: forgeflux/starchart
tags: latest
build_and_publish_docker_img:
image: plugins/docker
when:
event: [push, tag, deployment]
branch: master
settings:
username: forgeflux
password:
from_secret: DOCKER_TOKEN
repo: forgeflux/starchart
tags:
latest
services:
forgejo:
image: codeberg.org/forgejo/forgejo:9
environment:
FORGEJO__security__INSTALL_LOCK: true
FORGEJO__federation__ENABLED: true
FORGEJO__server__ROOT_URL: http://forgejo
FORGEJO__server__HTTP_PORT: 80
database:
image: postgres
environment:
POSTGRES_PASSWORD: password
forge_forgeflux_database:
image: postgres
environment:
POSTGRES_PASSWORD: password
forge_forgeflux:
image: forgeflux/forgeflux:latest
depends_on:
- forgeflux_postgres
environment:
FORGEFLUX_server_PUBLIC_URL_HAS_HTTPS: false
FORGEFLUX_debug: true
FORGEFLUX_source_code: https://git.batsense.net/ForgeFlux/ForgeFlux
FORGEFLUX_allow_registration: true
FORGEFLUX_database_POOL: 2
FORGEFLUX_forges_FORGEJO_url: http://forgejo
FORGEFLUX_forges_FORGEJO_client_id: foo
FORGEFLUX_forges_FORGEJO_client_secret: foo
FORGEFLUX_forges_FORGEJO_user_USERNAME: foo
FORGEFLUX_forges_FORGEJO_user_API_TOKEN: foo
DATABASE_URL: postgres://postgres:password@forgeflux_postgres:5432/postgres
PORT: 80
FORGEFLUX_server_DOMAIN: forge_forgeflux
FORGEFLUX_server_COOKIE_SECRET: 995cde0721b6e41602dd111438cc7c1b2506dc14bad31d2653fb9a4adce1f84e
FORGEFLUX_server_IP: 0.0.0.0
FORGEFLUX_forges_GITHUB_url: https://github.com
FORGEFLUX_forges_GITHUB_api_url: https://api.github.com
FORGEFLUX_forges_GITHUB_client_id: foo
FORGEFLUX_forges_GITHUB_client_secret: foo
FORGEFLUX_forges_GITHUB_user_USERNAME: foo
FORGEFLUX_forges_GITHUB_user_PERSONAL_ACCESS_TOKEN: foo
FORGEFLUX_starchart_enable: true
forgeflux_postgres:
image: postgres:17.2
environment:
POSTGRES_PASSWORD: password # change password

17 Cargo.lock generated

@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
version = 3
[[package]]
name = "actix"
@@ -1147,20 +1147,6 @@ dependencies = [
"url",
]
[[package]]
name = "forgeflux"
version = "0.1.0"
dependencies = [
"actix-rt",
"async-trait",
"forge-core",
"reqwest",
"serde",
"serde_json",
"tokio",
"url",
]
[[package]]
name = "form_urlencoded"
version = "1.0.1"
@@ -2936,7 +2922,6 @@ dependencies = [
"derive_more",
"federate-core",
"forge-core",
"forgeflux",
"gitea",
"lazy_static",
"log",


@@ -16,7 +16,6 @@ members = [
"db/db-core",
"db/db-sqlx-sqlite",
"forge/forge-core",
"forge/forgeflux",
"forge/gitea",
"federate/federate-core",
"federate/publiccodeyml"
@@ -77,9 +76,6 @@ path = "./db/db-sqlx-sqlite"
[dependencies.gitea]
path = "./forge/gitea"
[dependencies.forgeflux]
path = "./forge/forgeflux"
[dependencies.forge-core]
path = "./forge/forge-core"


@@ -131,10 +131,6 @@ test: migrate ## Run tests
$(call cache_bust)
$(call test_workspaces)
test.workspaces:
$(call cache_bust)
$(call test_workspaces)
# cd database/db-sqlx-postgres &&\
# DATABASE_URL=${POSTGRES_DATABASE_URL}\
# cargo test --no-fail-fast


@@ -309,8 +309,6 @@ impl Clone for Box<dyn SCDatabase> {
pub enum ForgeImplementation {
/// [Gitea](https://gitea.io) software forge
Gitea,
/// [ForgeFlux](https://net.forgeflux.net)
ForgeFlux,
}
impl ForgeImplementation {
@@ -318,7 +316,6 @@ impl ForgeImplementation {
pub const fn to_str(&self) -> &'static str {
match self {
ForgeImplementation::Gitea => "gitea",
ForgeImplementation::ForgeFlux => "forgeflux",
}
}
}
@@ -329,11 +326,9 @@ impl FromStr for ForgeImplementation {
/// Convert [str] to [ForgeImplementation]
fn from_str(s: &str) -> DBResult<Self> {
const GITEA: &str = ForgeImplementation::Gitea.to_str();
const FORGEFLUX: &str = ForgeImplementation::ForgeFlux.to_str();
let s = s.trim();
match s {
GITEA => Ok(Self::Gitea),
FORGEFLUX => Ok(Self::ForgeFlux),
_ => Err(DBError::UnknownForgeType(s.to_owned())),
}
}


@@ -5,12 +5,19 @@ networks:
external: false
services:
forgejo:
image: codeberg.org/forgejo/forgejo:9
server:
image: gitea/gitea:1.16.5
container_name: gitea
environment:
- FORGEJO__security__INSTALL_LOCK=true
- FORGEJO__federation__ENABLED=true
- FORGEJO__server__ROOT_URL=http://localhost
- FORGEJO__server__HTTP_PORT=3000
- USER_UID=1000
- USER_GID=1000
restart: always
networks:
- gitea
volumes:
- ./tmp/gitea:/data
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
ports:
- "3000:3000"
- "8080:3000"
- "2221:22"


@@ -1,37 +0,0 @@
[package]
name = "forgeflux"
version = "0.1.0"
authors = ["realaravinth <realaravinth@batsense.net>"]
description = "ForgeFlux StarChart - Federated forge spider"
documentation = "https://forgeflux.org/"
edition = "2021"
license = "AGPLv3 or later version"
[lib]
name = "forgeflux"
path = "src/lib.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-trait = "0.1.51"
url = { version = "2.2.2", features = ["serde"] }
tokio = { version = "1.17", features = ["time"] }
[dependencies.forge-core]
path = "../forge-core"
[dependencies.reqwest]
features = ["rustls-tls-native-roots", "gzip", "deflate", "brotli", "json"]
version = "0.11.10"
[dependencies.serde]
features = ["derive"]
version = "1"
[dependencies.serde_json]
version = "1"
[dev-dependencies]
actix-rt = "2.7"


@@ -1,187 +0,0 @@
/*
* ForgeFlux StarChart - A federated software forge spider
* Copyright © 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::sync::Arc;
use std::time::Duration;
use reqwest::Client;
use tokio::task::JoinHandle;
use url::Url;
use db_core::ForgeImplementation;
use forge_core::dev::*;
use forge_core::Repository;
pub mod schema;
const REPO_SEARCH_PATH: &str = "/search/repositories";
const FORGEFLUX_NODEINFO: &str = "/nodeinfo/2.0";
const FORGEFLUX_IDENTIFIER: &str = "forgeflux";
#[derive(Clone)]
pub struct ForgeFlux {
pub instance_url: Url,
pub client: Client,
url: Url,
}
impl ForgeFlux {
pub fn new(instance_url: Url, client: Client) -> Self {
let url = Url::parse(&db_core::clean_url(&instance_url)).unwrap();
Self {
instance_url,
client,
url,
}
}
}
impl PartialEq for ForgeFlux {
fn eq(&self, other: &Self) -> bool {
self.url == other.url && self.instance_url == other.instance_url
}
}
#[async_trait]
impl SCForge for ForgeFlux {
async fn is_forge(&self) -> bool {
true
}
fn get_url(&self) -> &Url {
&self.url
}
fn forge_type(&self) -> ForgeImplementation {
ForgeImplementation::ForgeFlux
}
async fn crawl(&self, limit: u64, page: u64, rate_limit: u64) -> CrawlResp {
fn empty_is_none(s: &str) -> Option<String> {
let s = s.trim();
if s.is_empty() {
None
} else {
Some(s.to_owned())
}
}
let mut tags = Tags::default();
let mut users = UserMap::default();
let mut internal_users = UserMap::default();
let mut repos = Repositories::default();
let instance_url = self.instance_url.clone();
let mut url = instance_url.clone();
url.set_path(REPO_SEARCH_PATH);
url.set_query(Some(&format!("page={page}&limit={limit}")));
let mut res: Vec<schema::Repository> = self
.client
.get(url)
.send()
.await
.unwrap()
.json()
.await
.unwrap();
let mut sleep_fut: Option<JoinHandle<()>> = None;
for repo in res.drain(0..) {
let user = if internal_users.contains_key(&repo.attributed_to.to_string()) {
if let Some(sleep_fut) = sleep_fut {
sleep_fut.await.unwrap();
}
let user: schema::User = self
.client
.get(repo.attributed_to.clone())
.send()
.await
.unwrap()
.json()
.await
.unwrap();
sleep_fut = Some(tokio::spawn(tokio::time::sleep(Duration::new(
rate_limit, 0,
))));
let profile_photo = if let Some(profile_photo) = user.icon {
Some(profile_photo.url.to_string())
} else {
None
};
let f_user = Arc::new(User {
url: user.id.clone(),
username: Arc::new(user.preferred_username),
html_link: user.id.to_string(),
profile_photo,
});
users.insert(f_user.username.clone(), f_user.clone());
users.insert(Arc::new(f_user.url.to_string()), f_user.clone());
f_user
} else {
internal_users
.get(&repo.attributed_to.to_string())
.unwrap()
.clone()
};
let frepo = Repository {
url: self.url.clone(),
website: None,
name: repo.name,
owner: user,
html_link: repo.id.to_string(),
tags: None,
description: Some(repo.summary),
};
repos.push(frepo);
}
CrawlResp { repos, tags, users }
}
}
#[cfg(test)]
mod tests {
use super::*;
use url::Url;
pub const NET_REPOSITORIES: u64 = 0;
pub const PER_CRAWL: u64 = 10;
#[actix_rt::test]
async fn forgeflux_works() {
let ctx = ForgeFlux::new(
Url::parse(&std::env::var("FORGEFLUX_HOST").unwrap()).unwrap(),
Client::new(),
);
assert!(ctx.is_forge().await);
let steps = NET_REPOSITORIES / PER_CRAWL;
for i in 0..steps {
let res = ctx.crawl(PER_CRAWL, i, 0).await;
assert_eq!(res.repos.len() as u64, PER_CRAWL);
}
}
}


@@ -1,69 +0,0 @@
/*
* ForgeFlux StarChart - A federated software forge spider
* Copyright © 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct ProfilePhoto {
pub url: Url,
pub media_type: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct User {
pub name: String,
pub preferred_username: String,
pub id: Url,
pub url: Option<Url>,
pub icon: Option<ProfilePhoto>,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Repository {
pub id: Url,
pub clone_uri: Url,
pub inbox: Url,
pub name: String,
pub attributed_to: Url,
pub summary: String,
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
#[test]
/// Tests if Gitea responses panic when deserialized with serde into structs defined in this
/// module/file. Since Go doesn't have abilities to describe nullable values, I(@realaravinth)
/// am forced to do this as my knowledge of the Gitea codebase is very limited.
fn schema_doesnt_panic() {
let files = ["./tests/schema/forgeflux/net.forgeflux.org.json"];
for file in files.iter() {
let contents = fs::read_to_string(file).unwrap();
for line in contents.lines() {
let _: Vec<Repository> =
serde_json::from_str(line).expect("Forgeflux schema paniced");
}
}
}
}

File diff suppressed because one or more lines are too long


@@ -31,7 +31,6 @@ pub mod schema;
const REPO_SEARCH_PATH: &str = "/api/v1/repos/search";
const GITEA_NODEINFO: &str = "/api/v1/nodeinfo";
const GITEA_IDENTIFIER: &str = "gitea";
const FORGEJO_IDENTIFIER: &str = "forgejo";
#[derive(Clone)]
pub struct Gitea {
@@ -178,16 +177,13 @@ mod tests {
use super::*;
use url::Url;
pub const GITEA_HOST: &str = "http://localhost:8080";
pub const NET_REPOSITORIES: u64 = 100;
pub const PER_CRAWL: u64 = 10;
#[actix_rt::test]
async fn gitea_works() {
let ctx = Gitea::new(
Url::parse(&std::env::var("FORGEJO_HOST").unwrap()).unwrap(),
Client::new(),
);
let ctx = Gitea::new(Url::parse(GITEA_HOST).unwrap(), Client::new());
assert!(ctx.is_forge().await);
let steps = NET_REPOSITORIES / PER_CRAWL;

21 renovate.json Normal file

@@ -0,0 +1,21 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
":dependencyDashboard"
],
"labels": [
"renovate-bot"
],
"prHourlyLimit": 0,
"timezone": "Asia/kolkata",
"prCreation": "immediate",
"vulnerabilityAlerts": {
"enabled": true,
"labels": [
"renovate-bot",
"renovate-security",
"security"
]
}
}


@@ -20,7 +20,7 @@ def check_online():
while True:
try:
res = requests.get(
"http://localhost:3000/api/v1/nodeinfo", allow_redirects=False
"http://localhost:8080/api/v1/nodeinfo", allow_redirects=False
)
if any([res.status_code == 302, res.status_code == 200]):
break
@@ -30,6 +30,46 @@ def check_online():
count += 1
continue
def install():
INSTALL_PAYLOAD = {
"db_type": "sqlite3",
"db_host": "localhost:3306",
"db_user": "root",
"db_passwd": "",
"db_name": "gitea",
"ssl_mode": "disable",
"db_schema": "",
"charset": "utf8",
"db_path": "/data/gitea/gitea.db",
"app_name": "Gitea:+Git+with+a+cup+of+tea",
"repo_root_path": "/data/git/repositories",
"lfs_root_path": "/data/git/lfs",
"run_user": "git",
"domain": "localhost",
"ssh_port": "2221",
"http_port": "3000",
"app_url": "http://localhost:8080/",
"log_root_path": "/data/gitea/log",
"smtp_host": "",
"smtp_from": "",
"smtp_user": "",
"smtp_passwd": "",
"enable_federated_avatar": "on",
"enable_open_id_sign_in": "on",
"enable_open_id_sign_up": "on",
"default_allow_create_organization": "on",
"default_enable_timetracking": "on",
"no_reply_address": "noreply.localhost",
"password_algorithm": "pbkdf2",
"admin_name": "",
"admin_passwd": "",
"admin_confirm_passwd": "",
"admin_email": "",
}
requests.post(f"http://localhost:8080", data=INSTALL_PAYLOAD)
class ParseCSRFGiteaForm(HTMLParser):
token: str = None
@@ -70,7 +110,7 @@ class HTMLClient:
def get_csrf_token(self, url: str) -> str:
resp = self.session.get(url, allow_redirects=False)
if resp.status_code != 200 and resp.status_code != 302 and resp.status_code != 303:
if resp.status_code != 200 and resp.status_code != 302:
print(resp.status_code, resp.text)
raise Exception(f"Can't get csrf token: {resp.status_code}")
csrf = self.__get_csrf_token(resp.text)
@@ -78,7 +118,7 @@ class HTMLClient:
def register(client: HTMLClient):
url = "http://localhost:3000/user/sign_up"
url = "http://localhost:8080/user/sign_up"
csrf = client.get_csrf_token(url)
payload = {
"_csrf": csrf,
@@ -91,7 +131,7 @@ def register(client: HTMLClient):
def login(client: HTMLClient):
url = "http://localhost:3000/user/login"
url = "http://localhost:8080/user/login"
csrf = client.get_csrf_token(url)
payload = {
"_csrf": csrf,
@@ -101,7 +141,7 @@ def login(client: HTMLClient):
}
resp = client.session.post(url, data=payload, allow_redirects=False)
print(f"login {client.session.cookies}")
if resp.status_code == 303:
if resp.status_code == 302:
print("User logged in")
return
@@ -127,7 +167,7 @@ def create_repositories(client: HTMLClient):
}
return data
url = "http://localhost:3000/repo/create"
url = "http://localhost:8080/repo/create"
for repo in REPOS:
csrf = client.get_csrf_token(url)
resp = client.session.post(url, data=get_repository_payload(csrf, repo))
@@ -142,7 +182,7 @@ def create_repositories(client: HTMLClient):
def add_tag(repo: str, client: HTMLClient):
print("adding tags")
tag = "testing"
url = f"http://{GITEA_USER}:{GITEA_PASSWORD}@localhost:3000/api/v1/repos/{GITEA_USER}/{repo}/topics/{tag}"
url = f"http://{GITEA_USER}:{GITEA_PASSWORD}@localhost:8080/api/v1/repos/{GITEA_USER}/{repo}/topics/{tag}"
resp = requests.put(url)
if resp.status_code != 204:
print(f"Error while adding tags repository: {repo} {resp.status_code}")
@@ -156,6 +196,8 @@ if __name__ == "__main__":
REPOS.append(f"repository_{i}")
check_online()
print("Instance online")
install()
print("Instance configured and installed")
client = HTMLClient()
count = 0
while True:


@@ -107,15 +107,14 @@ mod tests {
use crate::pages::errors::*;
use crate::settings::Settings;
#[actix_rt::test]
async fn add_page_works() {
let (_, ctx, _, _tmp) = crate::tests::sqlx_sqlite::get_ctx().await;
// let settings = Settings::new().unwrap();
AddChallenge::page(&ctx.settings);
#[test]
fn add_page_works() {
let settings = Settings::new().unwrap();
AddChallenge::page(&settings);
let payload = AddChallengePayload {
hostname: url::Url::parse("https://example.com").unwrap(),
};
let page = AddChallenge::new(&ctx.settings, Some(&payload));
let page = AddChallenge::new(&settings, Some(&payload));
page.with_error(&ReadableError::new(&ServiceError::ClosedForRegistration));
page.render();
}


@@ -309,12 +309,6 @@ fn set_separator_field(mut s: ConfigBuilder<DefaultState>) -> ConfigBuilder<Defa
&format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}WAIT_BEFORE_NEXT_API_CALL"),
"crawler.wait_before_next_api_call",
);
s = from_env(
s,
&format!("{PREFIX}{SEPARATOR}REPOSITORY{SEPARATOR}ROOT"),
"repository.root",
);
s
}


@@ -25,7 +25,6 @@ use url::Url;
use db_core::prelude::*;
use forge_core::prelude::*;
use forgeflux::ForgeFlux;
use gitea::Gitea;
use crate::ctx::Ctx;
@@ -38,16 +37,9 @@ impl Ctx {
info!("[crawl][{instance_url}] Init crawling");
let forge: Box<dyn SCForge> =
Box::new(Gitea::new(instance_url.clone(), self.client.clone()));
let forge = if forge.is_forge().await {
forge
} else {
let forgeflux = Box::new(ForgeFlux::new(instance_url.clone(), self.client.clone()));
if forgeflux.is_forge().await {
forgeflux
} else {
if !forge.is_forge().await {
unimplemented!("Forge type unimplemented");
}
};
let mut page = 1;
let url = forge.get_url();
@@ -237,12 +229,13 @@ mod tests {
use url::Url;
pub const GITEA_HOST: &str = "http://localhost:8080";
pub const GITEA_USERNAME: &str = "bot";
#[actix_rt::test]
async fn crawl_gitea() {
let (db, ctx, federate, _tmp_dir) = sqlx_sqlite::get_ctx().await;
let url = Url::parse(&std::env::var("FORGEJO_HOST").unwrap()).unwrap();
let url = Url::parse(GITEA_HOST).unwrap();
ctx.crawl(&url, &db, &federate).await;
// let hostname = get_hostname(&Url::parse(GITEA_HOST).unwrap());
assert!(db.forge_exists(&url).await.unwrap());


@@ -57,14 +57,12 @@ pub mod sqlx_sqlite {
let url = env::var("SQLITE_DATABASE_URL").unwrap();
env::set_var("DATABASE_URL", &url);
println!("found db url: {url}");
let tmp_dir = Temp::new_dir().unwrap();
env::set_var("STARCHART__REPOSITORY__ROOT", tmp_dir.to_str().unwrap());
let mut settings = Settings::new().unwrap();
settings.database.url = url.clone();
settings.database.database_type = DBType::Sqlite;
let db = sqlite::get_data(Some(settings.clone())).await;
let tmp_dir = Temp::new_dir().unwrap();
settings.repository.root = tmp_dir.to_str().unwrap().to_string();
let federate = get_federate(Some(settings.clone())).await;