Compare commits
1 commit
master
...
renovate/c
Author | SHA1 | Date | |
---|---|---|---|
|
e59d54802a |
26 changed files with 480 additions and 632 deletions
|
@ -2,5 +2,3 @@ export POSTGRES_DATABASE_URL="postgres://postgres:password@localhost:5432/postgr
|
|||
export SQLITE_TMP="$(pwd)/db/db-sqlx-sqlite/tmp"
|
||||
export SQLITE_DATABASE_URL="sqlite://$SQLITE_TMP/admin.db"
|
||||
export STARCHART__CRAWLER__WAIT_BEFORE_NEXT_API_CALL=0
|
||||
export FORGEJO_HOST=http://localhost:3000
|
||||
export FORGEFLUX_HOST=http://localhost:7000
|
||||
|
|
43
.github/workflows/clippy-fmt.yml
vendored
Normal file
43
.github/workflows/clippy-fmt.yml
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
name: Lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
fmt:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
components: rustfmt
|
||||
- name: Check with rustfmt
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
|
||||
clippy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
components: clippy
|
||||
override: true
|
||||
|
||||
- name: Check with Clippy
|
||||
uses: actions-rs/clippy-check@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
args: --workspace --tests --all-features
|
93
.github/workflows/coverage.yml
vendored
Normal file
93
.github/workflows/coverage.yml
vendored
Normal file
|
@ -0,0 +1,93 @@
|
|||
name: Coverage
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
build_and_test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- stable
|
||||
|
||||
# services:
|
||||
# postgres:
|
||||
# image: postgres
|
||||
# env:
|
||||
# POSTGRES_PASSWORD: password
|
||||
# POSTGRES_USER: postgres
|
||||
# POSTGRES_DB: postgres
|
||||
# options: >-
|
||||
# --health-cmd pg_isready
|
||||
# --health-interval 10s
|
||||
# --health-timeout 5s
|
||||
# --health-retries 5
|
||||
# ports:
|
||||
# - 5432:5432
|
||||
|
||||
name: ${{ matrix.version }} - x86_64-unknown-linux-gnu
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# - name: ⚡ Cache
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: |
|
||||
# ~/.cargo/registry
|
||||
# ~/.cargo/git
|
||||
# target
|
||||
# key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Install ${{ matrix.version }}
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: ${{ matrix.version }}-x86_64-unknown-linux-gnu
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: load env
|
||||
run: |
|
||||
mkdir -p db/db-sqlx-sqlite/tmp &&
|
||||
source .env-sample \
|
||||
&& echo "POSTGRES_DATABASE_URL=$POSTGRES_DATABASE_URL" >> $GITHUB_ENV \
|
||||
&& echo "SQLITE_DATABASE_URL=$SQLITE_DATABASE_URL" >> $GITHUB_ENV
|
||||
|
||||
# usually run as part of `make test` but because this workflow doesn't run
|
||||
# that command, `make dev-env` is used
|
||||
- name: setup dev environment
|
||||
run: make dev-env
|
||||
env:
|
||||
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
|
||||
- name: run migrations
|
||||
run: make migrate
|
||||
env:
|
||||
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
|
||||
- name: Generate coverage file
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'pull_request')
|
||||
uses: actions-rs/tarpaulin@v0.1
|
||||
env:
|
||||
# GIT_HASH is dummy value. I guess build.rs is skipped in tarpaulin
|
||||
# execution so this value is required for preventing meta tests from
|
||||
# panicking
|
||||
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
with:
|
||||
args: "--all-features --no-fail-fast --workspace=db/db-sqlx-sqlite,. -t 1200"
|
||||
# args: "--all-features --no-fail-fast --workspace=database/db-sqlx-postgres,database/db-sqlx-sqlite,. -t 1200"
|
||||
|
||||
- name: Upload to Codecov
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'pull_request')
|
||||
uses: codecov/codecov-action@v2
|
128
.github/workflows/linux.yml
vendored
Normal file
128
.github/workflows/linux.yml
vendored
Normal file
|
@ -0,0 +1,128 @@
|
|||
name: Build
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
build_and_test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- stable
|
||||
# - nightly
|
||||
|
||||
name: ${{ matrix.version }} - x86_64-unknown-linux-gnu
|
||||
runs-on:
|
||||
ubuntu-latest
|
||||
|
||||
# services:
|
||||
# postgres:
|
||||
# image: postgres
|
||||
# env:
|
||||
# POSTGRES_PASSWORD: password
|
||||
# POSTGRES_USER: postgres
|
||||
# POSTGRES_DB: postgres
|
||||
# options: >-
|
||||
# --health-cmd pg_isready
|
||||
# --health-interval 10s
|
||||
# --health-timeout 5s
|
||||
# --health-retries 5
|
||||
# ports:
|
||||
# - 5432:5432
|
||||
#
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# - name: ⚡ Cache
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: |
|
||||
# /var/lib/docker
|
||||
# ~/.cargo/registry
|
||||
# ~/.cargo/git
|
||||
# target
|
||||
# key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
|
||||
- name: Cache
|
||||
uses: Swatinem/rust-cache@v1
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Install ${{ matrix.version }}
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: ${{ matrix.version }}-x86_64-unknown-linux-gnu
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: load env
|
||||
run: |
|
||||
mkdir -p db/db-sqlx-sqlite/tmp &&
|
||||
source .env-sample \
|
||||
&& echo "POSTGRES_DATABASE_URL=$POSTGRES_DATABASE_URL" >> $GITHUB_ENV \
|
||||
&& echo "SQLITE_DATABASE_URL=$SQLITE_DATABASE_URL" >> $GITHUB_ENV
|
||||
|
||||
- name: run migrations
|
||||
run: make migrate
|
||||
env:
|
||||
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
|
||||
- name: build
|
||||
run:
|
||||
make
|
||||
env:
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
|
||||
- name: build docker images
|
||||
if: matrix.version == 'stable'
|
||||
run: make docker
|
||||
|
||||
- name: publish docker images
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
|
||||
run: make docker-publish
|
||||
|
||||
- name: run tests
|
||||
timeout-minutes: 40
|
||||
run:
|
||||
make test
|
||||
env:
|
||||
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
|
||||
- name: generate documentation
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
|
||||
run:
|
||||
make doc
|
||||
env:
|
||||
GIT_HASH: 8e77345f1597e40c2e266cb4e6dee74888918a61 # dummy value
|
||||
POSTGRES_DATABASE_URL: "${{ env.POSTGRES_DATABASE_URL }}"
|
||||
SQLITE_DATABASE_URL: "${{ env.SQLITE_DATABASE_URL }}"
|
||||
|
||||
- name: Deploy to GitHub Pages
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'forgeflux-org/starchart'
|
||||
uses: JamesIves/github-pages-deploy-action@3.7.1
|
||||
with:
|
||||
branch: gh-pages
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
FOLDER:
|
||||
./target/doc/
|
||||
|
||||
# - name: deploy
|
||||
# if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'realaravinth/realaravinth' }}
|
||||
# run: >-
|
||||
# curl --location --request POST "https://deploy.batsense.net/api/v1/update" --header 'Content-Type: application/json' --data-raw "{ \"secret\": \"${{ secrets.DEPLOY_TOKEN }}\", \"branch\": \"gh-pages\" }"
|
111
.woodpecker.yml
111
.woodpecker.yml
|
@ -1,111 +0,0 @@
|
|||
steps:
|
||||
setup:
|
||||
image: python
|
||||
when:
|
||||
event: [push, pull_request, tag, deployment]
|
||||
environment:
|
||||
POSTGRES_DATABASE_URL: postgres://postgres:password@database:5432/postgres
|
||||
commands:
|
||||
- pip install requests
|
||||
- sed -i 's/localhost\:3000/forgejo/' scripts/gitea.py
|
||||
- python ./scripts/gitea.py
|
||||
# - curl -vv http://forgejo/api/v1/repos/bot/repository_58
|
||||
# - curl -vv http://forge_forgeflux/forgejo/bot/repository_58
|
||||
# - curl -vv http://forge_forgeflux/forgejo/bot/repository_01
|
||||
|
||||
test:
|
||||
image: rust
|
||||
when:
|
||||
event: [push, pull_request, tag, deployment]
|
||||
environment:
|
||||
POSTGRES_DATABASE_URL: postgres://postgres:password@database:5432/postgres
|
||||
SQLITE_DATABASE_URL: sqlite:///tmp/admin.db
|
||||
FORGEJO_HOST: http://forgejo
|
||||
FORGEFLUX_HOST: http://forge_forgeflux
|
||||
commands:
|
||||
# - curl -fsSL https://deb.nodesource.com/setup_16.x | bash - &&\
|
||||
# - apt update && apt-get -y --no-install-recommends install nodejs tar gpg curl wget
|
||||
# - rustup toolchain install nightly
|
||||
# - rustup override set nightly
|
||||
- rustup component add rustfmt
|
||||
- rustup component add clippy
|
||||
# - export FORGEFLUX_server_COOKIE_SECRET=d05604b56ffd0f57200be49313a9e51436ca8f1678b9ffdf8d2602d5e9839f05
|
||||
- make migrate
|
||||
- make
|
||||
# - make lint
|
||||
- make test.workspaces
|
||||
|
||||
build_docker_img:
|
||||
image: plugins/docker
|
||||
when:
|
||||
event: [pull_request]
|
||||
settings:
|
||||
dry_run: true
|
||||
repo: forgeflux/starchart
|
||||
tags: latest
|
||||
|
||||
build_and_publish_docker_img:
|
||||
image: plugins/docker
|
||||
when:
|
||||
event: [push, tag, deployment]
|
||||
branch: master
|
||||
settings:
|
||||
username: forgeflux
|
||||
password:
|
||||
from_secret: DOCKER_TOKEN
|
||||
repo: forgeflux/starchart
|
||||
tags:
|
||||
latest
|
||||
|
||||
|
||||
services:
|
||||
forgejo:
|
||||
image: codeberg.org/forgejo/forgejo:9
|
||||
environment:
|
||||
FORGEJO__security__INSTALL_LOCK: true
|
||||
FORGEJO__federation__ENABLED: true
|
||||
FORGEJO__server__ROOT_URL: http://forgejo
|
||||
FORGEJO__server__HTTP_PORT: 80
|
||||
|
||||
database:
|
||||
image: postgres
|
||||
environment:
|
||||
POSTGRES_PASSWORD: password
|
||||
|
||||
forge_forgeflux_database:
|
||||
image: postgres
|
||||
environment:
|
||||
POSTGRES_PASSWORD: password
|
||||
|
||||
forge_forgeflux:
|
||||
image: forgeflux/forgeflux:latest
|
||||
depends_on:
|
||||
- forgeflux_postgres
|
||||
environment:
|
||||
FORGEFLUX_server_PUBLIC_URL_HAS_HTTPS: false
|
||||
FORGEFLUX_debug: true
|
||||
FORGEFLUX_source_code: https://git.batsense.net/ForgeFlux/ForgeFlux
|
||||
FORGEFLUX_allow_registration: true
|
||||
FORGEFLUX_database_POOL: 2
|
||||
FORGEFLUX_forges_FORGEJO_url: http://forgejo
|
||||
FORGEFLUX_forges_FORGEJO_client_id: foo
|
||||
FORGEFLUX_forges_FORGEJO_client_secret: foo
|
||||
FORGEFLUX_forges_FORGEJO_user_USERNAME: foo
|
||||
FORGEFLUX_forges_FORGEJO_user_API_TOKEN: foo
|
||||
DATABASE_URL: postgres://postgres:password@forgeflux_postgres:5432/postgres
|
||||
PORT: 80
|
||||
FORGEFLUX_server_DOMAIN: forge_forgeflux
|
||||
FORGEFLUX_server_COOKIE_SECRET: 995cde0721b6e41602dd111438cc7c1b2506dc14bad31d2653fb9a4adce1f84e
|
||||
FORGEFLUX_server_IP: 0.0.0.0
|
||||
FORGEFLUX_forges_GITHUB_url: https://github.com
|
||||
FORGEFLUX_forges_GITHUB_api_url: https://api.github.com
|
||||
FORGEFLUX_forges_GITHUB_client_id: foo
|
||||
FORGEFLUX_forges_GITHUB_client_secret: foo
|
||||
FORGEFLUX_forges_GITHUB_user_USERNAME: foo
|
||||
FORGEFLUX_forges_GITHUB_user_PERSONAL_ACCESS_TOKEN: foo
|
||||
FORGEFLUX_starchart_enable: true
|
||||
|
||||
forgeflux_postgres:
|
||||
image: postgres:17.2
|
||||
environment:
|
||||
POSTGRES_PASSWORD: password # change password
|
17
Cargo.lock
generated
17
Cargo.lock
generated
|
@ -1,6 +1,6 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "actix"
|
||||
|
@ -1147,20 +1147,6 @@ dependencies = [
|
|||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "forgeflux"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"actix-rt",
|
||||
"async-trait",
|
||||
"forge-core",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tokio",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "form_urlencoded"
|
||||
version = "1.0.1"
|
||||
|
@ -2936,7 +2922,6 @@ dependencies = [
|
|||
"derive_more",
|
||||
"federate-core",
|
||||
"forge-core",
|
||||
"forgeflux",
|
||||
"gitea",
|
||||
"lazy_static",
|
||||
"log",
|
||||
|
|
|
@ -16,7 +16,6 @@ members = [
|
|||
"db/db-core",
|
||||
"db/db-sqlx-sqlite",
|
||||
"forge/forge-core",
|
||||
"forge/forgeflux",
|
||||
"forge/gitea",
|
||||
"federate/federate-core",
|
||||
"federate/publiccodeyml"
|
||||
|
@ -77,9 +76,6 @@ path = "./db/db-sqlx-sqlite"
|
|||
[dependencies.gitea]
|
||||
path = "./forge/gitea"
|
||||
|
||||
[dependencies.forgeflux]
|
||||
path = "./forge/forgeflux"
|
||||
|
||||
[dependencies.forge-core]
|
||||
path = "./forge/forge-core"
|
||||
|
||||
|
|
|
@ -18,9 +18,9 @@ COPY . .
|
|||
COPY --from=cacher /src/target target
|
||||
RUN make release
|
||||
|
||||
FROM debian:latest
|
||||
#LABEL org.opencontainers.image.source https://github.com/forgeflux-org/starchart
|
||||
RUN apt-get update && apt-get install -y ca-certificates libssl-dev
|
||||
FROM debian:bullseye-slim
|
||||
LABEL org.opencontainers.image.source https://github.com/forgeflux-org/starchart
|
||||
RUN apt-get update && apt-get install -y ca-certificates
|
||||
COPY --from=builder /src/target/release/starchart /usr/local/bin/
|
||||
COPY --from=builder /src/config/default.toml /etc/starchart/config.toml
|
||||
COPY scripts/entrypoint.sh /usr/local/bin
|
||||
|
|
10
Makefile
10
Makefile
|
@ -98,11 +98,11 @@ doc: ## Prepare documentation
|
|||
cargo doc --no-deps --workspace --all-features
|
||||
|
||||
docker: ## Build docker images
|
||||
docker build -t forgeflux/starchart:master -t forgeflux/starchart:latest .
|
||||
docker build -t forgedfed/starchart:master -t forgedfed/starchart:latest .
|
||||
|
||||
docker-publish: docker ## Build and publish docker images
|
||||
docker push forgeflux/starchart:master
|
||||
docker push forgeflux/starchart:latest
|
||||
docker push forgedfed/starchart:master
|
||||
docker push forgedfed/starchart:latest
|
||||
|
||||
lint: ## Lint codebase
|
||||
cargo fmt -v --all -- --emit files
|
||||
|
@ -131,10 +131,6 @@ test: migrate ## Run tests
|
|||
$(call cache_bust)
|
||||
$(call test_workspaces)
|
||||
|
||||
test.workspaces:
|
||||
$(call cache_bust)
|
||||
$(call test_workspaces)
|
||||
|
||||
# cd database/db-sqlx-postgres &&\
|
||||
# DATABASE_URL=${POSTGRES_DATABASE_URL}\
|
||||
# cargo test --no-fail-fast
|
||||
|
|
|
@ -309,8 +309,6 @@ impl Clone for Box<dyn SCDatabase> {
|
|||
pub enum ForgeImplementation {
|
||||
/// [Gitea](https://gitea.io) softare forge
|
||||
Gitea,
|
||||
/// [ForgeFlux](https://net.forgeflux.net)
|
||||
ForgeFlux,
|
||||
}
|
||||
|
||||
impl ForgeImplementation {
|
||||
|
@ -318,7 +316,6 @@ impl ForgeImplementation {
|
|||
pub const fn to_str(&self) -> &'static str {
|
||||
match self {
|
||||
ForgeImplementation::Gitea => "gitea",
|
||||
ForgeImplementation::ForgeFlux => "forgeflux",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -329,11 +326,9 @@ impl FromStr for ForgeImplementation {
|
|||
/// Convert [str] to [ForgeImplementation]
|
||||
fn from_str(s: &str) -> DBResult<Self> {
|
||||
const GITEA: &str = ForgeImplementation::Gitea.to_str();
|
||||
const FORGEFLUX: &str = ForgeImplementation::ForgeFlux.to_str();
|
||||
let s = s.trim();
|
||||
match s {
|
||||
GITEA => Ok(Self::Gitea),
|
||||
FORGEFLUX => Ok(Self::ForgeFlux),
|
||||
_ => Err(DBError::UnknownForgeType(s.to_owned())),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
INSERT OR IGNORE INTO starchart_forge_type (name) VALUES('forgeflux');
|
|
@ -18,6 +18,16 @@
|
|||
},
|
||||
"query": "SELECT\n starchart_introducer.instance_url\n FROM \n starchart_federated_mini_index\n INNER JOIN\n starchart_introducer\n ON\n starchart_introducer.ID = starchart_instance \n WHERE\n mini_index MATCH $1"
|
||||
},
|
||||
"069a127ffb1062321bba5a915a4ead3e0bbe4dabf6e0f684d8cc6a6d8a68ad5b": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR IGNORE INTO fts_repositories ( name, description, website, html_url ) \n VALUES ( $1, $2, $3, $4 );"
|
||||
},
|
||||
"0b179588df37779f563f0ad8c43e920a8bc22b3eed682778cef9dd05608f9691": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
|
@ -94,26 +104,6 @@
|
|||
},
|
||||
"query": "SELECT html_url, profile_photo_html_url, imported FROM starchart_users WHERE username = $1 AND \n hostname_id = (SELECT ID FROM starchart_forges WHERE hostname = $2)"
|
||||
},
|
||||
"1f47bff0270cfb9d58972ecf6ae36f348d9f605a40a8669ce2908b50a3aac735": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR REPLACE INTO fts_repositories ( name, description, website, html_url ) \n VALUES ( $1, $2, $3, $4 );"
|
||||
},
|
||||
"2ac627ddd905bab19582037ca5e402ebaa268b61d94459e125644028d04e6dc2": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR REPLACE INTO starchart_forges\n (hostname, verified_on, forge_type, starchart_instance)\n VALUES (\n $1,\n $2,\n (SELECT ID FROM starchart_forge_type WHERE name = $3),\n (SELECT ID FROM starchart_introducer WHERE instance_url = $4)\n )"
|
||||
},
|
||||
"2afb17ba3753aa440465a836b46b7a1466f25791cfc4d0acdd38bc2755ae3e86": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
|
@ -150,6 +140,16 @@
|
|||
},
|
||||
"query": "SELECT ID FROM starchart_forge_type WHERE name = $1"
|
||||
},
|
||||
"338fb30307071e6df9efee6a68697c60e579d7b2332630bce401c0e7186a642a": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
}
|
||||
},
|
||||
"query": "INSERT INTO \n starchart_users (\n hostname_id, username, html_url,\n profile_photo_html_url, added_on, last_crawl_on, imported\n ) \n VALUES (\n (SELECT ID FROM starchart_forges WHERE hostname = $1), $2, $3, $4, $5, $6, $7)"
|
||||
},
|
||||
"364c8e3d147318b864fd28ad284f225aaace9479b5cf0428fb97f0e5689e248d": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
|
@ -224,16 +224,6 @@
|
|||
},
|
||||
"query": "SELECT\n hostname,\n last_crawl_on,\n starchart_introducer.instance_url,\n starchart_forge_type.name\n FROM\n starchart_forges\n INNER JOIN\n starchart_forge_type\n ON\n starchart_forges.forge_type = starchart_forge_type.id\n LEFT JOIN\n starchart_introducer\n ON\n starchart_introducer.ID = starchart_forges.starchart_instance\n WHERE \n starchart_forges.imported = 0\n ORDER BY\n starchart_forges.ID\n LIMIT $1 OFFSET $2;\n "
|
||||
},
|
||||
"6c6a24873ae0053df5b875a35ed8c605e73c9718951b87140d2cada093ec4cf6": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR REPLACE INTO \n starchart_repositories (\n hostname_id, owner_id, name, description, html_url, website, created,\n last_crawl, imported\n )\n VALUES (\n (SELECT ID FROM starchart_forges WHERE hostname = $1),\n (SELECT ID FROM starchart_users WHERE username = $2),\n $3, $4, $5, $6, $7, $8, $9\n );"
|
||||
},
|
||||
"6f5ca3d71a541eb6f33e37a5889c048536ab6ad7e81a6236d73aa71433c13717": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
|
@ -244,6 +234,16 @@
|
|||
},
|
||||
"query": "INSERT OR IGNORE INTO starchart_project_topics ( name ) VALUES ( $1 );"
|
||||
},
|
||||
"74fb3a1ae4f339b5371a6872e6eb4ed7c1f5968dac70de1639454c394a05cb38": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
}
|
||||
},
|
||||
"query": "INSERT INTO starchart_forges\n (hostname, verified_on, forge_type, starchart_instance)\n VALUES\n (\n $1, $2,\n (SELECT ID FROM starchart_forge_type WHERE name = $3),\n $4)"
|
||||
},
|
||||
"7590630f5fe7e05014b70ac0047f9b6c724b88e35e1b1306fb89760612929d55": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
|
@ -424,16 +424,6 @@
|
|||
},
|
||||
"query": "SELECT name FROM starchart_project_topics ORDER BY ID LIMIT $1 OFFSET $2;"
|
||||
},
|
||||
"a754fb4bcdd227f3ab440c5600a534dcabde3e75ea242a530d6aa12c7502c88e": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR REPLACE INTO \n starchart_users (\n hostname_id, username, html_url,\n profile_photo_html_url, added_on, last_crawl_on, imported\n ) \n VALUES (\n (SELECT ID FROM starchart_forges WHERE hostname = $1), $2, $3, $4, $5, $6, $7)"
|
||||
},
|
||||
"a81dd4b5df666e22fac211092e7b8425d838dd9023aa2b17659352f30831944d": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
|
@ -452,6 +442,16 @@
|
|||
},
|
||||
"query": "SELECT ID FROM starchart_users WHERE username = $1 AND \n hostname_id = (SELECT ID FROM starchart_forges WHERE hostname = $2)"
|
||||
},
|
||||
"a912406491a4e9ea1bbf8a3d0003e948a24df7f101c490aca395f5b86ec64d00": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR IGNORE INTO starchart_forges\n (hostname, verified_on, forge_type, starchart_instance)\n VALUES (\n $1,\n $2,\n (SELECT ID FROM starchart_forge_type WHERE name = $3),\n (SELECT ID FROM starchart_introducer WHERE instance_url = $4)\n )"
|
||||
},
|
||||
"ae9295f46da1753fe91a633b5738533084005ad32915c19635f896c454284b6b": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
|
@ -522,6 +522,16 @@
|
|||
},
|
||||
"query": "INSERT OR IGNORE INTO\n starchart_imported_starcharts (starchart_instance)\n VALUES ((SELECT ID FROM starchart_introducer WHERE instance_url = $1));"
|
||||
},
|
||||
"ca22e5f6e7065cf2d4ffdbfac0084f9871de8cd9073d470cbf7eac2de2a73c47": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
}
|
||||
},
|
||||
"query": "INSERT INTO \n starchart_repositories (\n hostname_id, owner_id, name, description, html_url, website, created,\n last_crawl, imported\n )\n VALUES (\n (SELECT ID FROM starchart_forges WHERE hostname = $1),\n (SELECT ID FROM starchart_users WHERE username = $2),\n $3, $4, $5, $6, $7, $8, $9\n );"
|
||||
},
|
||||
"d22d18f0c2b49a570ac95e1af5ba398b3d1c7a1a5a6780d6dc646077e466714c": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
|
@ -680,16 +690,6 @@
|
|||
},
|
||||
"query": "SELECT word FROM starchart_mini_index"
|
||||
},
|
||||
"fc32b8d0e5599dcc1ceed4127ca860d09754dd6d1fe558fbe5c7f4086c901b8f": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
}
|
||||
},
|
||||
"query": "INSERT OR REPLACE INTO starchart_forges\n (hostname, verified_on, forge_type, starchart_instance)\n VALUES\n (\n $1, $2,\n (SELECT ID FROM starchart_forge_type WHERE name = $3),\n $4)"
|
||||
},
|
||||
"fdf87490759150d528139b13eb1a28532b5bebb546ade00bcb6a7d648abcd445": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
|
|
|
@ -160,7 +160,7 @@ impl Database {
|
|||
return Ok(());
|
||||
}
|
||||
sqlx::query!(
|
||||
"INSERT OR REPLACE INTO fts_repositories ( name, description, website, html_url )
|
||||
"INSERT OR IGNORE INTO fts_repositories ( name, description, website, html_url )
|
||||
VALUES ( $1, $2, $3, $4 );",
|
||||
name,
|
||||
description,
|
||||
|
@ -313,7 +313,7 @@ impl SCDatabase for Database {
|
|||
let forge_type = f.forge_type.to_str();
|
||||
if let Some(instance_url) = f.starchart_url {
|
||||
sqlx::query!(
|
||||
"INSERT OR REPLACE INTO starchart_forges
|
||||
"INSERT OR IGNORE INTO starchart_forges
|
||||
(hostname, verified_on, forge_type, starchart_instance)
|
||||
VALUES (
|
||||
$1,
|
||||
|
@ -331,7 +331,7 @@ impl SCDatabase for Database {
|
|||
.map_err(map_register_err)?;
|
||||
} else {
|
||||
sqlx::query!(
|
||||
"INSERT OR REPLACE INTO starchart_forges
|
||||
"INSERT INTO starchart_forges
|
||||
(hostname, verified_on, forge_type, starchart_instance)
|
||||
VALUES
|
||||
(
|
||||
|
@ -491,7 +491,7 @@ impl SCDatabase for Database {
|
|||
let now = now_unix_time_stamp();
|
||||
let url = db_core::clean_url(&u.url);
|
||||
sqlx::query!(
|
||||
"INSERT OR REPLACE INTO
|
||||
"INSERT INTO
|
||||
starchart_users (
|
||||
hostname_id, username, html_url,
|
||||
profile_photo_html_url, added_on, last_crawl_on, imported
|
||||
|
@ -607,7 +607,7 @@ impl SCDatabase for Database {
|
|||
let now = now_unix_time_stamp();
|
||||
let url = db_core::clean_url(&r.url);
|
||||
sqlx::query!(
|
||||
"INSERT OR REPLACE INTO
|
||||
"INSERT INTO
|
||||
starchart_repositories (
|
||||
hostname_id, owner_id, name, description, html_url, website, created,
|
||||
last_crawl, imported
|
||||
|
|
|
@ -5,12 +5,19 @@ networks:
|
|||
external: false
|
||||
|
||||
services:
|
||||
forgejo:
|
||||
image: codeberg.org/forgejo/forgejo:9
|
||||
server:
|
||||
image: gitea/gitea:1.16.5
|
||||
container_name: gitea
|
||||
environment:
|
||||
- FORGEJO__security__INSTALL_LOCK=true
|
||||
- FORGEJO__federation__ENABLED=true
|
||||
- FORGEJO__server__ROOT_URL=http://localhost
|
||||
- FORGEJO__server__HTTP_PORT=3000
|
||||
- USER_UID=1000
|
||||
- USER_GID=1000
|
||||
restart: always
|
||||
networks:
|
||||
- gitea
|
||||
volumes:
|
||||
- ./tmp/gitea:/data
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
ports:
|
||||
- "3000:3000"
|
||||
- "8080:3000"
|
||||
- "2221:22"
|
||||
|
|
|
@ -1,37 +0,0 @@
|
|||
[package]
|
||||
name = "forgeflux"
|
||||
version = "0.1.0"
|
||||
authors = ["realaravinth <realaravinth@batsense.net>"]
|
||||
description = "ForgeFlux StarChart - Federated forge spider"
|
||||
documentation = "https://forgeflux.org/"
|
||||
edition = "2021"
|
||||
license = "AGPLv3 or later version"
|
||||
|
||||
|
||||
[lib]
|
||||
name = "forgeflux"
|
||||
path = "src/lib.rs"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.51"
|
||||
url = { version = "2.2.2", features = ["serde"] }
|
||||
tokio = { version = "1.17", features = ["time"] }
|
||||
|
||||
[dependencies.forge-core]
|
||||
path = "../forge-core"
|
||||
|
||||
[dependencies.reqwest]
|
||||
features = ["rustls-tls-native-roots", "gzip", "deflate", "brotli", "json"]
|
||||
version = "0.11.10"
|
||||
|
||||
[dependencies.serde]
|
||||
features = ["derive"]
|
||||
version = "1"
|
||||
|
||||
[dependencies.serde_json]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.7"
|
|
@ -1,198 +0,0 @@
|
|||
/*
|
||||
* ForgeFlux StarChart - A federated software forge spider
|
||||
* Copyright © 2022 Aravinth Manivannan <realaravinth@batsense.net>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use reqwest::Client;
|
||||
use tokio::task::JoinHandle;
|
||||
use url::Url;
|
||||
|
||||
use db_core::ForgeImplementation;
|
||||
use forge_core::dev::*;
|
||||
use forge_core::Repository;
|
||||
|
||||
pub mod schema;
|
||||
|
||||
const REPO_SEARCH_PATH: &str = "/search/repositories";
|
||||
const FORGEFLUX_NODEINFO: &str = "/nodeinfo/2.0";
|
||||
const FORGEFLUX_IDENTIFIER: &str = "forgeflux";
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ForgeFlux {
|
||||
pub instance_url: Url,
|
||||
pub client: Client,
|
||||
url: Url,
|
||||
}
|
||||
|
||||
impl ForgeFlux {
|
||||
pub fn new(instance_url: Url, client: Client) -> Self {
|
||||
let url = Url::parse(&db_core::clean_url(&instance_url)).unwrap();
|
||||
|
||||
Self {
|
||||
instance_url,
|
||||
client,
|
||||
url,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for ForgeFlux {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.url == other.url && self.instance_url == other.instance_url
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl SCForge for ForgeFlux {
|
||||
async fn is_forge(&self) -> bool {
|
||||
let u = self.instance_url.clone();
|
||||
let mut node_info_url = self.instance_url.clone();
|
||||
node_info_url.set_path(FORGEFLUX_NODEINFO);
|
||||
let resp = self.client.get(node_info_url).send().await.unwrap();
|
||||
if resp.status() == 200 {
|
||||
let res: schema::Nodeinfo = resp.json().await.unwrap();
|
||||
return res.software.name == FORGEFLUX_IDENTIFIER;
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn get_url(&self) -> &Url {
|
||||
&self.url
|
||||
}
|
||||
|
||||
fn forge_type(&self) -> ForgeImplementation {
|
||||
ForgeImplementation::ForgeFlux
|
||||
}
|
||||
|
||||
async fn crawl(&self, limit: u64, page: u64, rate_limit: u64) -> CrawlResp {
|
||||
fn empty_is_none(s: &str) -> Option<String> {
|
||||
let s = s.trim();
|
||||
if s.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(s.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
let tags = Tags::default();
|
||||
let mut users = UserMap::default();
|
||||
let mut internal_users = UserMap::default();
|
||||
let mut repos = Repositories::default();
|
||||
|
||||
let instance_url = self.instance_url.clone();
|
||||
|
||||
let mut url = instance_url.clone();
|
||||
url.set_path(REPO_SEARCH_PATH);
|
||||
url.set_query(Some(&format!("page={page}&limit={limit}")));
|
||||
let mut res: Vec<schema::Repository> = self
|
||||
.client
|
||||
.get(url)
|
||||
.send()
|
||||
.await
|
||||
.unwrap()
|
||||
.json()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut sleep_fut: Option<JoinHandle<()>> = None;
|
||||
for repo in res.drain(0..) {
|
||||
let user = { //if internal_users.contains_key(&repo.attributed_to.to_string()) {
|
||||
if let Some(sleep_fut) = sleep_fut {
|
||||
sleep_fut.await.unwrap();
|
||||
}
|
||||
|
||||
let user: schema::User = self
|
||||
.client
|
||||
.get(repo.attributed_to.clone())
|
||||
.send()
|
||||
.await
|
||||
.unwrap()
|
||||
.json()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
sleep_fut = Some(tokio::spawn(tokio::time::sleep(Duration::new(
|
||||
rate_limit, 0,
|
||||
))));
|
||||
|
||||
let profile_photo = if let Some(profile_photo) = user.icon {
|
||||
Some(profile_photo.url.to_string())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let f_user = Arc::new(User {
|
||||
url: user.id.clone(),
|
||||
username: Arc::new(user.preferred_username),
|
||||
html_link: user.id.to_string(),
|
||||
profile_photo,
|
||||
});
|
||||
|
||||
users.insert(f_user.username.clone(), f_user.clone());
|
||||
users.insert(Arc::new(f_user.url.to_string()), f_user.clone());
|
||||
internal_users.insert(Arc::new(repo.attributed_to.to_string()), f_user.clone());
|
||||
f_user
|
||||
};
|
||||
// } else {
|
||||
// internal_users
|
||||
// .get(&repo.attributed_to.to_string())
|
||||
// .unwrap()
|
||||
// .clone()
|
||||
// };
|
||||
|
||||
let frepo = Repository {
|
||||
url: repo.id.clone(),
|
||||
website: None,
|
||||
name: repo.name,
|
||||
owner: user,
|
||||
html_link: repo.id.to_string(),
|
||||
tags: None,
|
||||
description: Some(repo.summary),
|
||||
};
|
||||
|
||||
repos.push(frepo);
|
||||
}
|
||||
CrawlResp { repos, tags, users }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use url::Url;

    // Number of repositories expected on the test instance. Currently 0,
    // which makes `steps` below 0 and skips the crawl loop entirely — only
    // forge detection is exercised.
    pub const NET_REPOSITORIES: u64 = 0;
    // Page size requested per crawl call.
    pub const PER_CRAWL: u64 = 10;

    #[actix_rt::test]
    // Integration test: requires a live ForgeFlux instance reachable at the
    // URL in the FORGEFLUX_HOST environment variable; panics if it is unset.
    async fn forgeflux_works() {
        let ctx = ForgeFlux::new(
            Url::parse(&std::env::var("FORGEFLUX_HOST").unwrap()).unwrap(),
            Client::new(),
        );

        // Nodeinfo-based forge detection must recognize the instance.
        assert!(ctx.is_forge().await);
        let steps = NET_REPOSITORIES / PER_CRAWL;

        // With NET_REPOSITORIES == 0 this loop body never runs; it documents
        // the intended pagination check for when the instance has data.
        for i in 0..steps {
            let res = ctx.crawl(PER_CRAWL, i, 0).await;
            assert_eq!(res.repos.len() as u64, PER_CRAWL);
        }
    }
}
|
|
@ -1,79 +0,0 @@
|
|||
/*
|
||||
* ForgeFlux StarChart - A federated software forge spider
|
||||
 * Copyright © 2022 Aravinth Manivannan <realaravinth@batsense.net>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of the
|
||||
* License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
/// Image attached to a user profile (the actor's `icon` object).
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct ProfilePhoto {
    /// Location of the image file.
    pub url: Url,
    /// MIME type of the image; serialized as `mediaType`.
    pub media_type: String,
}
|
||||
|
||||
/// User (actor document) as served by a ForgeFlux instance.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct User {
    /// Human-readable name of the user.
    pub name: String,
    /// Login handle; serialized as `preferredUsername`.
    pub preferred_username: String,
    /// Actor ID URL; the crawler also uses it as the user's HTML link.
    pub id: Url,
    /// Optional separate profile page URL.
    pub url: Option<Url>,
    /// Optional profile photo; absent for users without an icon.
    pub icon: Option<ProfilePhoto>,
}
|
||||
|
||||
/// Repository object as served by a ForgeFlux instance.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Repository {
    /// Object ID URL; the crawler also uses it as the repository HTML link.
    pub id: Url,
    /// URL to clone the repository from; serialized as `cloneUri`.
    pub clone_uri: Url,
    /// Inbox URL of the repository object (not consumed by the crawler).
    pub inbox: Url,
    /// Repository name.
    pub name: String,
    /// Actor URL of the owning user; serialized as `attributedTo`. The
    /// crawler dereferences this URL to resolve the owner.
    pub attributed_to: Url,
    /// Repository description; may be an empty string.
    pub summary: String,
}
|
||||
|
||||
/// `software` section of a nodeinfo document.
#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize, Deserialize)]
pub struct Software {
    /// Advertised software name; compared against the ForgeFlux identifier
    /// during forge detection.
    pub name: String,
}
|
||||
|
||||
/// Minimal nodeinfo document: only the `software` section is deserialized,
/// as that is all forge detection needs.
#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize, Deserialize)]
pub struct Nodeinfo {
    /// Information about the software running the instance.
    pub software: Software,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    use std::fs;

    #[test]
    /// Tests that captured ForgeFlux responses deserialize into the structs
    /// defined in this module without panicking. The fixture file contains
    /// one JSON document per line, each a list of repositories.
    ///
    /// (The previous doc comment referred to Gitea; it was copy-pasted from
    /// the Gitea schema module — this file models ForgeFlux payloads.)
    fn schema_doesnt_panic() {
        let files = ["./tests/schema/forgeflux/net.forgeflux.org.json"];
        for file in files.iter() {
            let contents = fs::read_to_string(file).unwrap();
            for line in contents.lines() {
                let _: Vec<Repository> =
                    serde_json::from_str(line).expect("ForgeFlux schema panicked");
            }
        }
    }
}
|
File diff suppressed because one or more lines are too long
|
@ -31,7 +31,6 @@ pub mod schema;
|
|||
/// Repository search endpoint of the Gitea/Forgejo REST API.
const REPO_SEARCH_PATH: &str = "/api/v1/repos/search";
/// Nodeinfo endpoint used for forge-type detection.
const GITEA_NODEINFO: &str = "/api/v1/nodeinfo";
/// Software name Gitea advertises in its nodeinfo document.
const GITEA_IDENTIFIER: &str = "gitea";
/// Software name Forgejo advertises in its nodeinfo document.
const FORGEJO_IDENTIFIER: &str = "forgejo";
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Gitea {
|
||||
|
@ -178,16 +177,13 @@ mod tests {
|
|||
use super::*;
|
||||
use url::Url;
|
||||
|
||||
pub const GITEA_HOST: &str = "http://localhost:8080";
|
||||
pub const NET_REPOSITORIES: u64 = 100;
|
||||
pub const PER_CRAWL: u64 = 10;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn gitea_works() {
|
||||
let ctx = Gitea::new(
|
||||
Url::parse(&std::env::var("FORGEJO_HOST").unwrap()).unwrap(),
|
||||
Client::new(),
|
||||
);
|
||||
|
||||
let ctx = Gitea::new(Url::parse(GITEA_HOST).unwrap(), Client::new());
|
||||
assert!(ctx.is_forge().await);
|
||||
let steps = NET_REPOSITORIES / PER_CRAWL;
|
||||
|
||||
|
|
21
renovate.json
Normal file
21
renovate.json
Normal file
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:recommended",
|
||||
":dependencyDashboard"
|
||||
],
|
||||
"labels": [
|
||||
"renovate-bot"
|
||||
],
|
||||
"prHourlyLimit": 0,
|
||||
"timezone": "Asia/Kolkata",
|
||||
"prCreation": "immediate",
|
||||
"vulnerabilityAlerts": {
|
||||
"enabled": true,
|
||||
"labels": [
|
||||
"renovate-bot",
|
||||
"renovate-security",
|
||||
"security"
|
||||
]
|
||||
}
|
||||
}
|
|
@ -20,7 +20,7 @@ def check_online():
|
|||
while True:
|
||||
try:
|
||||
res = requests.get(
|
||||
"http://localhost:3000/api/v1/nodeinfo", allow_redirects=False
|
||||
"http://localhost:8080/api/v1/nodeinfo", allow_redirects=False
|
||||
)
|
||||
if any([res.status_code == 302, res.status_code == 200]):
|
||||
break
|
||||
|
@ -30,6 +30,46 @@ def check_online():
|
|||
count += 1
|
||||
continue
|
||||
|
||||
|
||||
def install():
    """Complete Gitea's first-run install wizard on the test instance.

    Submits the installation form with an SQLite backend and in-container
    paths. Admin fields are left blank. The form is posted to host port
    8080 while ``http_port`` is 3000 — presumably a container port
    mapping; confirm against the compose/CI setup.
    """
    INSTALL_PAYLOAD = {
        "db_type": "sqlite3",
        "db_host": "localhost:3306",
        "db_user": "root",
        "db_passwd": "",
        "db_name": "gitea",
        "ssl_mode": "disable",
        "db_schema": "",
        "charset": "utf8",
        "db_path": "/data/gitea/gitea.db",
        "app_name": "Gitea:+Git+with+a+cup+of+tea",
        "repo_root_path": "/data/git/repositories",
        "lfs_root_path": "/data/git/lfs",
        "run_user": "git",
        "domain": "localhost",
        "ssh_port": "2221",
        "http_port": "3000",
        "app_url": "http://localhost:8080/",
        "log_root_path": "/data/gitea/log",
        "smtp_host": "",
        "smtp_from": "",
        "smtp_user": "",
        "smtp_passwd": "",
        "enable_federated_avatar": "on",
        "enable_open_id_sign_in": "on",
        "enable_open_id_sign_up": "on",
        "default_allow_create_organization": "on",
        "default_enable_timetracking": "on",
        "no_reply_address": "noreply.localhost",
        "password_algorithm": "pbkdf2",
        "admin_name": "",
        "admin_passwd": "",
        "admin_confirm_passwd": "",
        "admin_email": "",
    }
    # Plain string: the previous f-string had no placeholders (flake8 F541).
    requests.post("http://localhost:8080", data=INSTALL_PAYLOAD)
|
||||
|
||||
|
||||
class ParseCSRFGiteaForm(HTMLParser):
|
||||
token: str = None
|
||||
|
||||
|
@ -70,7 +110,7 @@ class HTMLClient:
|
|||
|
||||
def get_csrf_token(self, url: str) -> str:
|
||||
resp = self.session.get(url, allow_redirects=False)
|
||||
if resp.status_code != 200 and resp.status_code != 302 and resp.status_code != 303:
|
||||
if resp.status_code != 200 and resp.status_code != 302:
|
||||
print(resp.status_code, resp.text)
|
||||
raise Exception(f"Can't get csrf token: {resp.status_code}")
|
||||
csrf = self.__get_csrf_token(resp.text)
|
||||
|
@ -78,7 +118,7 @@ class HTMLClient:
|
|||
|
||||
|
||||
def register(client: HTMLClient):
|
||||
url = "http://localhost:3000/user/sign_up"
|
||||
url = "http://localhost:8080/user/sign_up"
|
||||
csrf = client.get_csrf_token(url)
|
||||
payload = {
|
||||
"_csrf": csrf,
|
||||
|
@ -91,7 +131,7 @@ def register(client: HTMLClient):
|
|||
|
||||
|
||||
def login(client: HTMLClient):
|
||||
url = "http://localhost:3000/user/login"
|
||||
url = "http://localhost:8080/user/login"
|
||||
csrf = client.get_csrf_token(url)
|
||||
payload = {
|
||||
"_csrf": csrf,
|
||||
|
@ -101,7 +141,7 @@ def login(client: HTMLClient):
|
|||
}
|
||||
resp = client.session.post(url, data=payload, allow_redirects=False)
|
||||
print(f"login {client.session.cookies}")
|
||||
if resp.status_code == 303:
|
||||
if resp.status_code == 302:
|
||||
print("User logged in")
|
||||
return
|
||||
|
||||
|
@ -127,7 +167,7 @@ def create_repositories(client: HTMLClient):
|
|||
}
|
||||
return data
|
||||
|
||||
url = "http://localhost:3000/repo/create"
|
||||
url = "http://localhost:8080/repo/create"
|
||||
for repo in REPOS:
|
||||
csrf = client.get_csrf_token(url)
|
||||
resp = client.session.post(url, data=get_repository_payload(csrf, repo))
|
||||
|
@ -142,7 +182,7 @@ def create_repositories(client: HTMLClient):
|
|||
def add_tag(repo: str, client: HTMLClient):
|
||||
print("adding tags")
|
||||
tag = "testing"
|
||||
url = f"http://{GITEA_USER}:{GITEA_PASSWORD}@localhost:3000/api/v1/repos/{GITEA_USER}/{repo}/topics/{tag}"
|
||||
url = f"http://{GITEA_USER}:{GITEA_PASSWORD}@localhost:8080/api/v1/repos/{GITEA_USER}/{repo}/topics/{tag}"
|
||||
resp = requests.put(url)
|
||||
if resp.status_code != 204:
|
||||
print(f"Error while adding tags repository: {repo} {resp.status_code}")
|
||||
|
@ -156,6 +196,8 @@ if __name__ == "__main__":
|
|||
REPOS.append(f"repository_{i}")
|
||||
check_online()
|
||||
print("Instance online")
|
||||
install()
|
||||
print("Instance configured and installed")
|
||||
client = HTMLClient()
|
||||
count = 0
|
||||
while True:
|
||||
|
|
|
@ -105,17 +105,16 @@ mod tests {
|
|||
use crate::errors::ServiceError;
|
||||
use crate::pages::auth::add::{AddChallenge, AddChallengePayload, ReadableError};
|
||||
use crate::pages::errors::*;
|
||||
use crate::settings::Settings;
|
||||
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn add_page_works() {
|
||||
let (_, ctx, _, _tmp) = crate::tests::sqlx_sqlite::get_ctx().await;
|
||||
// let settings = Settings::new().unwrap();
|
||||
AddChallenge::page(&ctx.settings);
|
||||
#[test]
|
||||
fn add_page_works() {
|
||||
let settings = Settings::new().unwrap();
|
||||
AddChallenge::page(&settings);
|
||||
let payload = AddChallengePayload {
|
||||
hostname: url::Url::parse("https://example.com").unwrap(),
|
||||
};
|
||||
let page = AddChallenge::new(&ctx.settings, Some(&payload));
|
||||
let page = AddChallenge::new(&settings, Some(&payload));
|
||||
page.with_error(&ReadableError::new(&ServiceError::ClosedForRegistration));
|
||||
page.render();
|
||||
}
|
||||
|
|
|
@ -63,12 +63,6 @@ pub struct Auth {
|
|||
pub verify: &'static str,
|
||||
}
|
||||
|
||||
impl Default for Auth {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Auth {
|
||||
/// create new instance of Authentication route
|
||||
pub const fn new() -> Auth {
|
||||
|
|
114
src/settings.rs
114
src/settings.rs
|
@ -309,15 +309,63 @@ fn set_separator_field(mut s: ConfigBuilder<DefaultState>) -> ConfigBuilder<Defa
|
|||
&format!("{PREFIX}{SEPARATOR}CRAWLER{SEPARATOR}WAIT_BEFORE_NEXT_API_CALL"),
|
||||
"crawler.wait_before_next_api_call",
|
||||
);
|
||||
|
||||
s = from_env(
|
||||
s,
|
||||
&format!("{PREFIX}{SEPARATOR}REPOSITORY{SEPARATOR}ROOT"),
|
||||
"repository.root",
|
||||
);
|
||||
s
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::utils::get_random;

    #[test]
    // Exercises Repository::create_root_dir: creation of a missing root,
    // idempotence on an existing root, and replacement of a plain file
    // occupying the root path.
    fn root_dir_is_created_test() {
        // Pick a temp-dir path that does not exist yet.
        let dir;
        loop {
            let mut tmp = env::temp_dir();
            tmp = tmp.join(get_random(10));

            if tmp.exists() {
                continue;
            } else {
                dir = tmp;
                break;
            }
        }

        let repo = Repository {
            root: dir.to_str().unwrap().to_owned(),
        };

        // A missing root is created as a directory.
        repo.create_root_dir();
        assert!(dir.exists());
        assert!(dir.is_dir());
        let file = dir.join("foo");
        fs::write(&file, "foo").unwrap();
        // Calling again on an existing root must not wipe its contents.
        repo.create_root_dir();
        assert!(dir.exists());
        assert!(dir.is_dir());

        assert!(file.exists());
        assert!(file.is_file());

        // Point the root at a plain file: create_root_dir is expected to
        // replace it with a directory (asserted below).
        let repo = Repository {
            root: file.to_str().unwrap().to_owned(),
        };

        repo.create_root_dir();
        assert!(file.exists());
        assert!(file.is_dir());

        // A freshly created root is seeded with the CC0 license file.
        let mut license_path = Path::new(&repo.root).to_path_buf();
        license_path.push(LICENSE_FILE);
        assert!(license_path.exists());
        assert!(license_path.is_file());
        assert!(fs::read_to_string(license_path)
            .unwrap()
            .contains(CC0_LICENSE_TXT));
    }
}
|
||||
|
||||
const CC0_LICENSE_TXT: &str = r#"
|
||||
Creative Commons Legal Code
|
||||
|
||||
|
@ -443,57 +491,3 @@ express Statement of Purpose.
|
|||
"#;
|
||||
|
||||
/// Name of the license file written into the repository root directory.
const LICENSE_FILE: &str = "LICENSE.txt";
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::utils::get_random;

    #[test]
    // Exercises Repository::create_root_dir: creation of a missing root,
    // idempotence on an existing root, and replacement of a plain file
    // occupying the root path.
    fn root_dir_is_created_test() {
        // Pick a temp-dir path that does not exist yet.
        let dir;
        loop {
            let mut tmp = env::temp_dir();
            tmp = tmp.join(get_random(10));

            if tmp.exists() {
                continue;
            } else {
                dir = tmp;
                break;
            }
        }

        let repo = Repository {
            root: dir.to_str().unwrap().to_owned(),
        };

        // A missing root is created as a directory.
        repo.create_root_dir();
        assert!(dir.exists());
        assert!(dir.is_dir());
        let file = dir.join("foo");
        fs::write(&file, "foo").unwrap();
        // Calling again on an existing root must not wipe its contents.
        repo.create_root_dir();
        assert!(dir.exists());
        assert!(dir.is_dir());

        assert!(file.exists());
        assert!(file.is_file());

        // Point the root at a plain file: create_root_dir is expected to
        // replace it with a directory (asserted below).
        let repo = Repository {
            root: file.to_str().unwrap().to_owned(),
        };

        repo.create_root_dir();
        assert!(file.exists());
        assert!(file.is_dir());

        // A freshly created root is seeded with the CC0 license file.
        let mut license_path = Path::new(&repo.root).to_path_buf();
        license_path.push(LICENSE_FILE);
        assert!(license_path.exists());
        assert!(license_path.is_file());
        assert!(fs::read_to_string(license_path)
            .unwrap()
            .contains(CC0_LICENSE_TXT));
    }
}
|
||||
|
|
|
@ -25,7 +25,6 @@ use url::Url;
|
|||
|
||||
use db_core::prelude::*;
|
||||
use forge_core::prelude::*;
|
||||
use forgeflux::ForgeFlux;
|
||||
use gitea::Gitea;
|
||||
|
||||
use crate::ctx::Ctx;
|
||||
|
@ -37,17 +36,10 @@ impl Ctx {
|
|||
pub async fn crawl(&self, instance_url: &Url, db: &BoxDB, federate: &ArcFederate) {
|
||||
info!("[crawl][{instance_url}] Init crawling");
|
||||
let forge: Box<dyn SCForge> =
|
||||
Box::new(ForgeFlux::new(instance_url.clone(), self.client.clone()));
|
||||
let forge = if forge.is_forge().await {
|
||||
forge
|
||||
} else {
|
||||
let gitea = Box::new(Gitea::new(instance_url.clone(), self.client.clone()));
|
||||
if gitea.is_forge().await {
|
||||
gitea
|
||||
} else {
|
||||
Box::new(Gitea::new(instance_url.clone(), self.client.clone()));
|
||||
if !forge.is_forge().await {
|
||||
unimplemented!("Forge type unimplemented");
|
||||
}
|
||||
};
|
||||
|
||||
let mut page = 1;
|
||||
let url = forge.get_url();
|
||||
|
@ -237,12 +229,13 @@ mod tests {
|
|||
|
||||
use url::Url;
|
||||
|
||||
pub const GITEA_HOST: &str = "http://localhost:8080";
|
||||
pub const GITEA_USERNAME: &str = "bot";
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn crawl_gitea() {
|
||||
let (db, ctx, federate, _tmp_dir) = sqlx_sqlite::get_ctx().await;
|
||||
let url = Url::parse(&std::env::var("FORGEJO_HOST").unwrap()).unwrap();
|
||||
let url = Url::parse(GITEA_HOST).unwrap();
|
||||
ctx.crawl(&url, &db, &federate).await;
|
||||
// let hostname = get_hostname(&Url::parse(GITEA_HOST).unwrap());
|
||||
assert!(db.forge_exists(&url).await.unwrap());
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
*/
|
||||
|
||||
use std::env;
|
||||
pub use std::sync::Arc;
|
||||
|
||||
use crate::ctx::Ctx;
|
||||
pub use crate::db::BoxDB;
|
||||
|
@ -56,18 +57,12 @@ pub mod sqlx_sqlite {
|
|||
let url = env::var("SQLITE_DATABASE_URL").unwrap();
|
||||
env::set_var("DATABASE_URL", &url);
|
||||
println!("found db url: {url}");
|
||||
let tmp_dir = Temp::new_dir().unwrap();
|
||||
let d = tmp_dir.as_path();
|
||||
let _ = std::fs::remove_dir_all(d);
|
||||
let _ = std::fs::create_dir(d);
|
||||
|
||||
env::set_var("STARCHART__REPOSITORY__ROOT", tmp_dir.to_str().unwrap());
|
||||
|
||||
let mut settings = Settings::new().unwrap();
|
||||
settings.database.url = url.clone();
|
||||
settings.database.database_type = DBType::Sqlite;
|
||||
let db = sqlite::get_data(Some(settings.clone())).await;
|
||||
|
||||
let tmp_dir = Temp::new_dir().unwrap();
|
||||
settings.repository.root = tmp_dir.to_str().unwrap().to_string();
|
||||
let federate = get_federate(Some(settings.clone())).await;
|
||||
|
||||
|
|
Loading…
Add table
Reference in a new issue