Compare commits

master..wip-deployment-tombstone

No commits in common. "master" and "wip-deployment-tombstone" have entirely different histories.

45 changed files with 504 additions and 2696 deletions


@@ -1,4 +1,4 @@
**/target/
/target
tarpaulin-report.html
.env
cobertura.xml


@@ -68,9 +68,6 @@ jobs:
- name: download deps
run: make dev-env
env:
LPCONDUCTOR_CREDS_USERNAME: "librepages_api"
LPCONDUCTOR_CREDS_PASSWORD: "longrandomlygeneratedpassword"
- name: Apply migrations
run: make migrate


@@ -55,9 +55,6 @@ jobs:
- name: download deps
run: make dev-env
env:
LPCONDUCTOR_CREDS_USERNAME: "librepages_api"
LPCONDUCTOR_CREDS_PASSWORD: "longrandomlygeneratedpassword"
- name: configure GPG key
if: (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'realaravinth/librepages'


@@ -1,82 +0,0 @@
pipeline:
backend:
image: rust
environment:
- DATABASE_URL=postgres://postgres:password@database:5432/postgres
commands:
- curl -fsSL https://deb.nodesource.com/setup_16.x | bash - &&\
- apt update && apt-get -y --no-install-recommends install nodejs tar gpg curl wget
- rustup component add rustfmt
- rustup component add clippy
# rewrite conducotr configuration
- sed -i 's%url = "http:\/\/localhost:5000"%url = "http:\/\/librepages-conductor:5000"%' config/default.toml
- make dev-env
- make migrate
- make lint
- make test
- make release
build_docker_img:
image: plugins/docker
when:
event: [pull_request]
settings:
dry_run: true
repo: realaravinth/librepages
tags: latest
build_and_publish_docker_img:
image: plugins/docker
when:
event: [push, tag, deployment]
settings:
username: realaravinth
password:
from_secret: DOCKER_TOKEN
repo: realaravinth/librepages
tags: latest
# build_publisher_docker_img:
# image: plugins/docker
# when:
# event: [push, tag, deployment]
# settings:
# dry_run: true
# dockerfile: scripts/publish-bins-docker
# purge: false
# repo: realaravinth/librepages-publisher
# tags: latest
#
publish_bins:
image: rust
when:
event: [push, tag, deployment]
commands:
- apt update
- apt-get -y --no-install-recommends install gpg tar curl wget
- echo -n "$RELEASE_BOT_GPG_SIGNING_KEY" | gpg --batch --import --pinentry-mode loopback
- scripts/bin-publish.sh publish master latest $DUMBSERVE_PASSWORD
secrets: [RELEASE_BOT_GPG_SIGNING_KEY, DUMBSERVE_PASSWORD, GPG_PASSWORD]
services:
database:
image: postgres
environment:
- POSTGRES_PASSWORD=password
librepages-conductor:
image: realaravinth/librepages-conductor
command: conductor serve
environment:
- LPCONDUCTOR_SERVER__PROXY_HAS_TLS=false
- LPCONDUCTOR_DEBUG=false
- LPCONDUCTOR_CONDUCTOR=dummy
- LPCONDUCTOR_SERVER_URL_PREFIX=""
- LPCONDUCTOR_SERVER_DOMAIN="librepages.test"
- LPCONDUCTOR_SERVER_IP=0.0.0.0
- LPCONDUCTOR_SERVER_PROXY_HAS_TLS=false
- LPCONDUCTOR_SERVER_PORT=7000
- LPCONDUCTOR_SOURCE_CODE=https://example.org
- LPCONDUCTOR_CREDS_USERNAME="librepages_api"
- LPCONDUCTOR_CREDS_PASSWORD="longrandomlygeneratedpassword"
- PORT=5000

Cargo.lock (generated, 1054 changes): file diff suppressed because it is too large.


@@ -1,12 +1,12 @@
[package]
name = "librepages"
name = "pages"
version = "0.1.0"
edition = "2021"
build = "build.rs"
homepage = "https://git.batsense.net/LibrePages/librepages"
repository = "https://git.batsense.net/LibrePages/librepages"
documentation = "https://git.batsense.net/LibrePages/librepages"
readme = "https://git.batsense.net/LibrePages/librepages/blob/master/README.md"
homepage = "https://github.com/realaravinth/pages"
repository = "https://github.com/realaravinth/pages"
documentation = "https://github.con/realaravinth/pages"
readme = "https://github.com/realaravinth/pages/blob/master/README.md"
license = "AGPLv3 or later version"
authors = ["realaravinth <realaravinth@batsense.net>"]
@@ -19,10 +19,8 @@ actix-identity = "0.4.0"
actix-rt = "2"
actix-web-codegen-const-routes = { version = "0.1.0", tag = "0.1.0", git = "https://github.com/realaravinth/actix-web-codegen-const-routes" }
argon2-creds = { branch = "master", git = "https://github.com/realaravinth/argon2-creds"}
sqlx = { version = "0.6.2", features = ["runtime-actix-rustls", "postgres", "time", "offline", "json", "uuid"] }
sqlx = { version = "0.6.1", features = ["runtime-actix-rustls", "postgres", "time", "offline", "json", "uuid"] }
clap = { version = "3.2.20", features = ["derive"]}
libconfig = { version = "0.1.0", git = "https://git.batsense.net/librepages/libconfig" }
libconductor = { version = "0.1.0", git = "https://git.batsense.net/librepages/conductor/" }
config = "0.13"
git2 = "0.14.2"
@@ -33,7 +31,8 @@ serde_json = "1"
pretty_env_logger = "0.4"
lazy_static = "1.4"
url = { version = "2.2", features = ["serde"] }
url = "2.2"
urlencoding = "2.1.0"
derive_more = "0.99"
@@ -52,10 +51,6 @@ tracing-actix-web = "0.6.2"
toml = "0.5.9"
serde_yaml = "0.9.14"
uuid = { version = "1.2.2", features = ["serde"] }
reqwest = { version = "0.11.13", features = ["json"] }
sha2 = "0.10.6"
hmac = "0.12.1"
hex= "0.4.3"
[dependencies.cache-buster]
git = "https://github.com/realaravinth/cache-buster"


@@ -19,11 +19,11 @@ RUN cd utils/cache-bust && cargo run
RUN cargo build --release
FROM debian:bullseye-slim
#RUN useradd -ms /bin/bash -u 1000 librepages
#RUN mkdir -p /var/www/librepages && chown librepages /var/www/librepages
#RUN useradd -ms /bin/bash -u 1000 pages
#RUN mkdir -p /var/www/pages && chown pages /var/www/pages
RUN apt-get update && apt-get install -y ca-certificates
COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
COPY --from=rust /src/target/release/librepages /usr/local/bin/
COPY --from=rust /src/target/release/pages /usr/local/bin/
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]


@@ -30,14 +30,14 @@ doc: ## Prepare documentation
docker: ## Build docker images
docker build \
-t realaravinth/librepages:master \
-t realaravinth/librepages:latest \
-t realaravinth/librepages:0.1.0 .
-t realaravinth/pages:master \
-t realaravinth/pages:latest \
-t realaravinth/pages:0.1.0 .
docker-publish: docker ## Build and publish docker images
docker push realaravinth/librepages:master
docker push realaravinth/librepages:latest
docker push realaravinth/librepages:0.1.0
docker push realaravinth/pages:master
docker push realaravinth/pages:latest
docker push realaravinth/pages:0.1.0
lint: ## Lint codebase
cargo fmt -v --all -- --emit files
@@ -46,7 +46,7 @@ lint: ## Lint codebase
migrate: ## run migrations
$(call cache_bust)
unset DATABASE_URL && cargo build
DATABASE_URL=${DATABASE_URL} cargo run -- migrate
cargo run -- migrate
release: ## Release build
$(call cache_bust)


@@ -5,8 +5,10 @@
**Auto-deploy static websites from git repositories**
</p>
[![status-badge](https://ci.batsense.net/api/badges/LibrePages/librepages/status.svg)](https://ci.batsense.net/LibrePages/librepages)
[![Build](https://github.com/realaravinth/pages/actions/workflows/linux.yml/badge.svg)](https://github.com/realaravinth/pages/actions/workflows/linux.yml)
[![dependency status](https://deps.rs/repo/github/realaravinth/pages/status.svg)](https://deps.rs/repo/github/realaravinth/pages)
[![codecov](https://codecov.io/gh/realaravinth/pages/branch/master/graph/badge.svg)](https://codecov.io/gh/realaravinth/pages)
</div>


@@ -1,11 +1,8 @@
debug = true
allow_registration = true
# source code of your copy of pages server.
source_code = "https://git.batsense.net/LibrePages/pages"
source_code = "https://github.com/realaravinth/pages"
support_email = "support@librepages.example.org"
conductors = [
{ username = "librepages_api", api_key = "longrandomlygeneratedpassword", url = "http://localhost:5000"}
]
[server]
# The port at which you want Pages to listen to
@@ -19,6 +16,7 @@ workers = 2
domain = "localhost"
cookie_secret = "94b2b2732626fdb7736229a7c777cb451e6304c147c4549f30"
[page]
base_path = "/tmp/librepages-defualt-config/"
base_domain = "librepages.test" # domain where customer pages will be deployed.
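
The removed `conductors` entries above pair a conductor's `username` and `api_key` with the URL it listens on; the `Conductor::tx` method removed later in this diff posts events to `/api/v1/events/new` on each of them. Below is a minimal sketch of how such entries can be modeled and parsed, assuming `serde` (with derive) and the `toml` crate already listed in `Cargo.toml`; the struct names are illustrative, not the ones `Settings`/`libconfig` actually use.

```rust
use serde::Deserialize;
use url::Url;

// Illustrative shapes mirroring the removed `conductors` table in config/default.toml.
#[derive(Debug, Deserialize)]
struct ConductorCreds {
    username: String,
    api_key: String,
    url: String,
}

#[derive(Debug, Deserialize)]
struct PartialSettings {
    conductors: Vec<ConductorCreds>,
}

fn main() {
    // Same shape as the removed `conductors` entry above.
    let raw = r#"
conductors = [
    { username = "librepages_api", api_key = "longrandomlygeneratedpassword", url = "http://localhost:5000" }
]
"#;
    let settings: PartialSettings = toml::from_str(raw).expect("valid TOML");
    for c in &settings.conductors {
        // Mirrors how the removed `Conductor::tx` derives each event endpoint.
        let mut event_url = Url::parse(&c.url).expect("valid URL");
        event_url.set_path("/api/v1/events/new");
        println!("{} (api key, {} chars) -> {}", c.username, c.api_key.len(), event_url);
    }
}
```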


@@ -3,10 +3,10 @@
The process is tedious, most of this will be automated with a script in
the future.
## 1. Create new user for running `librepages`:
## 1. Create new user for running `pages`:
```bash
sudo useradd -b /srv -m -s /usr/bin/zsh librepages
sudo useradd -b /srv -m -s /usr/bin/zsh pages
```
## 2. Install Runtime dependencies
@@ -19,11 +19,11 @@ On Debian-based systems, run:
sudo apt install nginx
```
## 3. Build `librepages`
## 3. Build `Pages`
### i. Install Build Dependencies
To build `librepages`, you need the following dependencies:
To build `pages`, you need the following dependencies:
1. [Git](https://packages.debian.org/bullseye/git)
2. [pkg-config](https://packages.debian.org/bullseye/pkg-config)
@@ -71,23 +71,23 @@ Install binary and copy demo configuration file into default configuration
lookup path(`/etc/static-pages/config.toml`)
```bash
sudo cp ./target/release/librepages /usr/local/bin/ && \
sudo cp ./target/release/pages /usr/local/bin/ && \
sudo mkdir /etc/static-pages && \
sudo cp config/default.toml /etc/static-pages/config.toml
```
## 4. Systemd service configuration:
### i. Copy the following to `/etc/systemd/system/librepages.service`:
### i. Copy the following to `/etc/systemd/system/pages.service`:
```systemd
[Unit]
Description=librepages: Auto-deploy static websites from git repositories
Description=pages: Auto-deploy static websites from git repositories
[Service]
Type=simple
User=librepages
ExecStart=/usr/local/bin/librepages
User=pages
ExecStart=/usr/local/bin/pages
Restart=on-failure
RestartSec=1
MemoryDenyWriteExecute=true
@@ -107,13 +107,13 @@ WantedBy=multi-user.target
```bash
sudo systemctl daemon-reload && \
sudo systemctl enable librepages && \ # Auto startup during boot
sudo systemctl start librepages
sudo systemctl enable pages && \ # Auto startup during boot
sudo systemctl start pages
```
## 5. Optionally configure Nginx to reverse proxy requests to LibrePages
## 5. Optionally configure Nginx to reverse proxy requests to Pages
**NOTE: This sections includes instructions to reverse proxy requests to
LibrePages API, not the websites managed by librepages.**
Pages API, not the websites managed by Pages.**
See [here](../../config/librepages-nginx-config) for sample Nginx configuration.
See [here](../../config/pages-nginx-config) for sample Nginx configuration.


@@ -1,15 +0,0 @@
CREATE TABLE IF NOT EXISTS librepages_forgejo_webhooks (
forgejo_webhook_secret VARCHAR(40) NOT NULL UNIQUE,
forgejo_url VARCHAR(3000) NOT NULL,
auth_token VARCHAR(40) NOT NULL UNIQUE,
ID SERIAL PRIMARY KEY NOT NULL,
owned_by INTEGER NOT NULL references librepages_users(ID) ON DELETE CASCADE
);
CREATE UNIQUE INDEX librepages_forgejo_webhook_auth_token_index ON librepages_forgejo_webhooks(auth_token);
CREATE TABLE IF NOT EXISTS librepages_forgejo_webhook_site_mapping (
site_id INTEGER NOT NULL references librepages_sites(ID) ON DELETE CASCADE,
forgejo_webhook_id INTEGER NOT NULL references librepages_forgejo_webhooks(ID) ON DELETE CASCADE,
UNIQUE(site_id, forgejo_webhook_id)
);


@@ -33,15 +33,14 @@ FILENAME="$NAME-$2-linux-amd64"
TARBALL=$FILENAME.tar.gz
TARGET_DIR="$TMP_DIR/$FILENAME/"
mkdir -p $TARGET_DIR
DOCKER_IMG="realaravinth/librepages:$3"
DOCKER_IMG="realaravinth/pages:$3"
get_bin(){
echo "[*] Grabbing binary"
#container_id=$(docker create $DOCKER_IMG)
#docker cp $container_id:/usr/local/bin/pages $TARGET_DIR/
#docker rm -v $container_id
cp target/release/librepages $TARGET_DIR
container_id=$(docker create $DOCKER_IMG)
docker cp $container_id:/usr/local/bin/pages $TARGET_DIR/
docker rm -v $container_id
}
copy() {


@@ -1,23 +0,0 @@
#!/bin/bash
readonly NAME=librepages-conductor
docker rm -f $NAME
docker create --name $NAME -p 5000:5000 \
-e LPCONDUCTOR__SOURCE_CODE="https://git.batsense.net/LibrePages/conductor" \
-e LPCONDUCTOR_SERVER__PROXY_HAS_TLS=false \
-e LPCONDUCTOR_DEBUG="false" \
-e LPCONDUCTOR_CONDUCTOR="dummy" \
-e LPCONDUCTOR_SERVER_URL_PREFIX="" \
-e LPCONDUCTOR_SERVER_DOMAIN="librepages.test" \
-e LPCONDUCTOR_SERVER_IP="0.0.0.0" \
-e LPCONDUCTOR_SERVER_PROXY_HAS_TLS="false" \
-e LPCONDUCTOR_SERVER_PORT=7000 \
-e LPCONDUCTOR_SOURCE_CODE="https://example.org" \
-e LPCONDUCTOR_CREDS_USERNAME=$LPCONDUCTOR_CREDS_USERNAME \
-e LPCONDUCTOR_CREDS_PASSWORD=$LPCONDUCTOR_CREDS_PASSWORD \
-e PORT="5000"\
realaravinth/librepages-conductor conductor serve
docker start $NAME


@@ -15,4 +15,4 @@ else
useradd --uid $USER_ID -b /home -m -s /bin/bash $LIBREPAGES_USER
fi
su $LIBREPAGES_USER -c 'librepages'
su $LIBREPAGES_USER -c 'pages'


@@ -1,14 +0,0 @@
FROM realaravinth/librepages:latest as base
RUN echo foo
FROM debian:bullseye-slim
RUN apt update
RUN apt-get -y --no-install-recommends install gpg tar curl wget
WORKDIR /src
COPY --from=base /usr/local/bin/librepages .
COPY . .
ARG RELEASE_BOT_GPG_SIGNING_KEY
RUN echo -n "$RELEASE_BOT_GPG_SIGNING_KEY"
RUN echo -n "$RELEASE_BOT_GPG_SIGNING_KEY" | gpg --batch --import --pinentry-mode loopback
env GPG_PASSWORD=$GPG_PASSWORD
RUN /src/scripts/bin-publish.sh publish master latest $DUMBSERVE_PASSWORD


@@ -144,39 +144,6 @@
},
"query": "SELECT\n librepages_deploy_event_type.name,\n librepages_site_deploy_events.time,\n librepages_site_deploy_events.pub_id\n FROM\n librepages_site_deploy_events\n INNER JOIN librepages_deploy_event_type ON\n librepages_deploy_event_type.ID = librepages_site_deploy_events.event_type\n WHERE\n librepages_site_deploy_events.site = (\n SELECT ID FROM librepages_sites WHERE hostname = $1\n )\n AND\n librepages_site_deploy_events.pub_id = $2\n "
},
"3ecc3a4c89b1289368ef9d9c97204330f74138a0da614ef2174c59a687119595": {
"describe": {
"columns": [
{
"name": "forgejo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "auth_token",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "forgejo_webhook_secret",
"ordinal": 2,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false
],
"parameters": {
"Left": [
"Text",
"Text"
]
}
},
"query": "SELECT\n forgejo_url, auth_token, forgejo_webhook_secret\n FROM\n librepages_forgejo_webhooks\n WHERE\n auth_token = $1\n AND\n owned_by = (SELECT ID FROM librepages_users WHERE name = $2);\n "
},
"432fe829719ce8110f768b4a611724bb34191ac224d2143ff4c81548da75c103": {
"describe": {
"columns": [
@@ -222,76 +189,6 @@
},
"query": "SELECT repo_url, branch, hostname, owned_by, site_secret\n FROM librepages_sites\n WHERE pub_id = $1\n AND\n owned_by = (SELECT ID from librepages_users WHERE name = $2)\n AND\n deleted = false;\n "
},
"4445ff3226af3b5a24b255c5bb012c99b222cc7bd6dda80f232809ed273fc712": {
"describe": {
"columns": [
{
"name": "repo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "site_secret",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "branch",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "owned_by",
"ordinal": 4,
"type_info": "Int4"
},
{
"name": "pub_id",
"ordinal": 5,
"type_info": "Uuid"
}
],
"nullable": [
false,
false,
false,
false,
false,
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT repo_url, site_secret, branch, hostname, owned_by, pub_id\n FROM librepages_sites\n WHERE repo_url = $1\n AND deleted = false;\n "
},
"4cddf1049783251bfc79090055724e894a2be9451302f7691ce2f4240f1ee3ad": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int4"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT ID FROM librepages_sites WHERE repo_url = $1"
},
"53f3c21c06c8d1c218537dfa9183fd0604aaf28200d8aa12e97db4ac317df39e": {
"describe": {
"columns": [
@@ -493,41 +390,6 @@
},
"query": "SELECT EXISTS (SELECT 1 from librepages_users WHERE email = $1)"
},
"8bf4e01b8c38d035fe6bdbfbe8ad9cb35e3fc2fd11107bae92880d157ed11379": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Varchar",
"Text"
]
}
},
"query": "INSERT INTO librepages_forgejo_webhooks\n (forgejo_url , auth_token, forgejo_webhook_secret, owned_by) VALUES ($1, $2, $3, \n (SELECT ID FROM librepages_users WHERE name = $4)\n )"
},
"90907d6cb4ca3b485f7b583584fb5821a950362679d061e490545c76634c211e": {
"describe": {
"columns": [
{
"name": "exists",
"ordinal": 0,
"type_info": "Bool"
}
],
"nullable": [
null
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT EXISTS (SELECT 1 from librepages_sites WHERE repo_url = $1)"
},
"924e756de5544cece865a10a7e136ecc6126e3a603947264cc7899387c18c819": {
"describe": {
"columns": [],
@@ -541,70 +403,6 @@
},
"query": "UPDATE librepages_users set password = $1\n WHERE name = $2"
},
"9710a01bc4c5c5cda2db27d14baca3d7a6ceffa66c7d539da6fda7947c222e71": {
"describe": {
"columns": [
{
"name": "forgejo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "auth_token",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "forgejo_webhook_secret",
"ordinal": 2,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT\n forgejo_url, auth_token, forgejo_webhook_secret\n FROM\n librepages_forgejo_webhooks\n WHERE\n owned_by = (SELECT ID FROM librepages_users WHERE name = $1);\n "
},
"a6284ede1dbf340942dd97afb75865ba0a41009a145254117b03002bd9afa588": {
"describe": {
"columns": [
{
"name": "forgejo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "auth_token",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "forgejo_webhook_secret",
"ordinal": 2,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT forgejo_url, auth_token, forgejo_webhook_secret\n FROM librepages_forgejo_webhooks\n WHERE auth_token = $1\n "
},
"b48c77db6e663d97df44bf9ec2ee92fd3e02f2dcbcdbd1d491e09fab2da68494": {
"describe": {
"columns": [
@@ -631,20 +429,6 @@
},
"query": "SELECT name, password FROM librepages_users WHERE email = ($1)"
},
"b7e51e976a4a80a78df8dbfed1f195af212023d00faee88ab2d09326896bd653": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Int4",
"Text"
]
}
},
"query": "INSERT INTO librepages_forgejo_webhook_site_mapping\n (site_id, forgejo_webhook_id) VALUES (\n (SELECT ID FROM librepages_sites WHERE repo_url = $1 AND ID = $2),\n (SELECT ID FROM librepages_forgejo_webhooks WHERE auth_token = $3)\n ) ON CONFLICT (site_id, forgejo_webhook_id) DO NOTHING;"
},
"b8b1b3c5fa205b071f577b2ce9993ddfc7c99ada26aea48aa1c201c8c3c7fcf6": {
"describe": {
"columns": [],


@@ -1,282 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_identity::Identity;
use actix_web::{web, HttpRequest, HttpResponse, Responder};
use serde::{Deserialize, Serialize};
use tracing::info;
use url::Url;
use super::get_auth_middleware;
use crate::{errors::*, AppCtx};
pub mod routes {
use crate::ctx::Ctx;
pub struct Forgejo {
pub add_webhook: &'static str,
pub view_webhook: &'static str,
pub list_webhooks: &'static str,
pub webhook: &'static str,
}
impl Forgejo {
pub const fn new() -> Self {
Self {
add_webhook: "/api/v1/forgejo/webhook/add",
list_webhooks: "/api/v1/forgejo/webhook/add",
view_webhook: "/api/v1/forgejo/webhook/view/{auth_token}",
webhook: "/api/v1/forgejo/webhook/event/new",
}
}
pub fn get_view(&self, auth_token: &str) -> String {
self.view_webhook.replace("{auth_token}", auth_token)
}
pub fn get_webhook_url(&self, ctx: &Ctx, auth_token: &str) -> String {
format!(
"https://{}{}?auth={auth_token}",
&ctx.settings.server.domain, self.webhook
)
}
}
}
#[derive(Serialize, Deserialize)]
pub struct AddWebhook {
pub forgejo_url: Url,
}
#[actix_web_codegen_const_routes::post(
path = "crate::V1_API_ROUTES.forgejo.add_webhook",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Add webhook" skip(id, ctx, payload))]
async fn add_webhook(
ctx: AppCtx,
id: Identity,
payload: web::Json<AddWebhook>,
) -> ServiceResult<impl Responder> {
info!(
"Adding webhook for Forgejo instance: {}",
payload.forgejo_url.as_str()
);
let owner = id.identity().unwrap();
let payload = payload.into_inner();
let hook = ctx.db.new_webhook(payload.forgejo_url, &owner).await?;
Ok(HttpResponse::Ok().json(hook))
}
#[actix_web_codegen_const_routes::get(
path = "crate::V1_API_ROUTES.forgejo.list_webhooks",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Delete webhook" skip(id, ctx))]
async fn list_webhooks(ctx: AppCtx, id: Identity) -> ServiceResult<impl Responder> {
let owner = id.identity().unwrap();
info!("Getting all webhooks created by {}", owner);
let hooks = ctx.db.list_all_webhooks_with_owner(&owner).await?;
Ok(HttpResponse::Ok().json(hooks))
}
#[actix_web_codegen_const_routes::get(
path = "crate::V1_API_ROUTES.forgejo.view_webhook",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Delete webhook" skip(id, ctx, path))]
async fn view_webhook(
ctx: AppCtx,
id: Identity,
path: web::Path<String>,
) -> ServiceResult<impl Responder> {
let path = path.into_inner();
let owner = id.identity().unwrap();
info!("Gitting webhook webhook for Forgejo instance: {}", path,);
let hook = ctx.db.get_webhook_with_owner(&path, &owner).await?;
Ok(HttpResponse::Ok().json(hook))
}
#[derive(Serialize, Deserialize)]
struct Auth {
auth: String,
}
#[actix_web_codegen_const_routes::post(path = "crate::V1_API_ROUTES.forgejo.webhook")]
#[tracing::instrument(name = "Update ", skip(body, ctx, req, q))]
async fn webhook(
ctx: AppCtx,
body: web::Bytes,
req: HttpRequest,
q: web::Query<Auth>,
) -> ServiceResult<impl Responder> {
ctx.process_webhook(&body, &req, &q.auth).await?;
Ok(HttpResponse::Ok())
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(add_webhook);
cfg.service(view_webhook);
cfg.service(list_webhooks);
cfg.service(webhook);
}
#[cfg(test)]
mod tests {
use actix_web::{error::ResponseError, http::StatusCode, test};
use hmac::Mac;
use crate::ctx::api::v1::forgejo::{HmacSha256, WebhookPayload};
use crate::db::ForgejoWebhook;
use crate::tests;
use crate::*;
use super::*;
#[actix_rt::test]
async fn test_api_forgejo_webhook() {
const NAME: &str = "apiforgejowebhookuser";
const PASSWORD: &str = "longpasswordasdfa2";
const EMAIL: &str = "apiforgejowebhookuser@a.com";
let (_dir, ctx) = tests::get_ctx().await;
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let page = ctx.add_test_site(NAME.into()).await;
let cookies = get_cookie!(signin_resp);
let app = get_app!(ctx).await;
let payload = AddWebhook {
forgejo_url: Url::parse("https://git.batnsense.net").unwrap(),
};
let add_webhook_resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.forgejo.add_webhook)
.cookie(cookies.clone())
.to_request(),
)
.await;
check_status!(add_webhook_resp, StatusCode::OK);
let response: ForgejoWebhook = actix_web::test::read_body_json(add_webhook_resp).await;
assert_eq!(response.forgejo_url, payload.forgejo_url);
let view_webhook_resp = get_request!(
&app,
&V1_API_ROUTES.forgejo.get_view(&response.auth_token),
cookies.clone()
);
check_status!(view_webhook_resp, StatusCode::OK);
let hook: ForgejoWebhook = actix_web::test::read_body_json(view_webhook_resp).await;
assert_eq!(hook, response);
let list_all_webhooks_resp =
get_request!(&app, V1_API_ROUTES.forgejo.list_webhooks, cookies.clone());
check_status!(list_all_webhooks_resp, StatusCode::OK);
let hooks: Vec<ForgejoWebhook> =
actix_web::test::read_body_json(list_all_webhooks_resp).await;
assert_eq!(vec![hook.clone()], hooks);
let webhook_url = format!("{}?auth={}", V1_API_ROUTES.forgejo.webhook, hook.auth_token);
// test webhook
let mut webhook_payload = WebhookPayload::default();
webhook_payload.reference = format!("refs/origin/{}", page.branch);
webhook_payload.repository.html_url = page.repo;
let body = serde_json::to_string(&webhook_payload).unwrap();
let body = body.as_bytes();
let mut mac = HmacSha256::new_from_slice(hook.forgejo_webhook_secret.as_bytes())
.expect("HMAC can take key of any size");
mac.update(body);
let res = mac.finalize();
let sig = res.into_bytes();
let sig = hex::encode(&sig[..]);
let post_to_webhook_resp = test::call_service(
&app,
post_request!(&webhook_payload, &webhook_url)
.insert_header(("X-Gitea-Delivery", "foobar213randomuuid"))
.insert_header(("X-Gitea-Signature", sig.clone()))
.insert_header(("X-Gitea-Event", "push"))
.cookie(cookies.clone())
.to_request(),
)
.await;
check_status!(post_to_webhook_resp, StatusCode::OK);
// no webhook
let fake_webhook_url = format!(
"{}?auth={}",
V1_API_ROUTES.forgejo.webhook, hook.forgejo_webhook_secret
);
let body = serde_json::to_string(&webhook_payload).unwrap();
let body = body.as_bytes();
let mut mac =
HmacSha256::new_from_slice(b"nosecret").expect("HMAC can take key of any size");
mac.update(body);
let res = mac.finalize();
let fake_sig = res.into_bytes();
let fake_sig = hex::encode(&fake_sig[..]);
let post_to_no_exist_webhook_resp = test::call_service(
&app,
post_request!(&webhook_payload, &fake_webhook_url)
.insert_header(("X-Gitea-Delivery", "foobar213randomuuid"))
.insert_header(("X-Gitea-Signature", fake_sig))
.insert_header(("X-Gitea-Event", "push"))
.cookie(cookies.clone())
.to_request(),
)
.await;
let err = ServiceError::WebhookNotFound;
assert_eq!(post_to_no_exist_webhook_resp.status(), err.status_code());
let resp_err: ErrorToResponse =
actix_web::test::read_body_json(post_to_no_exist_webhook_resp).await;
assert_eq!(resp_err.error, err.to_string());
// no website
let mut webhook_payload = WebhookPayload::default();
webhook_payload.reference = format!("refs/origin/{}", page.branch);
webhook_payload.repository.html_url = "https://no-exist-git.example.org".into();
let body = serde_json::to_string(&webhook_payload).unwrap();
let body = body.as_bytes();
let mut mac = HmacSha256::new_from_slice(hook.forgejo_webhook_secret.as_bytes())
.expect("HMAC can take key of any size");
mac.update(body);
let res = mac.finalize();
let sig = res.into_bytes();
let sig = hex::encode(&sig[..]);
let post_to_no_website_webhook_resp = test::call_service(
&app,
post_request!(&webhook_payload, &webhook_url)
.insert_header(("X-Gitea-Delivery", "foobar213randomuuid"))
.insert_header(("X-Gitea-Signature", sig.clone()))
.insert_header(("X-Gitea-Event", "push"))
.cookie(cookies.clone())
.to_request(),
)
.await;
let err = ServiceError::WebsiteNotFound;
assert_eq!(post_to_no_website_webhook_resp.status(), err.status_code());
let resp_err: ErrorToResponse =
actix_web::test::read_body_json(post_to_no_website_webhook_resp).await;
assert_eq!(resp_err.error, err.to_string());
}
}


@@ -20,7 +20,6 @@ use serde::Deserialize;
pub mod account;
pub mod auth;
pub mod forgejo;
pub mod meta;
pub mod pages;
pub mod routes;
@@ -31,7 +30,6 @@ pub fn services(cfg: &mut ServiceConfig) {
auth::services(cfg);
account::services(cfg);
meta::services(cfg);
forgejo::services(cfg);
pages::services(cfg);
}


@@ -19,7 +19,6 @@ use actix_auth_middleware::GetLoginRoute;
use crate::serve::routes::Serve;
use super::forgejo::routes::Forgejo;
use super::meta::routes::Meta;
use super::pages::routes::Deploy;
@@ -93,7 +92,6 @@ pub struct Routes {
pub account: Account,
/// Meta routes
pub meta: Meta,
pub forgejo: Forgejo,
pub deploy: Deploy,
pub serve: Serve,
}
@@ -105,7 +103,6 @@ impl Routes {
auth: Auth::new(),
account: Account::new(),
meta: Meta::new(),
forgejo: Forgejo::new(),
deploy: Deploy::new(),
serve: Serve::new(),
}


@@ -1,96 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use reqwest::Client;
use libconductor::EventType;
use libconfig::Config;
use tracing::info;
use crate::errors::ServiceResult;
use crate::{page::Page, settings::Settings};
#[derive(Clone)]
pub struct Conductor {
client: Client,
pub settings: Settings,
}
impl Conductor {
pub fn new(settings: Settings, client: Option<Client>) -> Self {
let client = if let Some(client) = client {
client
} else {
Client::new()
};
Self { client, settings }
}
async fn tx(&self, e: &EventType) -> ServiceResult<()> {
for c in self.settings.conductors.iter() {
info!("Tx event to {}", c.url);
let mut event_url = c.url.clone();
event_url.set_path("/api/v1/events/new");
self.client
.post(event_url)
.basic_auth(&c.username, Some(&c.api_key))
.json(e)
.send()
.await
.unwrap();
}
Ok(())
}
pub async fn new_site(&self, page: Page) -> ServiceResult<()> {
let msg = EventType::NewSite {
hostname: page.domain,
branch: page.branch,
path: page.path,
};
self.tx(&msg).await
}
pub async fn tx_config(&self, config: Config) -> ServiceResult<()> {
self.tx(&EventType::Config { data: config }).await
}
pub async fn delete_site(&self, hostname: String) -> ServiceResult<()> {
self.tx(&EventType::DeleteSite { hostname }).await
}
}
#[cfg(test)]
mod tests {
use super::*;
use uuid::Uuid;
#[actix_rt::test]
pub async fn test_conductor() {
let settings = Settings::new().unwrap();
let c = Conductor::new(settings.clone(), None);
c.delete_site("example.org".into()).await.unwrap();
let page = Page {
secret: "foo".into(),
repo: "foo".into(),
path: "foo".into(),
branch: "foo".into(),
domain: "foo".into(),
pub_id: Uuid::new_v4(),
};
c.new_site(page).await.unwrap();
}
}


@@ -1,204 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::web;
use actix_web::HttpRequest;
use hmac::{Hmac, Mac};
use serde::{Deserialize, Serialize};
use sha2::Sha256;
use tracing::{info, warn};
use url::Url;
use crate::ctx::Ctx;
use crate::errors::ServiceError;
use crate::errors::ServiceResult;
pub type HmacSha256 = Hmac<Sha256>;
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct CommitPerson {
pub name: String,
pub email: String,
pub username: String,
}
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
pub struct Commit {
pub id: String,
pub message: String,
pub url: String,
pub author: CommitPerson,
pub committer: CommitPerson,
pub verification: serde_json::Value,
pub timestamp: String,
pub added: serde_json::Value,
pub removed: serde_json::Value,
pub modified: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Person {
pub id: usize,
pub login: String,
pub full_name: String,
pub email: String,
pub avatar_url: String,
pub language: String,
pub is_admin: bool,
pub last_login: String,
pub created: String,
pub restricted: bool,
pub active: bool,
pub prohibit_login: bool,
pub location: String,
pub website: String,
pub description: String,
pub visibility: String,
pub followers_count: usize,
pub following_count: usize,
pub starred_repos_count: usize,
pub username: String,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Permissions {
pub admin: bool,
pub push: bool,
pub pull: bool,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct InternalTracker {
pub enable_time_tracker: bool,
pub allow_only_contributors_to_track_time: bool,
pub enable_issue_dependencies: bool,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Repository {
pub id: usize,
pub owner: Person,
pub name: String,
pub full_name: String,
pub description: String,
pub empty: bool,
pub private: bool,
pub fork: bool,
pub template: bool,
pub parent: Option<serde_json::Value>,
pub mirror: bool,
pub size: usize,
pub html_url: String,
pub ssh_url: String,
pub clone_url: String,
pub original_url: String,
pub website: String,
pub stars_count: usize,
pub forks_count: usize,
pub watchers_count: usize,
pub open_issues_count: usize,
pub open_pr_counter: usize,
pub release_counter: usize,
pub default_branch: String,
pub archived: bool,
pub created_at: String,
pub updated_at: String,
pub permissions: Permissions,
pub has_issues: bool,
pub internal_tracker: InternalTracker,
pub has_wiki: bool,
pub has_pull_requests: bool,
pub has_projects: bool,
pub ignore_whitespace_conflicts: bool,
pub allow_merge_commits: bool,
pub allow_rebase: bool,
pub allow_rebase_explicit: bool,
pub allow_squash_merge: bool,
pub default_merge_style: String,
pub avatar_url: String,
pub internal: bool,
pub mirror_interval: String,
pub mirror_updated: String,
pub repo_transfer: Option<serde_json::Value>,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct WebhookPayload {
#[serde(rename(serialize = "ref", deserialize = "ref"))]
pub reference: String,
pub before: String,
pub after: String,
pub compare_url: String,
pub repository: Repository,
pub pusher: Person,
pub sender: Person,
}
impl Ctx {
pub async fn process_webhook(
&self,
body: &web::Bytes,
req: &HttpRequest,
auth_token: &str,
) -> ServiceResult<()> {
let headers = req.headers();
let _uuid = headers.get("X-Gitea-Delivery").unwrap();
let sig = headers.get("X-Gitea-Signature").unwrap();
let sig = hex::decode(sig).unwrap();
let event_type = headers.get("X-Gitea-Event").unwrap();
let payload: WebhookPayload = serde_json::from_slice(body).unwrap();
let hook = self.db.get_webhook(auth_token).await?;
for url in [
&payload.repository.html_url,
&payload.repository.ssh_url,
&payload.repository.clone_url,
] {
if self.db.site_with_repository_exists(url).await? {
let mut mac = HmacSha256::new_from_slice(hook.forgejo_webhook_secret.as_bytes())?;
mac.update(body);
mac.verify_slice(&sig[..])?;
let site = self.db.get_site_from_repo_url(url).await?;
self.db
.webhook_link_site(auth_token, &Url::parse(&site.repo_url)?)
.await?;
if payload.reference.contains(&site.branch) {
info!(
"[webhook][forgejo/gitea] received update {:?} from {url} repository on deployed branch",
event_type
);
self.update_site(&site.site_secret, Some(site.branch))
.await?;
} else {
info!(
"[webhook][forgejo/gitea] received update {:?} from {url} repository on non-deployed branch {}",
event_type,
payload.reference
);
}
return Ok(());
}
}
warn!(
"[webhook][forgejo/gitea] stray update from {} repository",
payload.repository.html_url
);
Err(ServiceError::WebsiteNotFound)
}
}
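
The removed `process_webhook` above authenticates a delivery by recomputing HMAC-SHA256 over the raw request body with the stored `forgejo_webhook_secret` and comparing it against the hex-encoded `X-Gitea-Signature` header. Below is a minimal standalone sketch of that check, assuming the `hmac`, `sha2`, and `hex` crates that this comparison also drops from `Cargo.toml`; the inputs in `main` are illustrative.

```rust
use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

/// Recompute HMAC-SHA256 over the raw body and compare it against the
/// hex-encoded signature header, the same check `process_webhook` performs.
fn verify_signature(secret: &str, body: &[u8], signature_hex: &str) -> bool {
    let sig = match hex::decode(signature_hex) {
        Ok(sig) => sig,
        Err(_) => return false,
    };
    let mut mac = match HmacSha256::new_from_slice(secret.as_bytes()) {
        Ok(mac) => mac,
        Err(_) => return false,
    };
    mac.update(body);
    // `verify_slice` compares in constant time and fails on mismatch.
    mac.verify_slice(&sig).is_ok()
}

fn main() {
    // Illustrative inputs: the secret stands in for `forgejo_webhook_secret`,
    // the body for the raw webhook payload, the hex string for `X-Gitea-Signature`.
    let secret = "forgejo-webhook-secret";
    let body = br#"{"ref":"refs/heads/master"}"#;

    // What the forge does before delivering the hook.
    let mut mac = HmacSha256::new_from_slice(secret.as_bytes()).unwrap();
    mac.update(body);
    let sig = mac.finalize().into_bytes();
    let signature_hex = hex::encode(&sig[..]);

    assert!(verify_signature(secret, body, &signature_hex));
    assert!(!verify_signature("wrong-secret", body, &signature_hex));
}
```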


@@ -16,7 +16,6 @@
*/
pub mod account;
pub mod auth;
pub mod forgejo;
pub mod pages;
#[cfg(test)]


@@ -61,15 +61,12 @@ impl Ctx {
self.db.add_site(&db_site).await?;
let page = Page::from_site(&self.settings, db_site);
page.update(&page.branch)?;
if let Some(_config) = page_config::Config::load(&page.path, &page.branch) {
unimplemented!();
}
self.db
.log_event(&page.domain, &db::EVENT_TYPE_CREATE)
.await?;
self.conductor.new_site(page.clone()).await?;
if let Some(config) = page_config::load(&page.path, &page.branch) {
self.conductor.tx_config(config).await?;
unimplemented!("Parse and store custom domains in DB");
}
Ok(page)
}
@@ -90,9 +87,8 @@ impl Ctx {
.unwrap();
}
rx.await.unwrap()?;
if let Some(config) = page_config::load(&page.path, &page.branch) {
self.conductor.tx_config(config).await?;
unimplemented!("Parse and store custom domains in DB");
if let Some(_config) = page_config::Config::load(&page.path, &page.branch) {
unimplemented!();
}
self.db
.log_event(&page.domain, &db::EVENT_TYPE_UPDATE)
@@ -114,7 +110,6 @@ impl Ctx {
self.db
.delete_site(&db_site.owner, &db_site.hostname)
.await?;
self.conductor.delete_site(db_site.hostname).await?;
Ok(())
} else {
Err(ServiceError::WebsiteNotFound)


@@ -20,23 +20,18 @@ use std::thread;
use crate::db::*;
use crate::settings::Settings;
use argon2_creds::{Config as ArgonConfig, ConfigBuilder as ArgonConfigBuilder, PasswordPolicy};
use reqwest::Client;
use tracing::info;
pub mod api;
use crate::conductor::Conductor;
pub type ArcCtx = Arc<Ctx>;
#[derive(Clone)]
pub struct Ctx {
pub settings: Settings,
pub db: Database,
pub conductor: Conductor,
/// credential-procession policy
pub creds: ArgonConfig,
client: Client,
}
impl Ctx {
@@ -54,8 +49,6 @@ impl Ctx {
pub async fn new(settings: Settings) -> Arc<Self> {
let creds = Self::get_creds();
let c = creds.clone();
let client = Client::default();
let conductor = Conductor::new(settings.clone(), Some(client.clone()));
#[allow(unused_variables)]
let init = thread::spawn(move || {
@@ -70,10 +63,8 @@ impl Ctx {
Arc::new(Self {
settings,
client,
db,
creds,
conductor,
})
}
}

src/db.rs (270 changes)

@@ -23,11 +23,9 @@ use sqlx::types::time::OffsetDateTime;
use sqlx::ConnectOptions;
use sqlx::PgPool;
use tracing::error;
use url::Url;
use uuid::Uuid;
use crate::errors::*;
use crate::utils;
/// Connect to databse
pub enum ConnectionOptions {
@@ -50,7 +48,6 @@ impl ConnectionOptions {
async fn connect(self) -> ServiceResult<Database> {
let pool = match self {
Self::Fresh(fresh) => {
tracing::info!("DATABASE URL: {}", fresh.url);
let mut connect_options =
sqlx::postgres::PgConnectOptions::from_str(&fresh.url).unwrap();
if fresh.disable_logging {
@@ -276,53 +273,6 @@ impl Database {
Ok(())
}
pub async fn get_site_from_repo_url(&self, repo_url: &str) -> ServiceResult<Site> {
struct S {
repo_url: String,
branch: String,
hostname: String,
owned_by: i32,
site_secret: String,
pub_id: Uuid,
}
let site = sqlx::query_as!(
S,
"SELECT repo_url, site_secret, branch, hostname, owned_by, pub_id
FROM librepages_sites
WHERE repo_url = $1
AND deleted = false;
",
repo_url,
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
struct Owner {
name: String,
}
let owner = sqlx::query_as!(
Owner,
"SELECT name FROM librepages_users WHERE ID = $1",
site.owned_by
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
let site = Site {
site_secret: site.site_secret,
branch: site.branch,
hostname: site.hostname,
owner: owner.name,
repo_url: site.repo_url,
pub_id: site.pub_id,
};
Ok(site)
}
pub async fn get_site_from_pub_id(&self, pub_id: Uuid, owner: String) -> ServiceResult<Site> {
struct S {
repo_url: String,
@@ -480,24 +430,6 @@ impl Database {
Ok(resp)
}
/// check if site with repository exists
pub async fn site_with_repository_exists(&self, url: &str) -> ServiceResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (SELECT 1 from librepages_sites WHERE repo_url = $1)",
url,
)
.fetch_one(&self.pool)
.await
.map_err(map_register_err)?;
let mut resp = false;
if let Some(x) = res.exists {
resp = x;
}
Ok(resp)
}
/// check if event type exists
async fn event_type_exists(&self, event: &Event) -> ServiceResult<bool> {
let res = sqlx::query!(
@@ -689,134 +621,7 @@ impl Database {
}
Ok(events)
}
/// register a new webhook
pub async fn new_webhook(
&self,
forgejo_url: Url,
owner: &str,
) -> ServiceResult<ForgejoWebhook> {
let hook = ForgejoWebhook::new(forgejo_url);
sqlx::query!(
"INSERT INTO librepages_forgejo_webhooks
(forgejo_url , auth_token, forgejo_webhook_secret, owned_by) VALUES ($1, $2, $3,
(SELECT ID FROM librepages_users WHERE name = $4)
)",
hook.forgejo_url.as_str(),
&hook.auth_token,
&hook.forgejo_webhook_secret,
owner,
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::AccountNotFound))?;
Ok(hook)
}
pub async fn list_all_webhooks_with_owner(
&self,
owner: &str,
) -> ServiceResult<Vec<ForgejoWebhook>> {
let mut db_hooks = sqlx::query_as!(
InnerForgejoWebhook,
"SELECT
forgejo_url, auth_token, forgejo_webhook_secret
FROM
librepages_forgejo_webhooks
WHERE
owned_by = (SELECT ID FROM librepages_users WHERE name = $1);
",
owner
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
let mut hooks = Vec::with_capacity(db_hooks.len());
for hook in db_hooks.drain(0..) {
hooks.push(hook.to_webhook()?)
}
Ok(hooks)
}
pub async fn get_webhook_with_owner(
&self,
auth_token: &str,
owner: &str,
) -> ServiceResult<ForgejoWebhook> {
let h = sqlx::query_as!(
InnerForgejoWebhook,
"SELECT
forgejo_url, auth_token, forgejo_webhook_secret
FROM
librepages_forgejo_webhooks
WHERE
auth_token = $1
AND
owned_by = (SELECT ID FROM librepages_users WHERE name = $2);
",
auth_token,
owner
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
h.to_webhook()
}
pub async fn get_webhook(&self, auth_token: &str) -> ServiceResult<ForgejoWebhook> {
let h = sqlx::query_as!(
InnerForgejoWebhook,
"SELECT forgejo_url, auth_token, forgejo_webhook_secret
FROM librepages_forgejo_webhooks
WHERE auth_token = $1
",
auth_token,
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
h.to_webhook()
}
/// register a webhook against a site
pub async fn webhook_link_site(&self, auth_token: &str, repo_url: &Url) -> ServiceResult<()> {
struct Site {
id: i32,
}
let sites = sqlx::query_as!(
Site,
"SELECT ID FROM librepages_sites WHERE repo_url = $1",
repo_url.as_str()
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
for site in sites {
sqlx::query!(
"INSERT INTO librepages_forgejo_webhook_site_mapping
(site_id, forgejo_webhook_id) VALUES (
(SELECT ID FROM librepages_sites WHERE repo_url = $1 AND ID = $2),
(SELECT ID FROM librepages_forgejo_webhooks WHERE auth_token = $3)
) ON CONFLICT (site_id, forgejo_webhook_id) DO NOTHING;",
repo_url.as_str(),
site.id,
auth_token
)
.execute(&self.pool)
.await
//.unwrap();
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
}
Ok(())
}
}
struct InnerSite {
site_secret: String,
repo_url: String,
@@ -926,39 +731,6 @@ pub struct LibrePagesEvent {
pub id: Uuid,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct ForgejoWebhook {
pub forgejo_url: Url,
pub forgejo_webhook_secret: String,
pub auth_token: String,
}
impl ForgejoWebhook {
fn new(forgejo_url: Url) -> Self {
Self {
forgejo_url,
forgejo_webhook_secret: utils::get_random(40),
auth_token: utils::get_random(40),
}
}
}
struct InnerForgejoWebhook {
forgejo_url: String,
auth_token: String,
forgejo_webhook_secret: String,
}
impl InnerForgejoWebhook {
fn to_webhook(self) -> ServiceResult<ForgejoWebhook> {
Ok(ForgejoWebhook {
forgejo_url: Url::parse(&self.forgejo_url)?,
auth_token: self.auth_token,
forgejo_webhook_secret: self.forgejo_webhook_secret,
})
}
}
fn now_unix_time_stamp() -> OffsetDateTime {
OffsetDateTime::now_utc()
}
@@ -1178,7 +950,7 @@ mod tests {
let site = Site {
site_secret: "foobar".into(),
repo_url: "https://git.test_db_sites.example.org/LibrePages/librepages.git".into(),
repo_url: "https://git.batsense.net/LibrePages/librepages.git".into(),
branch: "librepages".into(),
hostname: "db_works.tests.librepages.librepages.org".into(),
pub_id: Uuid::new_v4(),
@@ -1187,20 +959,12 @@ mod tests {
// test if hostname exists. Should be false
assert!(!db.hostname_exists(&site.hostname).await.unwrap());
assert!(!db
.site_with_repository_exists(&site.repo_url)
.await
.unwrap());
// testing adding site
db.add_site(&site).await.unwrap();
// test if hostname exists. Should be true
assert!(db.hostname_exists(&site.hostname).await.unwrap());
assert!(db
.site_with_repository_exists(&site.repo_url)
.await
.unwrap());
// get site
let db_site = db.get_site(p.username, &site.hostname).await.unwrap();
@@ -1212,12 +976,6 @@ mod tests {
db.get_site_from_secret(&site.site_secret).await.unwrap()
);
// get site by repo_url
assert_eq!(
db_site,
db.get_site_from_repo_url(&site.repo_url).await.unwrap()
);
// list all sites owned by user
let db_sites = db.list_all_sites(p.username).await.unwrap();
assert_eq!(db_sites.len(), 1);
@@ -1264,32 +1022,6 @@ mod tests {
);
assert_eq!(latest_update_event_id_from_db.id, latest_update_event_id);
// add webhook
let forgejo_url = Url::parse("https://example.org").unwrap();
let hook = db.new_webhook(forgejo_url, NAME).await.unwrap();
assert_eq!(hook, db.get_webhook(&hook.auth_token).await.unwrap());
assert_eq!(
vec![hook.clone()],
db.list_all_webhooks_with_owner(NAME).await.unwrap()
);
assert_eq!(
hook,
db.get_webhook_with_owner(&hook.auth_token, NAME)
.await
.unwrap()
);
assert_eq!(
db.get_webhook(&hook.forgejo_webhook_secret).await.err(),
Some(ServiceError::WebhookNotFound)
);
db.webhook_link_site(&hook.auth_token, &Url::parse(&site.repo_url).unwrap())
.await
.unwrap();
db.webhook_link_site(&hook.auth_token, &Url::parse(&site.repo_url).unwrap())
.await
.unwrap();
// delete site
db.delete_site(p.username, &site.hostname).await.unwrap();


@@ -28,8 +28,6 @@ use argon2_creds::errors::CredsError;
use config::ConfigError as ConfigErrorInner;
use derive_more::{Display, Error};
use git2::Error as GitError;
use hmac::digest::InvalidLength;
use hmac::digest::MacError;
use serde::{Deserialize, Serialize};
use url::ParseError;
@@ -180,24 +178,6 @@ pub enum ServiceError {
#[display(fmt = "Passwords don't match")]
/// passwords don't match
PasswordsDontMatch,
/// Webhook not found
#[display(fmt = "Webhook not found")]
WebhookNotFound,
}
impl From<InvalidLength> for ServiceError {
#[cfg(not(tarpaulin_include))]
fn from(_: InvalidLength) -> ServiceError {
ServiceError::InternalServerError
}
}
impl From<MacError> for ServiceError {
#[cfg(not(tarpaulin_include))]
fn from(_: MacError) -> ServiceError {
ServiceError::WebhookNotFound
}
}
impl From<ParseError> for ServiceError {
@@ -272,7 +252,6 @@ impl ResponseError for ServiceError {
ServiceError::ClosedForRegistration => StatusCode::FORBIDDEN, //FORBIDDEN,
ServiceError::NotAnEmail => StatusCode::BAD_REQUEST, //BADREQUEST,
ServiceError::WrongPassword => StatusCode::UNAUTHORIZED, //UNAUTHORIZED,
ServiceError::WebhookNotFound => StatusCode::NOT_FOUND, //NOT FOUND,
}
}
}


@@ -27,7 +27,6 @@ use tracing::info;
use tracing_actix_web::TracingLogger;
mod api;
mod conductor;
mod ctx;
mod db;
mod errors;


@@ -16,11 +16,41 @@
*/
use std::path::Path;
use libconfig::Config;
use serde::{Deserialize, Serialize};
use crate::git::{ContentType, GitFileMode};
#[derive(Deserialize, Serialize, Debug, Eq, PartialEq, Clone)]
pub struct Config {
pub source: Source,
pub domains: Option<Vec<String>>,
pub forms: Option<Forms>,
pub image_compression: Option<ImageCompression>,
pub redirects: Option<Vec<Redirects>>,
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Source {
production_branch: String,
staging: Option<String>,
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Forms {
pub enable: bool,
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct ImageCompression {
pub enable: bool,
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Redirects {
pub from: String,
pub to: String,
}
#[derive(Deserialize, Debug, Serialize, PartialEq, Eq)]
struct Policy<'a> {
rel_path: &'a str,
@@ -40,75 +70,77 @@ enum SupportedFormat {
Toml,
}
pub fn load<P: AsRef<Path>>(repo_path: &P, branch: &str) -> Option<Config> {
const POLICIES: [Policy; 2] = [
Policy::new("librepages.toml", SupportedFormat::Toml),
Policy::new("librepages.json", SupportedFormat::Json),
];
impl Config {
pub fn load<P: AsRef<Path>>(repo_path: &P, branch: &str) -> Option<Config> {
const POLICIES: [Policy; 2] = [
Policy::new("librepages.toml", SupportedFormat::Toml),
Policy::new("librepages.json", SupportedFormat::Json),
];
if let Some(policy) = discover(repo_path, branch, &POLICIES) {
// let path = p.repo.as_ref().join(policy.rel_path);
//let contents = fs::read_to_string(path).await.unwrap();
if let Some(policy) = Self::discover(repo_path, branch, &POLICIES) {
// let path = p.repo.as_ref().join(policy.rel_path);
//let contents = fs::read_to_string(path).await.unwrap();
let file =
crate::git::read_preview_file(&repo_path.as_ref().into(), branch, policy.rel_path)
.unwrap();
if let ContentType::Text(contents) = file.content {
let res = match policy.format {
SupportedFormat::Json => load_json(&contents),
SupportedFormat::Yaml => load_yaml(&contents),
SupportedFormat::Toml => load_toml(&contents),
let file =
crate::git::read_preview_file(&repo_path.as_ref().into(), branch, policy.rel_path)
.unwrap();
if let ContentType::Text(contents) = file.content {
let res = match policy.format {
SupportedFormat::Json => Self::load_json(&contents),
SupportedFormat::Yaml => Self::load_yaml(&contents),
SupportedFormat::Toml => Self::load_toml(&contents),
};
return Some(res);
};
}
return Some(res);
};
None
}
fn discover<'a, P: AsRef<Path>>(
repo_path: &P,
branch: &str,
policies: &'a [Policy<'a>],
) -> Option<&'a Policy<'a>> {
let repo = git2::Repository::open(repo_path).unwrap();
None
}
fn discover<'a, P: AsRef<Path>>(
repo_path: &P,
branch: &str,
policies: &'a [Policy<'a>],
) -> Option<&'a Policy<'a>> {
let repo = git2::Repository::open(repo_path).unwrap();
let branch = repo.find_branch(branch, git2::BranchType::Local).unwrap();
// let tree = head.peel_to_tree().unwrap();
let branch = branch.into_reference();
let tree = branch.peel_to_tree().unwrap();
let branch = repo.find_branch(branch, git2::BranchType::Local).unwrap();
// let tree = head.peel_to_tree().unwrap();
let branch = branch.into_reference();
let tree = branch.peel_to_tree().unwrap();
for p in policies.iter() {
let file_exists = tree.iter().any(|x| {
if let Some(name) = x.name() {
if policies.iter().any(|p| p.rel_path == name) {
let mode: GitFileMode = x.into();
matches!(mode, GitFileMode::Executable | GitFileMode::Regular)
for p in policies.iter() {
let file_exists = tree.iter().any(|x| {
if let Some(name) = x.name() {
if policies.iter().any(|p| p.rel_path == name) {
let mode: GitFileMode = x.into();
matches!(mode, GitFileMode::Executable | GitFileMode::Regular)
} else {
false
}
} else {
false
}
} else {
false
});
if file_exists {
return Some(p);
}
});
if file_exists {
return Some(p);
}
None
}
None
}
fn load_toml(c: &str) -> Config {
toml::from_str(c).unwrap()
}
fn load_toml(c: &str) -> Config {
toml::from_str(c).unwrap()
}
fn load_yaml(c: &str) -> Config {
serde_yaml::from_str(c).unwrap()
}
fn load_yaml(c: &str) -> Config {
serde_yaml::from_str(c).unwrap()
}
fn load_json(c: &str) -> Config {
serde_json::from_str(c).unwrap()
fn load_json(c: &str) -> Config {
serde_json::from_str(c).unwrap()
}
}
#[cfg(test)]
@@ -117,8 +149,6 @@ mod tests {
use crate::git::tests::write_file_util;
use mktemp::Temp;
use libconfig::*;
#[actix_rt::test]
async fn page_config_test() {
let tmp_dir = Temp::new_dir().unwrap();
@@ -137,7 +167,7 @@
Some(&content),
);
let config = load(&repo_path, "master").unwrap();
let config = Config::load(&repo_path, "master").unwrap();
assert!(config.forms.as_ref().unwrap().enable);
assert!(config.image_compression.as_ref().unwrap().enable);
assert_eq!(config.source.production_branch, "librepages");
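
`Config::load` above looks for a `librepages.toml` or `librepages.json` policy file in the repository tree and parses it with `toml`, `serde_yaml`, or `serde_json`. Below is a minimal sketch of the TOML path in isolation, reusing the struct shapes shown in this diff; the sample document is an assumption pieced together from those fields and the test expectations above.

```rust
use serde::{Deserialize, Serialize};

// Struct shapes copied from the page config code in this diff.
#[derive(Deserialize, Serialize, Debug, Eq, PartialEq, Clone)]
pub struct Config {
    pub source: Source,
    pub domains: Option<Vec<String>>,
    pub forms: Option<Forms>,
    pub image_compression: Option<ImageCompression>,
    pub redirects: Option<Vec<Redirects>>,
}

#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Source {
    production_branch: String,
    staging: Option<String>,
}

#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Forms {
    pub enable: bool,
}

#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct ImageCompression {
    pub enable: bool,
}

#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Redirects {
    pub from: String,
    pub to: String,
}

fn main() {
    // Hypothetical librepages.toml content, assembled from the fields above.
    let raw = r#"
domains = ["example.org"]

[source]
production_branch = "librepages"

[forms]
enable = true

[image_compression]
enable = true

[[redirects]]
from = "/old"
to = "/new"
"#;

    let config: Config = toml::from_str(raw).expect("valid librepages.toml");
    assert_eq!(config.source.production_branch, "librepages");
    assert!(config.forms.as_ref().unwrap().enable);
    assert!(config.image_compression.as_ref().unwrap().enable);
    println!("{:?}", config);
}
```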


@@ -1,193 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use tera::Context;
use tracing::info;
use super::get_auth_middleware;
use crate::api::v1::forgejo::AddWebhook;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_FORGEJO_WEBHOOK_ADD: TemplateFile =
TemplateFile::new("dash_forgejo_webhook_add", "pages/dash/forgejo/add.html");
pub struct Add {
ctx: RefCell<Context>,
}
impl CtxError for Add {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl Add {
pub fn new(settings: &Settings) -> Self {
let ctx = RefCell::new(context(settings));
Self { ctx }
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_FORGEJO_WEBHOOK_ADD.name, &self.ctx.borrow())
.unwrap()
}
}
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.forgejo_webhook.add",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Dashboard add forgejo webhook webpage", skip(ctx))]
pub async fn get_add_forgejo_webhook(ctx: AppCtx) -> PageResult<impl Responder, Add> {
let add = Add::new(&ctx.settings).render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
#[actix_web_codegen_const_routes::post(
path = "PAGES.dash.forgejo_webhook.add",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(
name = "Post Dashboard add Forgejo webhook webpage",
skip(ctx, id, payload)
)]
pub async fn post_add_forgejo_webhook(
ctx: AppCtx,
id: Identity,
payload: web::Form<AddWebhook>,
) -> PageResult<impl Responder, Add> {
let owner = id.identity().unwrap();
let payload = payload.into_inner();
info!(
"Adding webhook for Forgejo instance: {}",
payload.forgejo_url.as_str()
);
let hook = ctx
.db
.new_webhook(payload.forgejo_url, &owner)
.await
.map_err(|e| PageError::new(Add::new(&ctx.settings), e))?;
Ok(HttpResponse::Found()
.append_header((
http::header::LOCATION,
PAGES.dash.forgejo_webhook.get_view(&hook.auth_token),
))
.finish())
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_add_forgejo_webhook);
cfg.service(post_add_forgejo_webhook);
}
#[cfg(test)]
mod tests {
use actix_web::http::StatusCode;
use actix_web::test;
use url::Url;
use crate::api::v1::forgejo::AddWebhook;
use crate::ctx::ArcCtx;
use crate::tests;
use crate::*;
use super::PAGES;
#[actix_rt::test]
async fn postgres_dashboadr_add_forgejo_webhook_works() {
let (_, ctx) = tests::get_ctx().await;
dashboadr_add_forgejo_webhook_works(ctx.clone()).await;
}
async fn dashboadr_add_forgejo_webhook_works(ctx: ArcCtx) {
const NAME: &str = "testdashwebhookforgejoadduser";
const EMAIL: &str = "testdashwebhookforgejoadduser@foo.com";
const PASSWORD: &str = "longpassword";
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let cookies = get_cookie!(signin_resp);
let app = get_app!(ctx.clone()).await;
let resp = get_request!(&app, PAGES.dash.forgejo_webhook.add, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains("Add Forgejo Webhook"));
let payload = AddWebhook {
forgejo_url: Url::parse("https://git.batsense.net").unwrap(),
};
let add_webhook = test::call_service(
&app,
post_request!(&payload, PAGES.dash.forgejo_webhook.add, FORM)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(add_webhook.status(), StatusCode::FOUND);
let mut hooks = ctx.db.list_all_webhooks_with_owner(NAME).await.unwrap();
let hook = hooks.pop().unwrap();
// let mut event = ctx.db.list(&site.hostname).await.unwrap();
// let event = event.pop().unwrap();
let headers = add_webhook.headers();
let view_webhook_url = PAGES.dash.forgejo_webhook.get_view(&hook.auth_token);
assert_eq!(
headers.get(actix_web::http::header::LOCATION).unwrap(),
&view_webhook_url
);
// list webhooks
let resp = get_request!(&app, PAGES.dash.forgejo_webhook.list, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains(hook.forgejo_url.as_str()));
// view webhook
let resp = get_request!(&app, &view_webhook_url, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains("****"));
assert!(res.contains(
&crate::V1_API_ROUTES
.forgejo
.get_webhook_url(&ctx, &hook.auth_token)
));
let show_forgejo_webhook_secret =
format!("{view_webhook_url}?show_forgejo_webhook_secret=true");
let resp = get_request!(&app, &show_forgejo_webhook_secret, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains(&hook.forgejo_webhook_secret));
}
}

View file

@ -1,93 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use tera::Context;
use super::get_auth_middleware;
use crate::errors::ServiceResult;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_FORGEJO_WEBHOOK_LIST: TemplateFile =
TemplateFile::new("dash_forgejo_webhook_list", "pages/dash/forgejo/list.html");
pub struct List {
ctx: RefCell<Context>,
}
impl CtxError for List {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl List {
pub fn new(settings: &Settings, hooks: Option<&[TemplateForgejoWebhook]>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(hooks) = hooks {
ctx.borrow_mut().insert(PAYLOAD_KEY, hooks);
}
Self { ctx }
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_FORGEJO_WEBHOOK_LIST.name, &self.ctx.borrow())
.unwrap()
}
}
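/// Fetch every webhook owned by the signed-in user and wrap each record for
/// template rendering.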
async fn get_webhook_data(
ctx: &AppCtx,
id: &Identity,
) -> ServiceResult<Vec<TemplateForgejoWebhook>> {
let db_hooks = ctx
.db
.list_all_webhooks_with_owner(&id.identity().unwrap())
.await?;
let mut hooks = Vec::with_capacity(db_hooks.len());
for hook in db_hooks {
hooks.push(TemplateForgejoWebhook::new(ctx, hook));
}
Ok(hooks)
}
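/// Serve the dashboard page that lists all of the signed-in user's Forgejo webhooks.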
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.forgejo_webhook.list",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "List all Forgejo webhooks", skip(ctx, id))]
pub async fn list_hooks(ctx: AppCtx, id: Identity) -> PageResult<impl Responder, List> {
let sites = get_webhook_data(&ctx, &id)
.await
.map_err(|e| PageError::new(List::new(&ctx.settings, None), e))?;
let home = List::new(&ctx.settings, Some(&sites)).render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(home))
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list_hooks);
}

View file

@ -1,69 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::*;
use serde::{Deserialize, Serialize};
use super::get_auth_middleware;
pub use super::home::TemplateSite;
pub use super::{context, Footer, TemplateFile, PAGES, PAYLOAD_KEY, TEMPLATES};
use crate::ctx::Ctx;
use crate::db::ForgejoWebhook;
pub mod add;
pub mod list;
pub mod view;
pub fn register_templates(t: &mut tera::Tera) {
add::DASH_FORGEJO_WEBHOOK_ADD
.register(t)
.expect(add::DASH_FORGEJO_WEBHOOK_ADD.name);
list::DASH_FORGEJO_WEBHOOK_LIST
.register(t)
.expect(list::DASH_FORGEJO_WEBHOOK_LIST.name);
view::DASH_FORGEJO_WEBHOOK_VIEW
.register(t)
.expect(view::DASH_FORGEJO_WEBHOOK_VIEW.name);
}
pub fn services(cfg: &mut web::ServiceConfig) {
add::services(cfg);
list::services(cfg);
view::services(cfg);
}
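/// A webhook record paired with its dashboard view URL and its API endpoint
/// URL, ready for use in templates.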
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
pub struct TemplateForgejoWebhook {
pub webhook: ForgejoWebhook,
pub view: String,
pub url: String,
}
impl TemplateForgejoWebhook {
pub fn new(ctx: &Ctx, hook: ForgejoWebhook) -> Self {
let view = PAGES.dash.forgejo_webhook.get_view(&hook.auth_token);
let url = crate::V1_API_ROUTES
.forgejo
.get_webhook_url(ctx, &hook.auth_token);
Self {
webhook: hook,
view,
url,
}
}
}

View file

@ -1,108 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use serde::{Deserialize, Serialize};
use tera::Context;
use super::get_auth_middleware;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_FORGEJO_WEBHOOK_VIEW: TemplateFile =
TemplateFile::new("dash_forgejo_webhook_view", "pages/dash/forgejo/view.html");
const SHOW_FORGEJO_WEBHOOK_SECRET_KEY: &str = "show_forgejo_webhook_secret";
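/// Template context for the page showing a single Forgejo webhook.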
pub struct View {
ctx: RefCell<Context>,
}
impl CtxError for View {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl View {
pub fn new(settings: &Settings, payload: Option<TemplateForgejoWebhook>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(payload) = payload {
ctx.borrow_mut().insert(PAYLOAD_KEY, &payload);
}
Self { ctx }
}
pub fn show_forgejo_webhook_secret(&mut self) {
self.ctx
.borrow_mut()
.insert(SHOW_FORGEJO_WEBHOOK_SECRET_KEY, &true);
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_FORGEJO_WEBHOOK_VIEW.name, &self.ctx.borrow())
.unwrap()
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct ViewOptions {
show_forgejo_webhook_secret: Option<bool>,
}
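/// Serve the view page for a single webhook, looked up by auth token for the
/// signed-in owner; the webhook secret is rendered only when
/// `?show_forgejo_webhook_secret=true` is passed.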
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.forgejo_webhook.view",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Dashboard Forgejo webhook webpage", skip(ctx, id))]
pub async fn get_view_site(
ctx: AppCtx,
id: Identity,
path: web::Path<String>,
query: web::Query<ViewOptions>,
) -> PageResult<impl Responder, View> {
let auth_token = path.into_inner();
let owner = id.identity().unwrap();
let hook = ctx
.db
.get_webhook_with_owner(&auth_token, &owner)
.await
.map_err(|e| PageError::new(View::new(&ctx.settings, None), e))?;
let payload = TemplateForgejoWebhook::new(&ctx, hook);
let mut page = View::new(&ctx.settings, Some(payload));
if let Some(true) = query.show_forgejo_webhook_secret {
page.show_forgejo_webhook_secret();
}
let add = page.render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_view_site);
}

View file

@ -24,7 +24,6 @@ pub use super::{context, Footer, TemplateFile, PAGES, PAYLOAD_KEY, TEMPLATES};
use crate::db::Event;
use crate::db::LibrePagesEvent;
pub mod forgejo;
pub mod home;
pub mod sites;
@ -50,11 +49,9 @@ impl From<LibrePagesEvent> for TemplateSiteEvent {
pub fn register_templates(t: &mut tera::Tera) {
home::DASH_HOME.register(t).expect(home::DASH_HOME.name);
sites::register_templates(t);
forgejo::register_templates(t);
}
pub fn services(cfg: &mut web::ServiceConfig) {
home::services(cfg);
sites::services(cfg);
forgejo::services(cfg);
}

View file

@ -72,7 +72,6 @@ pub struct Dash {
/// home route
pub home: &'static str,
pub site: DashSite,
pub forgejo_webhook: ForgejoWebhook,
}
impl Dash {
@ -80,37 +79,7 @@ impl Dash {
pub const fn new() -> Dash {
let home = "/dash";
let site = DashSite::new();
let forgejo_webhook = ForgejoWebhook::new();
Dash {
home,
site,
forgejo_webhook,
}
}
}
#[derive(Serialize)]
/// Dashboard ForgejoWebhook routes
pub struct ForgejoWebhook {
/// add Forgejo webhook route
pub add: &'static str,
/// view Forgejo webhook route
pub view: &'static str,
/// list Forgejo webhooks route
pub list: &'static str,
}
impl ForgejoWebhook {
/// create new instance of ForgejoWebhook route
pub const fn new() -> ForgejoWebhook {
let add = "/dash/forgejo/webhook/add";
let list = "/dash/forgejo/webhook/list";
let view = "/dash/forgejo/webhook/view/{auth_token}";
ForgejoWebhook { add, view, list }
}
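/// Expand the `view` route template into a concrete path, e.g.
/// `get_view("abc123")` yields `/dash/forgejo/webhook/view/abc123`
/// (the token value here is illustrative).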
pub fn get_view(&self, auth_token: &str) -> String {
self.view.replace("{auth_token}", auth_token)
Dash { home, site }
}
}

View file

@ -82,14 +82,6 @@ pub struct Settings {
pub source_code: String,
pub database: Database,
pub page: PageConfig,
pub conductors: Vec<Conductor>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conductor {
pub username: String,
pub api_key: String,
pub url: Url,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -136,6 +128,15 @@ impl Settings {
Err(e) => warn!("couldn't interpret PORT: {}", e),
}
if let Ok(val) = env::var("DATABASE_URL") {
let url = Url::parse(&val).expect("couldn't parse Database URL");
s = s.set_override("database.url", url.to_string()).unwrap();
let database_type = DBType::from_url(&url).unwrap();
s = s
.set_override("database.database_type", database_type.to_string())
.unwrap();
}
let intermediate_config = s.build_cloned().unwrap();
s = s
@ -162,15 +163,6 @@ impl Settings {
)
.expect("Couldn't set database url");
if let Ok(val) = env::var("DATABASE_URL") {
let url = Url::parse(&val).expect("couldn't parse Database URL");
s = s.set_override("database.url", url.to_string()).unwrap();
let database_type = DBType::from_url(&url).unwrap();
s = s
.set_override("database.database_type", database_type.to_string())
.unwrap();
}
let settings = s.build()?.try_deserialize::<Settings>()?;
settings.check_url();

View file

@ -33,7 +33,7 @@ use crate::page::Page;
use crate::settings::Settings;
use crate::*;
pub const REPO_URL: &str = "https://github.com/mCaptcha/website/";
pub const REPO_URL: &str = "http://localhost:8080/mCaptcha/website/";
pub const BRANCH: &str = "gh-pages";
pub async fn get_ctx() -> (Temp, Arc<Ctx>) {

View file

@ -16,11 +16,6 @@
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ page.dash.site.add }}">New Site</a>
</div>
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ page.dash.forgejo_webhook.list }}">Webhooks</a>
</div>
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ page.auth.logout }}">Log out</a>
</div>

View file

@ -1,23 +0,0 @@
{% extends 'base' %}{% block title %} Add Forgejo Webhook{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<form class="auth-form" action="{{ page.dash.forgejo_webhook.add }}" method="POST">
<label class="auth-form__label" for="forgejo_url">
Forgejo instance URL
<input
type="url"
name="forgejo_url"
id="forgejo_url"
class="auth-form__input"
/>
</label>
<div class="auth-form__action-container">
<button class="auth-form__submit" type="submit">Add Webhook</button>
</div>
</form>
</div>
</main>
{% endblock main %}

View file

@ -1,33 +0,0 @@
{% extends 'base' %}{% block title %} Forgejo Webhooks{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="sites__collection">
<div class="sites__actions">
<a class="sites__actions__new-site" href="{{ page.dash.forgejo_webhook.add }}">
<button>Add New Forgejo Webhook</button>
</a>
</div>
{% if payload|length > 0 %}
{% for hook in payload %}
<a href="{{ hook.view }}" class="site__container">
<div class="site__info--head">
<img
class="site__container--preview"
src="{{ hook.webhook.forgejo_url }}/favicon.ico"
alt="Webhook icon"
/>
<div class="site__info--column">
<p><b>{{ hook.webhook.forgejo_url }}</b></p>
<p>Forgejo instance {{ hook.webhook.forgejo_url }}</p>
</div>
</div>
</a>
{% endfor %}
{% else %}
<p class="sites__banner">Nothing to show, click <a href="{{page.dash.forgejo_webhook.add}}">here</a> to add new Forgejo webhook!</p>
{% endif %}
</div>
</main>
{% endblock main %}

View file

@ -1,32 +0,0 @@
{% extends 'base' %}{% block title %} {{ payload.webhook.forgejo_url }}{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<section>
<table>
<tr>
<th>Webhook URL</th>
<td>{{ payload.url }}</td>
</tr>
<tr>
<th>Secret</th>
<td>
{% if show_forgejo_webhook_secret %}
{{ payload.webhook.forgejo_webhook_secret }} <a href="{{ payload.view }}">Hide</a>
{% else %}
****
<a href="{{ payload.view }}?show_forgejo_webhook_secret=true">
Show
</a>
{% endif %}
</td>
</tr>
</table>
</section>
</div>
</main>
{% endblock main %}

View file

@ -15,7 +15,7 @@
<img
class="site__container--preview"
src="{{ deployment.site.hostname }}/favicon.ico"
alt="Website icon"
alt="{{ deployment.site.hostname }}'s favicon"
/>
<div class="site__info--column">
<p><b>{{ deployment.site.hostname }}</b></p>

View file

@ -167,15 +167,6 @@ version = "0.2.125"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
[[package]]
name = "librepages"
version = "0.1.0"
dependencies = [
"cache-buster",
"serde",
"serde_json",
]
[[package]]
name = "mime"
version = "0.3.16"
@ -192,6 +183,15 @@ dependencies = [
"unicase",
]
[[package]]
name = "pages"
version = "0.1.0"
dependencies = [
"cache-buster",
"serde",
"serde_json",
]
[[package]]
name = "proc-macro2"
version = "1.0.38"

View file

@ -1,11 +1,11 @@
[package]
name = "librepages"
name = "pages"
version = "0.1.0"
edition = "2021"
homepage = "https://git.batsense.net/LibrePages/librepages"
repository = "https://git.batsense.net/LibrePages/librepages"
documentation = "https://github.con/realaravinth/librepages"
readme = "https://git.batsense.net/LibrePages/librepages/blob/master/README.md"
homepage = "https://github.com/realaravinth/pages"
repository = "https://github.com/realaravinth/pages"
documentation = "https://github.con/realaravinth/pages"
readme = "https://github.com/realaravinth/pages/blob/master/README.md"
license = "AGPLv3 or later version"
authors = ["realaravinth <realaravinth@batsense.net>"]