Compare commits


No commits in common. "master" and "wip-tracing" have entirely different histories.
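
Both branch names below are taken from this comparison; to confirm the missing merge base locally, a minimal check (a sketch, assuming both branches are fetched into the same clone) is:

```bash
# With no common ancestor, merge-base prints nothing and exits non-zero.
git merge-base master wip-tracing || echo "branches share no history"
```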

78 changed files with 1067 additions and 4846 deletions


@ -1,4 +1,4 @@
**/target/
/target
tarpaulin-report.html
.env
cobertura.xml


@ -68,9 +68,6 @@ jobs:
- name: download deps
run: make dev-env
env:
LPCONDUCTOR_CREDS_USERNAME: "librepages_api"
LPCONDUCTOR_CREDS_PASSWORD: "longrandomlygeneratedpassword"
- name: Apply migrations
run: make migrate


@ -55,9 +55,6 @@ jobs:
- name: download deps
run: make dev-env
env:
LPCONDUCTOR_CREDS_USERNAME: "librepages_api"
LPCONDUCTOR_CREDS_PASSWORD: "longrandomlygeneratedpassword"
- name: configure GPG key
if: (github.ref == 'refs/heads/master' || github.event_name == 'push') && github.repository == 'realaravinth/librepages'


@ -1,82 +0,0 @@
pipeline:
backend:
image: rust
environment:
- DATABASE_URL=postgres://postgres:password@database:5432/postgres
commands:
- curl -fsSL https://deb.nodesource.com/setup_16.x | bash - &&\
- apt update && apt-get -y --no-install-recommends install nodejs tar gpg curl wget
- rustup component add rustfmt
- rustup component add clippy
# rewrite conductor configuration
- sed -i 's%url = "http:\/\/localhost:5000"%url = "http:\/\/librepages-conductor:5000"%' config/default.toml
- make dev-env
- make migrate
- make lint
- make test
- make release
build_docker_img:
image: plugins/docker
when:
event: [pull_request]
settings:
dry_run: true
repo: realaravinth/librepages
tags: latest
build_and_publish_docker_img:
image: plugins/docker
when:
event: [push, tag, deployment]
settings:
username: realaravinth
password:
from_secret: DOCKER_TOKEN
repo: realaravinth/librepages
tags: latest
# build_publisher_docker_img:
# image: plugins/docker
# when:
# event: [push, tag, deployment]
# settings:
# dry_run: true
# dockerfile: scripts/publish-bins-docker
# purge: false
# repo: realaravinth/librepages-publisher
# tags: latest
#
publish_bins:
image: rust
when:
event: [push, tag, deployment]
commands:
- apt update
- apt-get -y --no-install-recommends install gpg tar curl wget
- echo -n "$RELEASE_BOT_GPG_SIGNING_KEY" | gpg --batch --import --pinentry-mode loopback
- scripts/bin-publish.sh publish master latest $DUMBSERVE_PASSWORD
secrets: [RELEASE_BOT_GPG_SIGNING_KEY, DUMBSERVE_PASSWORD, GPG_PASSWORD]
services:
database:
image: postgres
environment:
- POSTGRES_PASSWORD=password
librepages-conductor:
image: realaravinth/librepages-conductor
command: conductor serve
environment:
- LPCONDUCTOR_SERVER__PROXY_HAS_TLS=false
- LPCONDUCTOR_DEBUG=false
- LPCONDUCTOR_CONDUCTOR=dummy
- LPCONDUCTOR_SERVER_URL_PREFIX=""
- LPCONDUCTOR_SERVER_DOMAIN="librepages.test"
- LPCONDUCTOR_SERVER_IP=0.0.0.0
- LPCONDUCTOR_SERVER_PROXY_HAS_TLS=false
- LPCONDUCTOR_SERVER_PORT=7000
- LPCONDUCTOR_SOURCE_CODE=https://example.org
- LPCONDUCTOR_CREDS_USERNAME="librepages_api"
- LPCONDUCTOR_CREDS_PASSWORD="longrandomlygeneratedpassword"
- PORT=5000

Cargo.lock (generated): 1058 changes. File diff suppressed because it is too large.
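
The suppressed lockfile diff can still be inspected locally; a sketch, assuming the same clone and branch names as above:

```bash
# Two-commit diff restricted to the lockfile; works even though the
# branches have no common ancestor.
git diff master wip-tracing -- Cargo.lock
```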


@ -1,12 +1,12 @@
[package]
name = "librepages"
name = "pages"
version = "0.1.0"
edition = "2021"
build = "build.rs"
homepage = "https://git.batsense.net/LibrePages/librepages"
repository = "https://git.batsense.net/LibrePages/librepages"
documentation = "https://git.batsense.net/LibrePages/librepages"
readme = "https://git.batsense.net/LibrePages/librepages/blob/master/README.md"
homepage = "https://github.com/realaravinth/pages"
repository = "https://github.com/realaravinth/pages"
documentation = "https://github.con/realaravinth/pages"
readme = "https://github.com/realaravinth/pages/blob/master/README.md"
license = "AGPLv3 or later version"
authors = ["realaravinth <realaravinth@batsense.net>"]
@ -19,10 +19,8 @@ actix-identity = "0.4.0"
actix-rt = "2"
actix-web-codegen-const-routes = { version = "0.1.0", tag = "0.1.0", git = "https://github.com/realaravinth/actix-web-codegen-const-routes" }
argon2-creds = { branch = "master", git = "https://github.com/realaravinth/argon2-creds"}
sqlx = { version = "0.6.2", features = ["runtime-actix-rustls", "postgres", "time", "offline", "json", "uuid"] }
sqlx = { version = "0.6.1", features = [ "runtime-actix-rustls", "postgres", "time", "offline", "json"] }
clap = { version = "3.2.20", features = ["derive"]}
libconfig = { version = "0.1.0", git = "https://git.batsense.net/librepages/libconfig" }
libconductor = { version = "0.1.0", git = "https://git.batsense.net/librepages/conductor/" }
config = "0.13"
git2 = "0.14.2"
@ -33,7 +31,8 @@ serde_json = "1"
pretty_env_logger = "0.4"
lazy_static = "1.4"
url = { version = "2.2", features = ["serde"] }
url = "2.2"
urlencoding = "2.1.0"
derive_more = "0.99"
@ -49,13 +48,6 @@ rust-embed = "6.3.0"
rand = "0.8.5"
tracing = { version = "0.1.37", features = ["log"]}
tracing-actix-web = "0.6.2"
toml = "0.5.9"
serde_yaml = "0.9.14"
uuid = { version = "1.2.2", features = ["serde"] }
reqwest = { version = "0.11.13", features = ["json"] }
sha2 = "0.10.6"
hmac = "0.12.1"
hex= "0.4.3"
[dependencies.cache-buster]
git = "https://github.com/realaravinth/cache-buster"


@ -19,11 +19,11 @@ RUN cd utils/cache-bust && cargo run
RUN cargo build --release
FROM debian:bullseye-slim
#RUN useradd -ms /bin/bash -u 1000 librepages
#RUN mkdir -p /var/www/librepages && chown librepages /var/www/librepages
#RUN useradd -ms /bin/bash -u 1000 pages
#RUN mkdir -p /var/www/pages && chown pages /var/www/pages
RUN apt-get update && apt-get install -y ca-certificates
COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
COPY --from=rust /src/target/release/librepages /usr/local/bin/
COPY --from=rust /src/target/release/pages /usr/local/bin/
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]


@ -30,14 +30,14 @@ doc: ## Prepare documentation
docker: ## Build docker images
docker build \
-t realaravinth/librepages:master \
-t realaravinth/librepages:latest \
-t realaravinth/librepages:0.1.0 .
-t realaravinth/pages:master \
-t realaravinth/pages:latest \
-t realaravinth/pages:0.1.0 .
docker-publish: docker ## Build and publish docker images
docker push realaravinth/librepages:master
docker push realaravinth/librepages:latest
docker push realaravinth/librepages:0.1.0
docker push realaravinth/pages:master
docker push realaravinth/pages:latest
docker push realaravinth/pages:0.1.0
lint: ## Lint codebase
cargo fmt -v --all -- --emit files
@ -46,7 +46,7 @@ lint: ## Lint codebase
migrate: ## run migrations
$(call cache_bust)
unset DATABASE_URL && cargo build
DATABASE_URL=${DATABASE_URL} cargo run -- migrate
cargo run -- migrate
release: ## Release build
$(call cache_bust)


@ -5,8 +5,10 @@
**Auto-deploy static websites from git repositories**
</p>
[![status-badge](https://ci.batsense.net/api/badges/LibrePages/librepages/status.svg)](https://ci.batsense.net/LibrePages/librepages)
[![Build](https://github.com/realaravinth/pages/actions/workflows/linux.yml/badge.svg)](https://github.com/realaravinth/pages/actions/workflows/linux.yml)
[![dependency status](https://deps.rs/repo/github/realaravinth/pages/status.svg)](https://deps.rs/repo/github/realaravinth/pages)
[![codecov](https://codecov.io/gh/realaravinth/pages/branch/master/graph/badge.svg)](https://codecov.io/gh/realaravinth/pages)
</div>


@ -18,7 +18,7 @@ use std::process::Command;
fn main() {
let output = Command::new("git")
.args(["rev-parse", "HEAD"])
.args(&["rev-parse", "HEAD"])
.output()
.expect("error in git command, is git installed?");
let git_hash = String::from_utf8(output.stdout).unwrap();


@ -1,11 +1,8 @@
debug = true
allow_registration = true
# source code of your copy of pages server.
source_code = "https://git.batsense.net/LibrePages/pages"
source_code = "https://github.com/realaravinth/pages"
support_email = "support@librepages.example.org"
conductors = [
{ username = "librepages_api", api_key = "longrandomlygeneratedpassword", url = "http://localhost:5000"}
]
[server]
# The port at which you want Pages to listen to
@ -16,9 +13,10 @@ ip= "0.0.0.0"
# Minimum of two threads are advisable for top async performance but can work
# with one also.
workers = 2
domain = "localhost"
domain = "demo.librepages.org"
cookie_secret = "94b2b2732626fdb7736229a7c777cb451e6304c147c4549f30"
[page]
base_path = "/tmp/librepages-defualt-config/"
base_domain = "librepages.test" # domain where customer pages will be deployed.


@ -3,10 +3,10 @@
The process is tedious, most of this will be automated with a script in
the future.
## 1. Create new user for running `librepages`:
## 1. Create new user for running `pages`:
```bash
sudo useradd -b /srv -m -s /usr/bin/zsh librepages
sudo useradd -b /srv -m -s /usr/bin/zsh pages
```
## 2. Install Runtime dependencies
@ -19,11 +19,11 @@ On Debian-based systems, run:
sudo apt install nginx
```
## 3. Build `librepages`
## 3. Build `Pages`
### i. Install Build Dependencies
To build `librepages`, you need the following dependencies:
To build `pages`, you need the following dependencies:
1. [Git](https://packages.debian.org/bullseye/git)
2. [pkg-config](https://packages.debian.org/bullseye/pkg-config)
@ -71,23 +71,23 @@ Install binary and copy demo configuration file into default configuration
lookup path(`/etc/static-pages/config.toml`)
```bash
sudo cp ./target/release/librepages /usr/local/bin/ && \
sudo cp ./target/release/pages /usr/local/bin/ && \
sudo mkdir /etc/static-pages && \
sudo cp config/default.toml /etc/static-pages/config.toml
```
## 4. Systemd service configuration:
### i. Copy the following to `/etc/systemd/system/librepages.service`:
### i. Copy the following to `/etc/systemd/system/pages.service`:
```systemd
[Unit]
Description=librepages: Auto-deploy static websites from git repositories
Description=pages: Auto-deploy static websites from git repositories
[Service]
Type=simple
User=librepages
ExecStart=/usr/local/bin/librepages
User=pages
ExecStart=/usr/local/bin/pages
Restart=on-failure
RestartSec=1
MemoryDenyWriteExecute=true
@ -107,13 +107,13 @@ WantedBy=multi-user.target
```bash
sudo systemctl daemon-reload && \
sudo systemctl enable librepages && \ # Auto startup during boot
sudo systemctl start librepages
sudo systemctl enable pages && \ # Auto startup during boot
sudo systemctl start pages
```
## 5. Optionally configure Nginx to reverse proxy requests to LibrePages
## 5. Optionally configure Nginx to reverse proxy requests to Pages
**NOTE: This sections includes instructions to reverse proxy requests to
LibrePages API, not the websites managed by librepages.**
Pages API, not the websites managed by Pages.**
See [here](../../config/librepages-nginx-config) for sample Nginx configuration.
See [here](../../config/pages-nginx-config) for sample Nginx configuration.


@ -3,11 +3,8 @@ CREATE TABLE IF NOT EXISTS librepages_sites (
repo_url VARCHAR(3000) NOT NULL,
branch TEXT NOT NULL,
hostname VARCHAR(3000) NOT NULL UNIQUE,
pub_id uuid NOT NULL UNIQUE,
ID SERIAL PRIMARY KEY NOT NULL,
deleted BOOLEAN DEFAULT FALSE,
owned_by INTEGER NOT NULL references librepages_users(ID) ON DELETE CASCADE
);
CREATE UNIQUE INDEX librepages_sites_site_secret ON librepages_sites(site_secret);
CREATE UNIQUE INDEX librepages_sites_site_pub_id ON librepages_sites(pub_id);


@ -1,14 +0,0 @@
CREATE TABLE IF NOT EXISTS librepages_deploy_event_type (
name VARCHAR(30) NOT NULL UNIQUE,
ID SERIAL PRIMARY KEY NOT NULL
);
CREATE UNIQUE INDEX librepages_deploy_event_name_index ON librepages_deploy_event_type(name);
CREATE TABLE IF NOT EXISTS librepages_site_deploy_events (
site INTEGER NOT NULL references librepages_sites(ID) ON DELETE CASCADE,
event_type INTEGER NOT NULL references librepages_deploy_event_type(ID),
time timestamptz NOT NULL,
pub_id uuid NOT NULL UNIQUE,
ID SERIAL PRIMARY KEY NOT NULL
);


@ -1,15 +0,0 @@
CREATE TABLE IF NOT EXISTS librepages_forgejo_webhooks (
forgejo_webhook_secret VARCHAR(40) NOT NULL UNIQUE,
forgejo_url VARCHAR(3000) NOT NULL,
auth_token VARCHAR(40) NOT NULL UNIQUE,
ID SERIAL PRIMARY KEY NOT NULL,
owned_by INTEGER NOT NULL references librepages_users(ID) ON DELETE CASCADE
);
CREATE UNIQUE INDEX librepages_forgejo_webhook_auth_token_index ON librepages_forgejo_webhooks(auth_token);
CREATE TABLE IF NOT EXISTS librepages_forgejo_webhook_site_mapping (
site_id INTEGER NOT NULL references librepages_sites(ID) ON DELETE CASCADE,
forgejo_webhook_id INTEGER NOT NULL references librepages_forgejo_webhooks(ID) ON DELETE CASCADE,
UNIQUE(site_id, forgejo_webhook_id)
);


@ -33,15 +33,14 @@ FILENAME="$NAME-$2-linux-amd64"
TARBALL=$FILENAME.tar.gz
TARGET_DIR="$TMP_DIR/$FILENAME/"
mkdir -p $TARGET_DIR
DOCKER_IMG="realaravinth/librepages:$3"
DOCKER_IMG="realaravinth/pages:$3"
get_bin(){
echo "[*] Grabbing binary"
#container_id=$(docker create $DOCKER_IMG)
#docker cp $container_id:/usr/local/bin/pages $TARGET_DIR/
#docker rm -v $container_id
cp target/release/librepages $TARGET_DIR
container_id=$(docker create $DOCKER_IMG)
docker cp $container_id:/usr/local/bin/pages $TARGET_DIR/
docker rm -v $container_id
}
copy() {


@ -1,23 +0,0 @@
#!/bin/bash
readonly NAME=librepages-conductor
docker rm -f $NAME
docker create --name $NAME -p 5000:5000 \
-e LPCONDUCTOR__SOURCE_CODE="https://git.batsense.net/LibrePages/conductor" \
-e LPCONDUCTOR_SERVER__PROXY_HAS_TLS=false \
-e LPCONDUCTOR_DEBUG="false" \
-e LPCONDUCTOR_CONDUCTOR="dummy" \
-e LPCONDUCTOR_SERVER_URL_PREFIX="" \
-e LPCONDUCTOR_SERVER_DOMAIN="librepages.test" \
-e LPCONDUCTOR_SERVER_IP="0.0.0.0" \
-e LPCONDUCTOR_SERVER_PROXY_HAS_TLS="false" \
-e LPCONDUCTOR_SERVER_PORT=7000 \
-e LPCONDUCTOR_SOURCE_CODE="https://example.org" \
-e LPCONDUCTOR_CREDS_USERNAME=$LPCONDUCTOR_CREDS_USERNAME \
-e LPCONDUCTOR_CREDS_PASSWORD=$LPCONDUCTOR_CREDS_PASSWORD \
-e PORT="5000"\
realaravinth/librepages-conductor conductor serve
docker start $NAME


@ -15,4 +15,4 @@ else
useradd --uid $USER_ID -b /home -m -s /bin/bash $LIBREPAGES_USER
fi
su $LIBREPAGES_USER -c 'librepages'
su $LIBREPAGES_USER -c 'pages'


@ -1,14 +0,0 @@
FROM realaravinth/librepages:latest as base
RUN echo foo
FROM debian:bullseye-slim
RUN apt update
RUN apt-get -y --no-install-recommends install gpg tar curl wget
WORKDIR /src
COPY --from=base /usr/local/bin/librepages .
COPY . .
ARG RELEASE_BOT_GPG_SIGNING_KEY
RUN echo -n "$RELEASE_BOT_GPG_SIGNING_KEY"
RUN echo -n "$RELEASE_BOT_GPG_SIGNING_KEY" | gpg --batch --import --pinentry-mode loopback
env GPG_PASSWORD=$GPG_PASSWORD
RUN /src/scripts/bin-publish.sh publish master latest $DUMBSERVE_PASSWORD


@ -1,52 +1,33 @@
{
"db": "PostgreSQL",
"10d30dade86d79210203bdbce4b6db5d2aa446b0f88ca834771ecbbe11be51fb": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Text"
]
}
},
"query": "UPDATE librepages_sites SET deleted = true\n WHERE hostname = ($1)\n AND owned_by = ( SELECT ID FROM librepages_users WHERE name = $2);\n "
},
"12391b10cf16a807322c49c9cc7e0a015f26b445beacf4cdd4e7714f36b4adf6": {
"1ac91b492001493430c686d9cd7d6be03ada4b4c431d7bc112ef2105eba0e82d": {
"describe": {
"columns": [
{
"name": "site_secret",
"name": "repo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "repo_url",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "branch",
"ordinal": 2,
"ordinal": 1,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 3,
"ordinal": 2,
"type_info": "Varchar"
},
{
"name": "pub_id",
"ordinal": 4,
"type_info": "Uuid"
"name": "owned_by",
"ordinal": 3,
"type_info": "Int4"
}
],
"nullable": [
false,
false,
false,
false,
false
],
"parameters": {
@ -55,22 +36,7 @@
]
}
},
"query": "SELECT site_secret, repo_url, branch, hostname, pub_id\n FROM librepages_sites\n WHERE deleted = false\n AND owned_by = (SELECT ID FROM librepages_users WHERE name = $1 );\n "
},
"14cdc724af64942e93994f97e9eafc8272d15605eff7aab9e5177d01f2bf6118": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Timestamptz",
"Text",
"Uuid"
]
}
},
"query": "INSERT INTO librepages_site_deploy_events\n (event_type, time, site, pub_id) VALUES (\n (SELECT iD from librepages_deploy_event_type WHERE name = $1),\n $2,\n (SELECT ID from librepages_sites WHERE hostname = $3),\n $4\n );\n "
"query": "SELECT repo_url, branch, hostname, owned_by\n FROM librepages_sites\n WHERE site_secret = $1\n "
},
"1be33ea4fe0e6079c88768ff912b824f4b0250193f2d086046c1fd0da125ae0c": {
"describe": {
@ -111,63 +77,30 @@
},
"query": "UPDATE librepages_users set name = $1\n WHERE name = $2"
},
"39854fcbfb0247377c6c5ca70c2c0fa7804548848bf56f881eea2f8242e7a09d": {
"3705b8869aab99d749c08d9c9633931f0b74216957b6a2881bd56a33c33a8c47": {
"describe": {
"columns": [
{
"name": "name",
"name": "exists",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "time",
"ordinal": 1,
"type_info": "Timestamptz"
},
{
"name": "pub_id",
"ordinal": 2,
"type_info": "Uuid"
"type_info": "Bool"
}
],
"nullable": [
false,
false,
false
null
],
"parameters": {
"Left": [
"Text",
"Uuid"
"Text"
]
}
},
"query": "SELECT\n librepages_deploy_event_type.name,\n librepages_site_deploy_events.time,\n librepages_site_deploy_events.pub_id\n FROM\n librepages_site_deploy_events\n INNER JOIN librepages_deploy_event_type ON\n librepages_deploy_event_type.ID = librepages_site_deploy_events.event_type\n WHERE\n librepages_site_deploy_events.site = (\n SELECT ID FROM librepages_sites WHERE hostname = $1\n )\n AND\n librepages_site_deploy_events.pub_id = $2\n "
"query": "SELECT EXISTS (SELECT 1 from librepages_sites WHERE hostname = $1)"
},
"3ecc3a4c89b1289368ef9d9c97204330f74138a0da614ef2174c59a687119595": {
"416b9f0412f0d7ee05d4a350839c5a6d1e06c1d7f8942744f6d806ddc47084c2": {
"describe": {
"columns": [
{
"name": "forgejo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "auth_token",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "forgejo_webhook_secret",
"ordinal": 2,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false
],
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
@ -175,122 +108,7 @@
]
}
},
"query": "SELECT\n forgejo_url, auth_token, forgejo_webhook_secret\n FROM\n librepages_forgejo_webhooks\n WHERE\n auth_token = $1\n AND\n owned_by = (SELECT ID FROM librepages_users WHERE name = $2);\n "
},
"432fe829719ce8110f768b4a611724bb34191ac224d2143ff4c81548da75c103": {
"describe": {
"columns": [
{
"name": "repo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "branch",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 2,
"type_info": "Varchar"
},
{
"name": "owned_by",
"ordinal": 3,
"type_info": "Int4"
},
{
"name": "site_secret",
"ordinal": 4,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false,
false,
false
],
"parameters": {
"Left": [
"Uuid",
"Text"
]
}
},
"query": "SELECT repo_url, branch, hostname, owned_by, site_secret\n FROM librepages_sites\n WHERE pub_id = $1\n AND\n owned_by = (SELECT ID from librepages_users WHERE name = $2)\n AND\n deleted = false;\n "
},
"4445ff3226af3b5a24b255c5bb012c99b222cc7bd6dda80f232809ed273fc712": {
"describe": {
"columns": [
{
"name": "repo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "site_secret",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "branch",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "owned_by",
"ordinal": 4,
"type_info": "Int4"
},
{
"name": "pub_id",
"ordinal": 5,
"type_info": "Uuid"
}
],
"nullable": [
false,
false,
false,
false,
false,
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT repo_url, site_secret, branch, hostname, owned_by, pub_id\n FROM librepages_sites\n WHERE repo_url = $1\n AND deleted = false;\n "
},
"4cddf1049783251bfc79090055724e894a2be9451302f7691ce2f4240f1ee3ad": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int4"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT ID FROM librepages_sites WHERE repo_url = $1"
"query": "DELETE FROM librepages_sites\n WHERE hostname = ($1)\n AND owned_by = ( SELECT ID FROM librepages_users WHERE name = $2);\n "
},
"53f3c21c06c8d1c218537dfa9183fd0604aaf28200d8aa12e97db4ac317df39e": {
"describe": {
@ -312,50 +130,6 @@
},
"query": "SELECT name FROM librepages_users WHERE ID = $1"
},
"54f1ad328c83997d5e80686665d4cfef58d3529d24cb6caaa7ff301479e5d735": {
"describe": {
"columns": [
{
"name": "repo_url",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "branch",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 2,
"type_info": "Varchar"
},
{
"name": "owned_by",
"ordinal": 3,
"type_info": "Int4"
},
{
"name": "pub_id",
"ordinal": 4,
"type_info": "Uuid"
}
],
"nullable": [
false,
false,
false,
false,
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT repo_url, branch, hostname, owned_by, pub_id\n FROM librepages_sites\n WHERE site_secret = $1\n AND deleted = false;\n "
},
"5c5d774bde06c0ab83c3616a56a28f12dfd9c546cbaac9f246d3b350c587823e": {
"describe": {
"columns": [],
@ -368,50 +142,21 @@
},
"query": "DELETE FROM librepages_users WHERE name = ($1)"
},
"65f6181364cd8c6ed4eae3f62b5ae771a27e8da6e698c235ca77d4cec784d04b": {
"67311c6196639edd153b7b7dd56a37703b67abe750b88f5afdcf0d3d779432e7": {
"describe": {
"columns": [
{
"name": "site_secret",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "repo_url",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "branch",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "pub_id",
"ordinal": 4,
"type_info": "Uuid"
}
],
"nullable": [
false,
false,
false,
false,
false
],
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Text",
"Varchar",
"Text"
]
}
},
"query": "SELECT site_secret, repo_url, branch, hostname, pub_id\n FROM librepages_sites\n WHERE deleted = false\n AND owned_by = (SELECT ID FROM librepages_users WHERE name = $1 )\n AND hostname = $2;\n "
"query": "\n INSERT INTO librepages_sites\n (site_secret, repo_url, branch, hostname, owned_by)\n VALUES ($1, $2, $3, $4, ( SELECT ID FROM librepages_users WHERE name = $5 ));\n "
},
"6a557f851d4f47383b864085093beb0954e79779f21b655978f07e285281e0ac": {
"describe": {
@ -426,53 +171,6 @@
},
"query": "UPDATE librepages_users set email = $1\n WHERE name = $2"
},
"6db98c6ae90b8eb98ace1a5acfa3c8af2b6ed7d51c6debda12637f5d7b076c15": {
"describe": {
"columns": [
{
"name": "exists",
"ordinal": 0,
"type_info": "Bool"
}
],
"nullable": [
null
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT EXISTS (SELECT 1 from librepages_sites WHERE hostname = $1 AND deleted = false)"
},
"77612c85be99e6de2e4a6e3105ebaeb470d6cc57b2999b673a085da41c035f9e": {
"describe": {
"columns": [
{
"name": "time",
"ordinal": 0,
"type_info": "Timestamptz"
},
{
"name": "pub_id",
"ordinal": 1,
"type_info": "Uuid"
}
],
"nullable": [
false,
false
],
"parameters": {
"Left": [
"Text",
"Text"
]
}
},
"query": "SELECT\n time,\n pub_id\n FROM\n librepages_site_deploy_events\n WHERE\n site = (SELECT ID FROM librepages_sites WHERE hostname = $1)\n AND\n event_type = (SELECT ID FROM librepages_deploy_event_type WHERE name = $2)\n AND\n time = (\n SELECT MAX(time) \n FROM\n librepages_site_deploy_events\n WHERE\n site = (SELECT ID FROM librepages_sites WHERE hostname = $1)\n )\n "
},
"8735b654bc261571e6a5908d55a8217474c76bdff7f3cbcc71500a0fe13249db": {
"describe": {
"columns": [
@ -493,41 +191,6 @@
},
"query": "SELECT EXISTS (SELECT 1 from librepages_users WHERE email = $1)"
},
"8bf4e01b8c38d035fe6bdbfbe8ad9cb35e3fc2fd11107bae92880d157ed11379": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Varchar",
"Text"
]
}
},
"query": "INSERT INTO librepages_forgejo_webhooks\n (forgejo_url , auth_token, forgejo_webhook_secret, owned_by) VALUES ($1, $2, $3, \n (SELECT ID FROM librepages_users WHERE name = $4)\n )"
},
"90907d6cb4ca3b485f7b583584fb5821a950362679d061e490545c76634c211e": {
"describe": {
"columns": [
{
"name": "exists",
"ordinal": 0,
"type_info": "Bool"
}
],
"nullable": [
null
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT EXISTS (SELECT 1 from librepages_sites WHERE repo_url = $1)"
},
"924e756de5544cece865a10a7e136ecc6126e3a603947264cc7899387c18c819": {
"describe": {
"columns": [],
@ -541,26 +204,32 @@
},
"query": "UPDATE librepages_users set password = $1\n WHERE name = $2"
},
"9710a01bc4c5c5cda2db27d14baca3d7a6ceffa66c7d539da6fda7947c222e71": {
"9fd163d10860ad4519f9398582aaa0615d6d7b784e844ee71038f77dcd069eed": {
"describe": {
"columns": [
{
"name": "forgejo_url",
"name": "site_secret",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "auth_token",
"name": "repo_url",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "forgejo_webhook_secret",
"name": "branch",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 3,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false,
false
@ -571,39 +240,46 @@
]
}
},
"query": "SELECT\n forgejo_url, auth_token, forgejo_webhook_secret\n FROM\n librepages_forgejo_webhooks\n WHERE\n owned_by = (SELECT ID FROM librepages_users WHERE name = $1);\n "
"query": "SELECT site_secret, repo_url, branch, hostname\n FROM librepages_sites\n WHERE owned_by = (SELECT ID FROM librepages_users WHERE name = $1 );\n "
},
"a6284ede1dbf340942dd97afb75865ba0a41009a145254117b03002bd9afa588": {
"aad26d1f932001cbe49b147348aa528eca5101ec6ef83cb034e1ccd0dbd17878": {
"describe": {
"columns": [
{
"name": "forgejo_url",
"name": "site_secret",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "auth_token",
"name": "repo_url",
"ordinal": 1,
"type_info": "Varchar"
},
{
"name": "forgejo_webhook_secret",
"name": "branch",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "hostname",
"ordinal": 3,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false,
false
],
"parameters": {
"Left": [
"Text",
"Text"
]
}
},
"query": "SELECT forgejo_url, auth_token, forgejo_webhook_secret\n FROM librepages_forgejo_webhooks\n WHERE auth_token = $1\n "
"query": "SELECT site_secret, repo_url, branch, hostname\n FROM librepages_sites\n WHERE owned_by = (SELECT ID FROM librepages_users WHERE name = $1 )\n AND hostname = $2;\n "
},
"b48c77db6e663d97df44bf9ec2ee92fd3e02f2dcbcdbd1d491e09fab2da68494": {
"describe": {
@ -631,37 +307,6 @@
},
"query": "SELECT name, password FROM librepages_users WHERE email = ($1)"
},
"b7e51e976a4a80a78df8dbfed1f195af212023d00faee88ab2d09326896bd653": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Int4",
"Text"
]
}
},
"query": "INSERT INTO librepages_forgejo_webhook_site_mapping\n (site_id, forgejo_webhook_id) VALUES (\n (SELECT ID FROM librepages_sites WHERE repo_url = $1 AND ID = $2),\n (SELECT ID FROM librepages_forgejo_webhooks WHERE auth_token = $3)\n ) ON CONFLICT (site_id, forgejo_webhook_id) DO NOTHING;"
},
"b8b1b3c5fa205b071f577b2ce9993ddfc7c99ada26aea48aa1c201c8c3c7fcf6": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Text",
"Varchar",
"Uuid",
"Text"
]
}
},
"query": "\n INSERT INTO librepages_sites\n (site_secret, repo_url, branch, hostname, pub_id, owned_by)\n VALUES ($1, $2, $3, $4, $5, ( SELECT ID FROM librepages_users WHERE name = $6 ));\n "
},
"bdd4d2a1b0b97ebf8ed61cfd120b40146fbf3ea9afb5cd0e03c9d29860b6a26b": {
"describe": {
"columns": [
@ -702,70 +347,6 @@
},
"query": "SELECT email FROM librepages_users WHERE name = $1"
},
"d2327c1bcb40e18518c2112413a19a9b26eb0f54f83c53e968c9752d70c8dd4e": {
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "time",
"ordinal": 1,
"type_info": "Timestamptz"
},
{
"name": "pub_id",
"ordinal": 2,
"type_info": "Uuid"
}
],
"nullable": [
false,
false,
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT\n librepages_deploy_event_type.name,\n librepages_site_deploy_events.time,\n librepages_site_deploy_events.pub_id\n FROM\n librepages_site_deploy_events\n INNER JOIN librepages_deploy_event_type ON\n librepages_deploy_event_type.ID = librepages_site_deploy_events.event_type\n WHERE\n librepages_site_deploy_events.site = (\n SELECT ID FROM librepages_sites WHERE hostname = $1\n );\n "
},
"e4adf1bc9175eeb9d61b495653bb452039cc38818c8792acdc6a1c732b6f4554": {
"describe": {
"columns": [
{
"name": "exists",
"ordinal": 0,
"type_info": "Bool"
}
],
"nullable": [
null
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT EXISTS (SELECT 1 from librepages_deploy_event_type WHERE name = $1)"
},
"f651da8f411b7977cb87dd8d4bd5d167661d7ef1d865747e76219453d386d593": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar"
]
}
},
"query": "INSERT INTO librepages_deploy_event_type\n (name) VALUES ($1) ON CONFLICT (name) DO NOTHING;"
},
"faa4170a309f19a4abf1ca3f8dd3c0526945aa00f028ebf8bd7063825d448f5b": {
"describe": {
"columns": [],


@ -1,282 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_identity::Identity;
use actix_web::{web, HttpRequest, HttpResponse, Responder};
use serde::{Deserialize, Serialize};
use tracing::info;
use url::Url;
use super::get_auth_middleware;
use crate::{errors::*, AppCtx};
pub mod routes {
use crate::ctx::Ctx;
pub struct Forgejo {
pub add_webhook: &'static str,
pub view_webhook: &'static str,
pub list_webhooks: &'static str,
pub webhook: &'static str,
}
impl Forgejo {
pub const fn new() -> Self {
Self {
add_webhook: "/api/v1/forgejo/webhook/add",
list_webhooks: "/api/v1/forgejo/webhook/add",
view_webhook: "/api/v1/forgejo/webhook/view/{auth_token}",
webhook: "/api/v1/forgejo/webhook/event/new",
}
}
pub fn get_view(&self, auth_token: &str) -> String {
self.view_webhook.replace("{auth_token}", auth_token)
}
pub fn get_webhook_url(&self, ctx: &Ctx, auth_token: &str) -> String {
format!(
"https://{}{}?auth={auth_token}",
&ctx.settings.server.domain, self.webhook
)
}
}
}
#[derive(Serialize, Deserialize)]
pub struct AddWebhook {
pub forgejo_url: Url,
}
#[actix_web_codegen_const_routes::post(
path = "crate::V1_API_ROUTES.forgejo.add_webhook",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Add webhook" skip(id, ctx, payload))]
async fn add_webhook(
ctx: AppCtx,
id: Identity,
payload: web::Json<AddWebhook>,
) -> ServiceResult<impl Responder> {
info!(
"Adding webhook for Forgejo instance: {}",
payload.forgejo_url.as_str()
);
let owner = id.identity().unwrap();
let payload = payload.into_inner();
let hook = ctx.db.new_webhook(payload.forgejo_url, &owner).await?;
Ok(HttpResponse::Ok().json(hook))
}
#[actix_web_codegen_const_routes::get(
path = "crate::V1_API_ROUTES.forgejo.list_webhooks",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Delete webhook" skip(id, ctx))]
async fn list_webhooks(ctx: AppCtx, id: Identity) -> ServiceResult<impl Responder> {
let owner = id.identity().unwrap();
info!("Getting all webhooks created by {}", owner);
let hooks = ctx.db.list_all_webhooks_with_owner(&owner).await?;
Ok(HttpResponse::Ok().json(hooks))
}
#[actix_web_codegen_const_routes::get(
path = "crate::V1_API_ROUTES.forgejo.view_webhook",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Delete webhook" skip(id, ctx, path))]
async fn view_webhook(
ctx: AppCtx,
id: Identity,
path: web::Path<String>,
) -> ServiceResult<impl Responder> {
let path = path.into_inner();
let owner = id.identity().unwrap();
info!("Gitting webhook webhook for Forgejo instance: {}", path,);
let hook = ctx.db.get_webhook_with_owner(&path, &owner).await?;
Ok(HttpResponse::Ok().json(hook))
}
#[derive(Serialize, Deserialize)]
struct Auth {
auth: String,
}
#[actix_web_codegen_const_routes::post(path = "crate::V1_API_ROUTES.forgejo.webhook")]
#[tracing::instrument(name = "Update ", skip(body, ctx, req, q))]
async fn webhook(
ctx: AppCtx,
body: web::Bytes,
req: HttpRequest,
q: web::Query<Auth>,
) -> ServiceResult<impl Responder> {
ctx.process_webhook(&body, &req, &q.auth).await?;
Ok(HttpResponse::Ok())
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(add_webhook);
cfg.service(view_webhook);
cfg.service(list_webhooks);
cfg.service(webhook);
}
#[cfg(test)]
mod tests {
use actix_web::{error::ResponseError, http::StatusCode, test};
use hmac::Mac;
use crate::ctx::api::v1::forgejo::{HmacSha256, WebhookPayload};
use crate::db::ForgejoWebhook;
use crate::tests;
use crate::*;
use super::*;
#[actix_rt::test]
async fn test_api_forgejo_webhook() {
const NAME: &str = "apiforgejowebhookuser";
const PASSWORD: &str = "longpasswordasdfa2";
const EMAIL: &str = "apiforgejowebhookuser@a.com";
let (_dir, ctx) = tests::get_ctx().await;
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let page = ctx.add_test_site(NAME.into()).await;
let cookies = get_cookie!(signin_resp);
let app = get_app!(ctx).await;
let payload = AddWebhook {
forgejo_url: Url::parse("https://git.batnsense.net").unwrap(),
};
let add_webhook_resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.forgejo.add_webhook)
.cookie(cookies.clone())
.to_request(),
)
.await;
check_status!(add_webhook_resp, StatusCode::OK);
let response: ForgejoWebhook = actix_web::test::read_body_json(add_webhook_resp).await;
assert_eq!(response.forgejo_url, payload.forgejo_url);
let view_webhook_resp = get_request!(
&app,
&V1_API_ROUTES.forgejo.get_view(&response.auth_token),
cookies.clone()
);
check_status!(view_webhook_resp, StatusCode::OK);
let hook: ForgejoWebhook = actix_web::test::read_body_json(view_webhook_resp).await;
assert_eq!(hook, response);
let list_all_webhooks_resp =
get_request!(&app, V1_API_ROUTES.forgejo.list_webhooks, cookies.clone());
check_status!(list_all_webhooks_resp, StatusCode::OK);
let hooks: Vec<ForgejoWebhook> =
actix_web::test::read_body_json(list_all_webhooks_resp).await;
assert_eq!(vec![hook.clone()], hooks);
let webhook_url = format!("{}?auth={}", V1_API_ROUTES.forgejo.webhook, hook.auth_token);
// test webhook
let mut webhook_payload = WebhookPayload::default();
webhook_payload.reference = format!("refs/origin/{}", page.branch);
webhook_payload.repository.html_url = page.repo;
let body = serde_json::to_string(&webhook_payload).unwrap();
let body = body.as_bytes();
let mut mac = HmacSha256::new_from_slice(hook.forgejo_webhook_secret.as_bytes())
.expect("HMAC can take key of any size");
mac.update(body);
let res = mac.finalize();
let sig = res.into_bytes();
let sig = hex::encode(&sig[..]);
let post_to_webhook_resp = test::call_service(
&app,
post_request!(&webhook_payload, &webhook_url)
.insert_header(("X-Gitea-Delivery", "foobar213randomuuid"))
.insert_header(("X-Gitea-Signature", sig.clone()))
.insert_header(("X-Gitea-Event", "push"))
.cookie(cookies.clone())
.to_request(),
)
.await;
check_status!(post_to_webhook_resp, StatusCode::OK);
// no webhook
let fake_webhook_url = format!(
"{}?auth={}",
V1_API_ROUTES.forgejo.webhook, hook.forgejo_webhook_secret
);
let body = serde_json::to_string(&webhook_payload).unwrap();
let body = body.as_bytes();
let mut mac =
HmacSha256::new_from_slice(b"nosecret").expect("HMAC can take key of any size");
mac.update(body);
let res = mac.finalize();
let fake_sig = res.into_bytes();
let fake_sig = hex::encode(&fake_sig[..]);
let post_to_no_exist_webhook_resp = test::call_service(
&app,
post_request!(&webhook_payload, &fake_webhook_url)
.insert_header(("X-Gitea-Delivery", "foobar213randomuuid"))
.insert_header(("X-Gitea-Signature", fake_sig))
.insert_header(("X-Gitea-Event", "push"))
.cookie(cookies.clone())
.to_request(),
)
.await;
let err = ServiceError::WebhookNotFound;
assert_eq!(post_to_no_exist_webhook_resp.status(), err.status_code());
let resp_err: ErrorToResponse =
actix_web::test::read_body_json(post_to_no_exist_webhook_resp).await;
assert_eq!(resp_err.error, err.to_string());
// no website
let mut webhook_payload = WebhookPayload::default();
webhook_payload.reference = format!("refs/origin/{}", page.branch);
webhook_payload.repository.html_url = "https://no-exist-git.example.org".into();
let body = serde_json::to_string(&webhook_payload).unwrap();
let body = body.as_bytes();
let mut mac = HmacSha256::new_from_slice(hook.forgejo_webhook_secret.as_bytes())
.expect("HMAC can take key of any size");
mac.update(body);
let res = mac.finalize();
let sig = res.into_bytes();
let sig = hex::encode(&sig[..]);
let post_to_no_website_webhook_resp = test::call_service(
&app,
post_request!(&webhook_payload, &webhook_url)
.insert_header(("X-Gitea-Delivery", "foobar213randomuuid"))
.insert_header(("X-Gitea-Signature", sig.clone()))
.insert_header(("X-Gitea-Event", "push"))
.cookie(cookies.clone())
.to_request(),
)
.await;
let err = ServiceError::WebsiteNotFound;
assert_eq!(post_to_no_website_webhook_resp.status(), err.status_code());
let resp_err: ErrorToResponse =
actix_web::test::read_body_json(post_to_no_website_webhook_resp).await;
assert_eq!(resp_err.error, err.to_string());
}
}


@ -20,9 +20,6 @@ use serde::Deserialize;
pub mod account;
pub mod auth;
pub mod forgejo;
pub mod meta;
pub mod pages;
pub mod routes;
pub use routes::ROUTES;
@ -30,9 +27,8 @@ pub use routes::ROUTES;
pub fn services(cfg: &mut ServiceConfig) {
auth::services(cfg);
account::services(cfg);
meta::services(cfg);
forgejo::services(cfg);
pages::services(cfg);
crate::meta::services(cfg);
crate::deploy::services(cfg);
}
#[derive(Deserialize)]


@ -14,180 +14,4 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::{web, HttpResponse, Responder};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::errors::*;
use crate::page::Page;
use crate::AppCtx;
pub mod routes {
pub struct Deploy {
pub update: &'static str,
pub info: &'static str,
}
impl Deploy {
pub const fn new() -> Self {
Self {
update: "/api/v1/update",
info: "/api/v1/info",
}
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct DeployEvent {
pub secret: String,
pub branch: String,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct DeployEventResp {
pub id: Uuid,
}
#[actix_web_codegen_const_routes::post(path = "crate::V1_API_ROUTES.deploy.update")]
#[tracing::instrument(name = "Update webpages", skip(payload, ctx))]
async fn update(payload: web::Json<DeployEvent>, ctx: AppCtx) -> ServiceResult<impl Responder> {
let payload = payload.into_inner();
let id = ctx
.update_site(&payload.secret, Some(payload.branch))
.await?;
Ok(HttpResponse::Ok().json(DeployEventResp { id }))
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct DeploySecret {
pub secret: String,
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct DeployInfo {
pub head: String,
pub remote: String,
pub commit: String,
}
impl DeployInfo {
pub fn from_page(page: &Page) -> ServiceResult<Self> {
let repo = page.open_repo()?;
let head = page.get_deploy_branch(&repo)?;
let commit = Page::get_deploy_commit(&repo)?.to_string();
let remote = Page::get_deploy_remote(&repo)?;
let remote = remote.url().unwrap().to_owned();
Ok(Self {
head,
remote,
commit,
})
}
}
#[actix_web_codegen_const_routes::post(path = "crate::V1_API_ROUTES.deploy.info")]
#[tracing::instrument(name = "Get webpage deploy info", skip(payload, ctx))]
async fn deploy_info(
payload: web::Json<DeploySecret>,
ctx: AppCtx,
) -> ServiceResult<impl Responder> {
if let Ok(page) = ctx.db.get_site_from_secret(&payload.secret).await {
let resp = DeployInfo::from_page(&Page::from_site(&ctx.settings, page))?;
Ok(HttpResponse::Ok().json(resp))
} else {
Err(ServiceError::WebsiteNotFound)
}
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(update);
cfg.service(deploy_info);
}
#[cfg(test)]
mod tests {
use actix_web::{http::StatusCode, test};
use crate::tests;
use crate::*;
use super::*;
#[actix_rt::test]
async fn deploy_update_works() {
const NAME: &str = "dplyupdwrkuser";
const PASSWORD: &str = "longpasswordasdfa2";
const EMAIL: &str = "dplyupdwrkuser@a.com";
let (_dir, ctx) = tests::get_ctx().await;
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, _signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let page = ctx.add_test_site(NAME.into()).await;
let app = get_app!(ctx).await;
let mut payload = DeployEvent {
secret: page.secret.clone(),
branch: page.branch.clone(),
};
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.update).to_request(),
)
.await;
check_status!(resp, StatusCode::OK);
let event_id: DeployEventResp = actix_web::test::read_body_json(resp).await;
let update_event = ctx.db.get_event(&page.domain, &event_id.id).await.unwrap();
assert_eq!(&update_event.site, &page.domain);
assert_eq!(update_event.id, event_id.id);
payload.secret = page.branch.clone();
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.update).to_request(),
)
.await;
check_status!(resp, StatusCode::NOT_FOUND);
}
#[actix_rt::test]
async fn deploy_info_works() {
const NAME: &str = "dplyinfwrkuser";
const PASSWORD: &str = "longpasswordasdfa2";
const EMAIL: &str = "dplyinfwrkuser@a.com";
let (_dir, ctx) = tests::get_ctx().await;
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, _signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let page = ctx.add_test_site(NAME.into()).await;
let app = get_app!(ctx).await;
let mut payload = DeploySecret {
secret: page.secret.clone(),
};
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.info).to_request(),
)
.await;
check_status!(resp, StatusCode::OK);
let response: DeployInfo = actix_web::test::read_body_json(resp).await;
assert_eq!(response.head, page.branch);
assert_eq!(response.remote, page.repo);
payload.secret = page.branch.clone();
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.info).to_request(),
)
.await;
check_status!(resp, StatusCode::NOT_FOUND);
}
}


@ -17,12 +17,10 @@
//! V1 API Routes
use actix_auth_middleware::GetLoginRoute;
use crate::deploy::routes::Deploy;
use crate::meta::routes::Meta;
use crate::serve::routes::Serve;
use super::forgejo::routes::Forgejo;
use super::meta::routes::Meta;
use super::pages::routes::Deploy;
/// constant [Routes](Routes) instance
pub const ROUTES: Routes = Routes::new();
@ -93,7 +91,6 @@ pub struct Routes {
pub account: Account,
/// Meta routes
pub meta: Meta,
pub forgejo: Forgejo,
pub deploy: Deploy,
pub serve: Serve,
}
@ -105,7 +102,6 @@ impl Routes {
auth: Auth::new(),
account: Account::new(),
meta: Meta::new(),
forgejo: Forgejo::new(),
deploy: Deploy::new(),
serve: Serve::new(),
}


@ -1,96 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use reqwest::Client;
use libconductor::EventType;
use libconfig::Config;
use tracing::info;
use crate::errors::ServiceResult;
use crate::{page::Page, settings::Settings};
#[derive(Clone)]
pub struct Conductor {
client: Client,
pub settings: Settings,
}
impl Conductor {
pub fn new(settings: Settings, client: Option<Client>) -> Self {
let client = if let Some(client) = client {
client
} else {
Client::new()
};
Self { client, settings }
}
async fn tx(&self, e: &EventType) -> ServiceResult<()> {
for c in self.settings.conductors.iter() {
info!("Tx event to {}", c.url);
let mut event_url = c.url.clone();
event_url.set_path("/api/v1/events/new");
self.client
.post(event_url)
.basic_auth(&c.username, Some(&c.api_key))
.json(e)
.send()
.await
.unwrap();
}
Ok(())
}
pub async fn new_site(&self, page: Page) -> ServiceResult<()> {
let msg = EventType::NewSite {
hostname: page.domain,
branch: page.branch,
path: page.path,
};
self.tx(&msg).await
}
pub async fn tx_config(&self, config: Config) -> ServiceResult<()> {
self.tx(&EventType::Config { data: config }).await
}
pub async fn delete_site(&self, hostname: String) -> ServiceResult<()> {
self.tx(&EventType::DeleteSite { hostname }).await
}
}
#[cfg(test)]
mod tests {
use super::*;
use uuid::Uuid;
#[actix_rt::test]
pub async fn test_conductor() {
let settings = Settings::new().unwrap();
let c = Conductor::new(settings.clone(), None);
c.delete_site("example.org".into()).await.unwrap();
let page = Page {
secret: "foo".into(),
repo: "foo".into(),
path: "foo".into(),
branch: "foo".into(),
domain: "foo".into(),
pub_id: Uuid::new_v4(),
};
c.new_site(page).await.unwrap();
}
}


@ -1,204 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::web;
use actix_web::HttpRequest;
use hmac::{Hmac, Mac};
use serde::{Deserialize, Serialize};
use sha2::Sha256;
use tracing::{info, warn};
use url::Url;
use crate::ctx::Ctx;
use crate::errors::ServiceError;
use crate::errors::ServiceResult;
pub type HmacSha256 = Hmac<Sha256>;
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct CommitPerson {
pub name: String,
pub email: String,
pub username: String,
}
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
pub struct Commit {
pub id: String,
pub message: String,
pub url: String,
pub author: CommitPerson,
pub committer: CommitPerson,
pub verification: serde_json::Value,
pub timestamp: String,
pub added: serde_json::Value,
pub removed: serde_json::Value,
pub modified: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Person {
pub id: usize,
pub login: String,
pub full_name: String,
pub email: String,
pub avatar_url: String,
pub language: String,
pub is_admin: bool,
pub last_login: String,
pub created: String,
pub restricted: bool,
pub active: bool,
pub prohibit_login: bool,
pub location: String,
pub website: String,
pub description: String,
pub visibility: String,
pub followers_count: usize,
pub following_count: usize,
pub starred_repos_count: usize,
pub username: String,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Permissions {
pub admin: bool,
pub push: bool,
pub pull: bool,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct InternalTracker {
pub enable_time_tracker: bool,
pub allow_only_contributors_to_track_time: bool,
pub enable_issue_dependencies: bool,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Repository {
pub id: usize,
pub owner: Person,
pub name: String,
pub full_name: String,
pub description: String,
pub empty: bool,
pub private: bool,
pub fork: bool,
pub template: bool,
pub parent: Option<serde_json::Value>,
pub mirror: bool,
pub size: usize,
pub html_url: String,
pub ssh_url: String,
pub clone_url: String,
pub original_url: String,
pub website: String,
pub stars_count: usize,
pub forks_count: usize,
pub watchers_count: usize,
pub open_issues_count: usize,
pub open_pr_counter: usize,
pub release_counter: usize,
pub default_branch: String,
pub archived: bool,
pub created_at: String,
pub updated_at: String,
pub permissions: Permissions,
pub has_issues: bool,
pub internal_tracker: InternalTracker,
pub has_wiki: bool,
pub has_pull_requests: bool,
pub has_projects: bool,
pub ignore_whitespace_conflicts: bool,
pub allow_merge_commits: bool,
pub allow_rebase: bool,
pub allow_rebase_explicit: bool,
pub allow_squash_merge: bool,
pub default_merge_style: String,
pub avatar_url: String,
pub internal: bool,
pub mirror_interval: String,
pub mirror_updated: String,
pub repo_transfer: Option<serde_json::Value>,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct WebhookPayload {
#[serde(rename(serialize = "ref", deserialize = "ref"))]
pub reference: String,
pub before: String,
pub after: String,
pub compare_url: String,
pub repository: Repository,
pub pusher: Person,
pub sender: Person,
}
impl Ctx {
pub async fn process_webhook(
&self,
body: &web::Bytes,
req: &HttpRequest,
auth_token: &str,
) -> ServiceResult<()> {
let headers = req.headers();
let _uuid = headers.get("X-Gitea-Delivery").unwrap();
let sig = headers.get("X-Gitea-Signature").unwrap();
let sig = hex::decode(sig).unwrap();
let event_type = headers.get("X-Gitea-Event").unwrap();
let payload: WebhookPayload = serde_json::from_slice(body).unwrap();
let hook = self.db.get_webhook(auth_token).await?;
for url in [
&payload.repository.html_url,
&payload.repository.ssh_url,
&payload.repository.clone_url,
] {
if self.db.site_with_repository_exists(url).await? {
let mut mac = HmacSha256::new_from_slice(hook.forgejo_webhook_secret.as_bytes())?;
mac.update(body);
mac.verify_slice(&sig[..])?;
let site = self.db.get_site_from_repo_url(url).await?;
self.db
.webhook_link_site(auth_token, &Url::parse(&site.repo_url)?)
.await?;
if payload.reference.contains(&site.branch) {
info!(
"[webhook][forgejo/gitea] received update {:?} from {url} repository on deployed branch",
event_type
);
self.update_site(&site.site_secret, Some(site.branch))
.await?;
} else {
info!(
"[webhook][forgejo/gitea] received update {:?} from {url} repository on non-deployed branch {}",
event_type,
payload.reference
);
}
return Ok(());
}
}
warn!(
"[webhook][forgejo/gitea] stray update from {} repository",
payload.repository.html_url
);
Err(ServiceError::WebsiteNotFound)
}
}


@ -16,7 +16,6 @@
*/
pub mod account;
pub mod auth;
pub mod forgejo;
pub mod pages;
#[cfg(test)]


@ -16,20 +16,15 @@
*/
use actix_web::web;
use serde::{Deserialize, Serialize};
use tokio::fs;
use tokio::sync::oneshot;
use uuid::Uuid;
use crate::ctx::Ctx;
use crate::db;
use crate::db::Site;
use crate::errors::*;
use crate::page::Page;
use crate::page_config;
use crate::settings::Settings;
use crate::subdomains::get_random_subdomain;
use crate::utils::get_random;
use crate::utils::get_website_path;
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
/// Data required to add site
@ -43,14 +38,12 @@ impl AddSite {
fn to_site(self, s: &Settings) -> Site {
let site_secret = get_random(32);
let hostname = get_random_subdomain(s);
let pub_id = Uuid::new_v4();
Site {
site_secret,
repo_url: self.repo_url,
branch: self.branch,
hostname,
owner: self.owner,
pub_id,
}
}
}
@ -61,60 +54,24 @@ impl Ctx {
self.db.add_site(&db_site).await?;
let page = Page::from_site(&self.settings, db_site);
page.update(&page.branch)?;
self.db
.log_event(&page.domain, &db::EVENT_TYPE_CREATE)
.await?;
self.conductor.new_site(page.clone()).await?;
if let Some(config) = page_config::load(&page.path, &page.branch) {
self.conductor.tx_config(config).await?;
unimplemented!("Parse and store custom domains in DB");
}
Ok(page)
}
pub async fn update_site(&self, secret: &str, branch: Option<String>) -> ServiceResult<Uuid> {
pub async fn update_site(&self, secret: &str, branch: Option<String>) -> ServiceResult<()> {
if let Ok(db_site) = self.db.get_site_from_secret(secret).await {
let page = Page::from_site(&self.settings, db_site);
let (tx, rx) = oneshot::channel();
{
let page = page.clone();
web::block(move || {
if let Some(branch) = branch {
tx.send(page.update(&branch)).unwrap();
} else {
tx.send(page.update(&page.branch)).unwrap();
}
})
.await
.unwrap();
}
let page = page.clone();
web::block(move || {
if let Some(branch) = branch {
tx.send(page.update(&branch)).unwrap();
} else {
tx.send(page.update(&page.branch)).unwrap();
}
})
.await
.unwrap();
rx.await.unwrap()?;
if let Some(config) = page_config::load(&page.path, &page.branch) {
self.conductor.tx_config(config).await?;
unimplemented!("Parse and store custom domains in DB");
}
self.db
.log_event(&page.domain, &db::EVENT_TYPE_UPDATE)
.await
} else {
Err(ServiceError::WebsiteNotFound)
}
}
pub async fn delete_site(&self, owner: String, site_id: Uuid) -> ServiceResult<()> {
if let Ok(db_site) = self.db.get_site_from_pub_id(site_id, owner).await {
let path = get_website_path(&self.settings, &db_site.hostname);
self.db
.log_event(&db_site.hostname, &db::EVENT_TYPE_DELETE)
.await?;
fs::remove_dir_all(&path).await?;
self.db
.delete_site(&db_site.owner, &db_site.hostname)
.await?;
self.conductor.delete_site(db_site.hostname).await?;
Ok(())
} else {
Err(ServiceError::WebsiteNotFound)


@ -20,23 +20,18 @@ use std::thread;
use crate::db::*;
use crate::settings::Settings;
use argon2_creds::{Config as ArgonConfig, ConfigBuilder as ArgonConfigBuilder, PasswordPolicy};
use reqwest::Client;
use tracing::info;
pub mod api;
use crate::conductor::Conductor;
pub type ArcCtx = Arc<Ctx>;
#[derive(Clone)]
pub struct Ctx {
pub settings: Settings,
pub db: Database,
pub conductor: Conductor,
/// credential-procession policy
pub creds: ArgonConfig,
client: Client,
}
impl Ctx {
@ -54,8 +49,6 @@ impl Ctx {
pub async fn new(settings: Settings) -> Arc<Self> {
let creds = Self::get_creds();
let c = creds.clone();
let client = Client::default();
let conductor = Conductor::new(settings.clone(), Some(client.clone()));
#[allow(unused_variables)]
let init = thread::spawn(move || {
@ -70,10 +63,8 @@ impl Ctx {
Arc::new(Self {
settings,
client,
db,
creds,
conductor,
})
}
}

src/db.rs: 628 changes

@ -16,18 +16,16 @@
*/
use std::str::FromStr;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPoolOptions;
use sqlx::types::time::OffsetDateTime;
//use sqlx::types::Json;
use sqlx::ConnectOptions;
use sqlx::PgPool;
use tracing::error;
use url::Url;
use uuid::Uuid;
use url::quirks::hostname;
use crate::errors::*;
use crate::utils;
/// Connect to databse
pub enum ConnectionOptions {
@ -50,7 +48,6 @@ impl ConnectionOptions {
async fn connect(self) -> ServiceResult<Database> {
let pool = match self {
Self::Fresh(fresh) => {
tracing::info!("DATABASE URL: {}", fresh.url);
let mut connect_options =
sqlx::postgres::PgConnectOptions::from_str(&fresh.url).unwrap();
if fresh.disable_logging {
@ -85,7 +82,6 @@ impl Database {
.await
.unwrap();
//.map_err(|e| ServiceError::ServiceError(Box::new(e)))?;
self.create_event_type().await?;
Ok(())
}
@ -259,14 +255,13 @@ impl Database {
sqlx::query!(
"
INSERT INTO librepages_sites
(site_secret, repo_url, branch, hostname, pub_id, owned_by)
VALUES ($1, $2, $3, $4, $5, ( SELECT ID FROM librepages_users WHERE name = $6 ));
(site_secret, repo_url, branch, hostname, owned_by)
VALUES ($1, $2, $3, $4, ( SELECT ID FROM librepages_users WHERE name = $5 ));
",
msg.site_secret,
msg.repo_url,
msg.branch,
msg.hostname,
msg.pub_id,
msg.owner,
)
.execute(&self.pool)
@ -276,106 +271,19 @@ impl Database {
Ok(())
}
pub async fn get_site_from_repo_url(&self, repo_url: &str) -> ServiceResult<Site> {
struct S {
repo_url: String,
branch: String,
hostname: String,
owned_by: i32,
site_secret: String,
pub_id: Uuid,
}
let site = sqlx::query_as!(
S,
"SELECT repo_url, site_secret, branch, hostname, owned_by, pub_id
FROM librepages_sites
WHERE repo_url = $1
AND deleted = false;
",
repo_url,
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
struct Owner {
name: String,
}
let owner = sqlx::query_as!(
Owner,
"SELECT name FROM librepages_users WHERE ID = $1",
site.owned_by
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
let site = Site {
site_secret: site.site_secret,
branch: site.branch,
hostname: site.hostname,
owner: owner.name,
repo_url: site.repo_url,
pub_id: site.pub_id,
};
Ok(site)
}
pub async fn get_site_from_pub_id(&self, pub_id: Uuid, owner: String) -> ServiceResult<Site> {
struct S {
repo_url: String,
branch: String,
hostname: String,
owned_by: i32,
site_secret: String,
}
let site = sqlx::query_as!(
S,
"SELECT repo_url, branch, hostname, owned_by, site_secret
FROM librepages_sites
WHERE pub_id = $1
AND
owned_by = (SELECT ID from librepages_users WHERE name = $2)
AND
deleted = false;
",
&pub_id,
&owner,
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
let site = Site {
site_secret: site.site_secret,
branch: site.branch,
hostname: site.hostname,
owner,
repo_url: site.repo_url,
pub_id,
};
Ok(site)
}
pub async fn get_site_from_secret(&self, site_secret: &str) -> ServiceResult<Site> {
struct S {
repo_url: String,
branch: String,
hostname: String,
owned_by: i32,
pub_id: Uuid,
}
let site = sqlx::query_as!(
S,
"SELECT repo_url, branch, hostname, owned_by, pub_id
"SELECT repo_url, branch, hostname, owned_by
FROM librepages_sites
WHERE site_secret = $1
AND deleted = false;
",
site_secret,
)
@ -401,7 +309,6 @@ impl Database {
hostname: site.hostname,
owner: owner.name,
repo_url: site.repo_url,
pub_id: site.pub_id,
};
Ok(site)
@ -410,10 +317,9 @@ impl Database {
pub async fn get_site(&self, owner: &str, hostname: &str) -> ServiceResult<Site> {
let site = sqlx::query_as!(
InnerSite,
"SELECT site_secret, repo_url, branch, hostname, pub_id
"SELECT site_secret, repo_url, branch, hostname
FROM librepages_sites
WHERE deleted = false
AND owned_by = (SELECT ID FROM librepages_users WHERE name = $1 )
WHERE owned_by = (SELECT ID FROM librepages_users WHERE name = $1 )
AND hostname = $2;
",
owner,
@ -431,10 +337,9 @@ impl Database {
pub async fn list_all_sites(&self, owner: &str) -> ServiceResult<Vec<Site>> {
let mut sites = sqlx::query_as!(
InnerSite,
"SELECT site_secret, repo_url, branch, hostname, pub_id
"SELECT site_secret, repo_url, branch, hostname
FROM librepages_sites
WHERE deleted = false
AND owned_by = (SELECT ID FROM librepages_users WHERE name = $1 );
WHERE owned_by = (SELECT ID FROM librepages_users WHERE name = $1 );
",
owner,
)
@ -449,7 +354,7 @@ impl Database {
pub async fn delete_site(&self, owner: &str, hostname: &str) -> ServiceResult<()> {
sqlx::query!(
"UPDATE librepages_sites SET deleted = true
"DELETE FROM librepages_sites
WHERE hostname = ($1)
AND owned_by = ( SELECT ID FROM librepages_users WHERE name = $2);
",
@ -465,7 +370,7 @@ impl Database {
/// check if hostname exists
pub async fn hostname_exists(&self, hostname: &str) -> ServiceResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (SELECT 1 from librepages_sites WHERE hostname = $1 AND deleted = false)",
"SELECT EXISTS (SELECT 1 from librepages_sites WHERE hostname = $1)",
hostname,
)
.fetch_one(&self.pool)
@ -479,350 +384,12 @@ impl Database {
Ok(resp)
}
/// check if site with repository exists
pub async fn site_with_repository_exists(&self, url: &str) -> ServiceResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (SELECT 1 from librepages_sites WHERE repo_url = $1)",
url,
)
.fetch_one(&self.pool)
.await
.map_err(map_register_err)?;
let mut resp = false;
if let Some(x) = res.exists {
resp = x;
}
Ok(resp)
}
/// check if event type exists
async fn event_type_exists(&self, event: &Event) -> ServiceResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (SELECT 1 from librepages_deploy_event_type WHERE name = $1)",
event.name,
)
.fetch_one(&self.pool)
.await
.map_err(map_register_err)?;
let mut resp = false;
if let Some(x) = res.exists {
resp = x;
}
Ok(resp)
}
async fn create_event_type(&self) -> ServiceResult<()> {
for e in &*EVENTS {
if !self.event_type_exists(e).await? {
sqlx::query!(
"INSERT INTO librepages_deploy_event_type
(name) VALUES ($1) ON CONFLICT (name) DO NOTHING;",
e.name
)
.execute(&self.pool)
.await
.map_err(map_register_err)?;
}
}
Ok(())
}
pub async fn log_event(&self, hostname: &str, event: &Event) -> ServiceResult<Uuid> {
let now = now_unix_time_stamp();
let uuid = Uuid::new_v4();
sqlx::query!(
"INSERT INTO librepages_site_deploy_events
(event_type, time, site, pub_id) VALUES (
(SELECT iD from librepages_deploy_event_type WHERE name = $1),
$2,
(SELECT ID from librepages_sites WHERE hostname = $3),
$4
);
",
event.name,
&now,
hostname,
uuid,
)
.execute(&self.pool)
.await
.map_err(map_register_err)?;
Ok(uuid)
}
pub async fn get_event(
&self,
hostname: &str,
event_id: &Uuid,
) -> ServiceResult<LibrePagesEvent> {
let event = sqlx::query_as!(
InnerLibrepagesEvent,
"SELECT
librepages_deploy_event_type.name,
librepages_site_deploy_events.time,
librepages_site_deploy_events.pub_id
FROM
librepages_site_deploy_events
INNER JOIN librepages_deploy_event_type ON
librepages_deploy_event_type.ID = librepages_site_deploy_events.event_type
WHERE
librepages_site_deploy_events.site = (
SELECT ID FROM librepages_sites WHERE hostname = $1
)
AND
librepages_site_deploy_events.pub_id = $2
",
hostname,
event_id,
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::AccountNotFound))?;
Ok(LibrePagesEvent {
id: event.pub_id,
time: event.time,
event_type: Event::from_str(&event.name).unwrap(),
site: hostname.to_owned(),
})
}
pub async fn get_latest_update_event(
&self,
hostname: &str,
) -> ServiceResult<Option<LibrePagesEvent>> {
self.get_latest_event_of_type(hostname, &EVENT_TYPE_UPDATE)
.await
}
async fn get_latest_event_of_type(
&self,
hostname: &str,
event_type: &Event,
) -> ServiceResult<Option<LibrePagesEvent>> {
struct InnerLibrepagesEventNameless {
time: OffsetDateTime,
pub_id: Uuid,
}
let event = sqlx::query_as!(
InnerLibrepagesEventNameless,
"SELECT
time,
pub_id
FROM
librepages_site_deploy_events
WHERE
site = (SELECT ID FROM librepages_sites WHERE hostname = $1)
AND
event_type = (SELECT ID FROM librepages_deploy_event_type WHERE name = $2)
AND
time = (
SELECT MAX(time)
FROM
librepages_site_deploy_events
WHERE
site = (SELECT ID FROM librepages_sites WHERE hostname = $1)
)
",
hostname,
event_type.name
)
.fetch_one(&self.pool)
.await;
match event {
Ok(event) => Ok(Some(LibrePagesEvent {
id: event.pub_id,
time: event.time,
event_type: event_type.clone(),
site: hostname.to_owned(),
})),
Err(sqlx::Error::RowNotFound) => Ok(None),
Err(e) => Err(map_register_err(e)),
}
// map_row_not_found_err(e, ServiceError::AccountNotFound))?;
}
pub async fn list_all_site_events(
&self,
hostname: &str,
) -> ServiceResult<Vec<LibrePagesEvent>> {
let mut inner_events = sqlx::query_as!(
InnerLibrepagesEvent,
"SELECT
librepages_deploy_event_type.name,
librepages_site_deploy_events.time,
librepages_site_deploy_events.pub_id
FROM
librepages_site_deploy_events
INNER JOIN librepages_deploy_event_type ON
librepages_deploy_event_type.ID = librepages_site_deploy_events.event_type
WHERE
librepages_site_deploy_events.site = (
SELECT ID FROM librepages_sites WHERE hostname = $1
);
",
hostname,
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::AccountNotFound))?;
let mut events = Vec::with_capacity(inner_events.len());
for e in inner_events.drain(0..) {
events.push(LibrePagesEvent {
id: e.pub_id,
time: e.time,
event_type: Event::from_str(&e.name).unwrap(),
site: hostname.to_owned(),
})
}
Ok(events)
}
/// register a new webhook
pub async fn new_webhook(
&self,
forgejo_url: Url,
owner: &str,
) -> ServiceResult<ForgejoWebhook> {
let hook = ForgejoWebhook::new(forgejo_url);
sqlx::query!(
"INSERT INTO librepages_forgejo_webhooks
(forgejo_url , auth_token, forgejo_webhook_secret, owned_by) VALUES ($1, $2, $3,
(SELECT ID FROM librepages_users WHERE name = $4)
)",
hook.forgejo_url.as_str(),
&hook.auth_token,
&hook.forgejo_webhook_secret,
owner,
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::AccountNotFound))?;
Ok(hook)
}
pub async fn list_all_webhooks_with_owner(
&self,
owner: &str,
) -> ServiceResult<Vec<ForgejoWebhook>> {
let mut db_hooks = sqlx::query_as!(
InnerForgejoWebhook,
"SELECT
forgejo_url, auth_token, forgejo_webhook_secret
FROM
librepages_forgejo_webhooks
WHERE
owned_by = (SELECT ID FROM librepages_users WHERE name = $1);
",
owner
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
let mut hooks = Vec::with_capacity(db_hooks.len());
for hook in db_hooks.drain(0..) {
hooks.push(hook.to_webhook()?)
}
Ok(hooks)
}
pub async fn get_webhook_with_owner(
&self,
auth_token: &str,
owner: &str,
) -> ServiceResult<ForgejoWebhook> {
let h = sqlx::query_as!(
InnerForgejoWebhook,
"SELECT
forgejo_url, auth_token, forgejo_webhook_secret
FROM
librepages_forgejo_webhooks
WHERE
auth_token = $1
AND
owned_by = (SELECT ID FROM librepages_users WHERE name = $2);
",
auth_token,
owner
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
h.to_webhook()
}
pub async fn get_webhook(&self, auth_token: &str) -> ServiceResult<ForgejoWebhook> {
let h = sqlx::query_as!(
InnerForgejoWebhook,
"SELECT forgejo_url, auth_token, forgejo_webhook_secret
FROM librepages_forgejo_webhooks
WHERE auth_token = $1
",
auth_token,
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
h.to_webhook()
}
/// register a webhook against a site
pub async fn webhook_link_site(&self, auth_token: &str, repo_url: &Url) -> ServiceResult<()> {
struct Site {
id: i32,
}
let sites = sqlx::query_as!(
Site,
"SELECT ID FROM librepages_sites WHERE repo_url = $1",
repo_url.as_str()
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, ServiceError::WebsiteNotFound))?;
for site in sites {
sqlx::query!(
"INSERT INTO librepages_forgejo_webhook_site_mapping
(site_id, forgejo_webhook_id) VALUES (
(SELECT ID FROM librepages_sites WHERE repo_url = $1 AND ID = $2),
(SELECT ID FROM librepages_forgejo_webhooks WHERE auth_token = $3)
) ON CONFLICT (site_id, forgejo_webhook_id) DO NOTHING;",
repo_url.as_str(),
site.id,
auth_token
)
.execute(&self.pool)
.await
//.unwrap();
.map_err(|e| map_row_not_found_err(e, ServiceError::WebhookNotFound))?;
}
Ok(())
}
}
struct InnerSite {
site_secret: String,
repo_url: String,
branch: String,
hostname: String,
pub_id: Uuid,
}
impl InnerSite {
@ -832,7 +399,6 @@ impl InnerSite {
repo_url: self.repo_url,
branch: self.branch,
hostname: self.hostname,
pub_id: self.pub_id,
owner,
}
}
@ -843,7 +409,6 @@ impl InnerSite {
pub struct Site {
pub site_secret: String,
pub repo_url: String,
pub pub_id: Uuid,
pub branch: String,
pub hostname: String,
pub owner: String,
@ -887,78 +452,6 @@ pub struct NameHash {
pub hash: String,
}
#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)]
pub struct Event {
pub name: String,
}
impl Event {
fn new(name: String) -> Self {
Self { name }
}
pub fn from_str(name: &str) -> Option<Event> {
(*EVENTS).into_iter().find(|e| e.name == name).cloned()
}
}
lazy_static! {
pub static ref EVENT_TYPE_CREATE: Event = Event::new("site.event.create".into());
pub static ref EVENT_TYPE_UPDATE: Event = Event::new("site.event.update".into());
pub static ref EVENT_TYPE_DELETE: Event = Event::new("site.event.delete".into());
pub static ref EVENTS: [&'static Event; 3] = [
&*EVENT_TYPE_CREATE,
&*EVENT_TYPE_DELETE,
&*EVENT_TYPE_UPDATE
];
}
struct InnerLibrepagesEvent {
name: String,
time: OffsetDateTime,
pub_id: Uuid,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct LibrePagesEvent {
pub event_type: Event,
pub time: OffsetDateTime,
pub site: String,
pub id: Uuid,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct ForgejoWebhook {
pub forgejo_url: Url,
pub forgejo_webhook_secret: String,
pub auth_token: String,
}
impl ForgejoWebhook {
fn new(forgejo_url: Url) -> Self {
Self {
forgejo_url,
forgejo_webhook_secret: utils::get_random(40),
auth_token: utils::get_random(40),
}
}
}
struct InnerForgejoWebhook {
forgejo_url: String,
auth_token: String,
forgejo_webhook_secret: String,
}
impl InnerForgejoWebhook {
fn to_webhook(self) -> ServiceResult<ForgejoWebhook> {
Ok(ForgejoWebhook {
forgejo_url: Url::parse(&self.forgejo_url)?,
auth_token: self.auth_token,
forgejo_webhook_secret: self.forgejo_webhook_secret,
})
}
}
fn now_unix_time_stamp() -> OffsetDateTime {
OffsetDateTime::now_utc()
}
@ -1011,17 +504,9 @@ fn map_register_err(e: sqlx::Error) -> ServiceError {
#[cfg(test)]
mod tests {
use std::collections::HashSet;
use super::*;
use crate::settings::Settings;
#[test]
fn event_names_are_unique() {
let mut uniq = HashSet::new();
assert!(EVENTS.into_iter().all(move |x| uniq.insert(x.name.clone())));
}
#[actix_rt::test]
async fn db_works() {
let settings = Settings::new().unwrap();
@ -1153,13 +638,6 @@ mod tests {
const PASSWORD: &str = "pasdfasdfasdfadf";
db.migrate().await.unwrap();
// check if events are created
for e in &*EVENTS {
println!("Testing event type exists {}", e.name);
assert!(db.event_type_exists(e).await.unwrap());
}
let p = super::Register {
username: NAME,
email: EMAIL,
@ -1178,29 +656,20 @@ mod tests {
let site = Site {
site_secret: "foobar".into(),
repo_url: "https://git.test_db_sites.example.org/LibrePages/librepages.git".into(),
repo_url: "https://git.batsense.net/LibrePages/librepages.git".into(),
branch: "librepages".into(),
hostname: "db_works.tests.librepages.librepages.org".into(),
pub_id: Uuid::new_v4(),
owner: p.username.into(),
};
// test if hostname exists. Should be false
assert!(!db.hostname_exists(&site.hostname).await.unwrap());
assert!(!db
.site_with_repository_exists(&site.repo_url)
.await
.unwrap());
// testing adding site
db.add_site(&site).await.unwrap();
// test if hostname exists. Should be true
assert!(db.hostname_exists(&site.hostname).await.unwrap());
assert!(db
.site_with_repository_exists(&site.repo_url)
.await
.unwrap());
// get site
let db_site = db.get_site(p.username, &site.hostname).await.unwrap();
@ -1212,84 +681,11 @@ mod tests {
db.get_site_from_secret(&site.site_secret).await.unwrap()
);
// get site by repo_url
assert_eq!(
db_site,
db.get_site_from_repo_url(&site.repo_url).await.unwrap()
);
// list all sites owned by user
let db_sites = db.list_all_sites(p.username).await.unwrap();
assert_eq!(db_sites.len(), 1);
assert_eq!(db_sites, vec![site.clone()]);
// add event to site
let event_id = db
.log_event(&site.hostname, &EVENT_TYPE_CREATE)
.await
.unwrap();
let event = db.get_event(&site.hostname, &event_id).await.unwrap();
assert_eq!(event.id, event_id);
assert_eq!(event.event_type, *EVENT_TYPE_CREATE);
assert_eq!(event.site, site.hostname);
assert_eq!(
db.list_all_site_events(&site.hostname).await.unwrap(),
vec![event]
);
// when no update event exist, None is returned
assert!(db
.get_latest_update_event(&site.hostname)
.await
.unwrap()
.is_none());
// add multiple update events, see if latest is returned
db.log_event(&site.hostname, &EVENT_TYPE_UPDATE)
.await
.unwrap();
let latest_update_event_id = db
.log_event(&site.hostname, &EVENT_TYPE_UPDATE)
.await
.unwrap();
let latest_update_event_id_from_db = db
.get_latest_update_event(&site.hostname)
.await
.unwrap()
.unwrap();
assert_eq!(
latest_update_event_id_from_db.event_type,
*EVENT_TYPE_UPDATE
);
assert_eq!(latest_update_event_id_from_db.id, latest_update_event_id);
// add webhook
let forgejo_url = Url::parse("https://example.org").unwrap();
let hook = db.new_webhook(forgejo_url, NAME).await.unwrap();
assert_eq!(hook, db.get_webhook(&hook.auth_token).await.unwrap());
assert_eq!(
vec![hook.clone()],
db.list_all_webhooks_with_owner(NAME).await.unwrap()
);
assert_eq!(
hook,
db.get_webhook_with_owner(&hook.auth_token, NAME)
.await
.unwrap()
);
assert_eq!(
db.get_webhook(&hook.forgejo_webhook_secret).await.err(),
Some(ServiceError::WebhookNotFound)
);
db.webhook_link_site(&hook.auth_token, &Url::parse(&site.repo_url).unwrap())
.await
.unwrap();
db.webhook_link_site(&hook.auth_token, &Url::parse(&site.repo_url).unwrap())
.await
.unwrap();
// delete site
db.delete_site(p.username, &site.hostname).await.unwrap();
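
Before moving on, a minimal sketch (editor's addition, assuming a migrated `Database` handle and a site previously inserted with `add_site`) of how the event helpers kept on master are driven; the helper name `record_deploy` is hypothetical:

    use crate::db::{Database, EVENT_TYPE_CREATE};
    use crate::errors::ServiceResult;

    // Log a deploy event for a site and read it back, mirroring what the HTTP layer does.
    async fn record_deploy(db: &Database, hostname: &str) -> ServiceResult<()> {
        let event_id = db.log_event(hostname, &EVENT_TYPE_CREATE).await?;
        let event = db.get_event(hostname, &event_id).await?;
        assert_eq!(event.event_type, *EVENT_TYPE_CREATE);

        // The newest "update" event, when present, is what the dashboard shows as the last deploy.
        if let Some(update) = db.get_latest_update_event(hostname).await? {
            println!("{} last updated at {}", update.site, update.time);
        }
        Ok(())
    }
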

src/deploy.rs (new file, 183 lines)
View file

@ -0,0 +1,183 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::{web, HttpResponse, Responder};
use serde::{Deserialize, Serialize};
use tokio::sync::oneshot;
use crate::errors::*;
use crate::page::Page;
use crate::AppCtx;
pub mod routes {
pub struct Deploy {
pub update: &'static str,
pub info: &'static str,
}
impl Deploy {
pub const fn new() -> Self {
Self {
update: "/api/v1/update",
info: "/api/v1/info",
}
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct DeployEvent {
pub secret: String,
pub branch: String,
}
#[actix_web_codegen_const_routes::post(path = "crate::V1_API_ROUTES.deploy.update")]
#[tracing::instrument(name = "Update webpages", skip(payload, ctx))]
async fn update(payload: web::Json<DeployEvent>, ctx: AppCtx) -> ServiceResult<impl Responder> {
let payload = payload.into_inner();
ctx.update_site(&payload.secret, Some(payload.branch))
.await?;
Ok(HttpResponse::Ok())
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct DeploySecret {
pub secret: String,
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct DeployInfo {
pub head: String,
pub remote: String,
pub commit: String,
}
impl DeployInfo {
pub fn from_page(page: &Page) -> ServiceResult<Self> {
let repo = page.open_repo()?;
let head = page.get_deploy_branch(&repo)?;
let commit = Page::get_deploy_commit(&repo)?.to_string();
let remote = Page::get_deploy_remote(&repo)?;
let remote = remote.url().unwrap().to_owned();
Ok(Self {
head,
remote,
commit,
})
}
}
#[actix_web_codegen_const_routes::post(path = "crate::V1_API_ROUTES.deploy.info")]
#[tracing::instrument(name = "Get webpage deploy info", skip(payload, ctx))]
async fn deploy_info(
payload: web::Json<DeploySecret>,
ctx: AppCtx,
) -> ServiceResult<impl Responder> {
if let Ok(page) = ctx.db.get_site_from_secret(&payload.secret).await {
let resp = DeployInfo::from_page(&Page::from_site(&ctx.settings, page))?;
Ok(HttpResponse::Ok().json(resp))
} else {
Err(ServiceError::WebsiteNotFound)
}
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(update);
cfg.service(deploy_info);
}
#[cfg(test)]
mod tests {
use actix_web::{http::StatusCode, test};
use crate::tests;
use crate::*;
use super::*;
#[actix_rt::test]
async fn deploy_update_works() {
const NAME: &str = "dplyupdwrkuser";
const PASSWORD: &str = "longpasswordasdfa2";
const EMAIL: &str = "dplyupdwrkuser@a.com";
let (_dir, ctx) = tests::get_ctx().await;
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, _signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let page = ctx.add_test_site(NAME.into()).await;
let app = get_app!(ctx).await;
let mut payload = DeployEvent {
secret: page.secret.clone(),
branch: page.branch.clone(),
};
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.update).to_request(),
)
.await;
check_status!(resp, StatusCode::OK);
payload.secret = page.branch.clone();
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.update).to_request(),
)
.await;
check_status!(resp, StatusCode::NOT_FOUND);
}
#[actix_rt::test]
async fn deploy_info_works() {
const NAME: &str = "dplyinfwrkuser";
const PASSWORD: &str = "longpasswordasdfa2";
const EMAIL: &str = "dplyinfwrkuser@a.com";
let (_dir, ctx) = tests::get_ctx().await;
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, _signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let page = ctx.add_test_site(NAME.into()).await;
let app = get_app!(ctx).await;
let mut payload = DeploySecret {
secret: page.secret.clone(),
};
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.info).to_request(),
)
.await;
check_status!(resp, StatusCode::OK);
let response: DeployInfo = actix_web::test::read_body_json(resp).await;
assert_eq!(response.head, page.branch);
assert_eq!(response.remote, page.repo);
payload.secret = page.branch.clone();
let resp = test::call_service(
&app,
post_request!(&payload, V1_API_ROUTES.deploy.info).to_request(),
)
.await;
check_status!(resp, StatusCode::NOT_FOUND);
}
}
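
As a usage illustration (editor's sketch, not from the diff): a CI job could drive these two endpoints with reqwest, which this diff already pulls into the context module. The base URL, the secret, and the helper name `redeploy` are placeholders, and the snippet assumes reqwest's `json` feature is enabled:

    use crate::deploy::{DeployEvent, DeployInfo, DeploySecret};

    async fn redeploy(base: &str, secret: &str, branch: &str) -> Result<(), reqwest::Error> {
        let client = reqwest::Client::new();

        // Trigger a rebuild of the site identified by its deploy secret.
        let update = DeployEvent { secret: secret.into(), branch: branch.into() };
        client.post(format!("{base}/api/v1/update")).json(&update).send().await?;

        // Ask which commit is currently deployed.
        let info: DeployInfo = client
            .post(format!("{base}/api/v1/info"))
            .json(&DeploySecret { secret: secret.into() })
            .send()
            .await?
            .json()
            .await?;
        println!("deployed commit {} from {}", info.commit, info.remote);
        Ok(())
    }
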

View file

@ -28,8 +28,6 @@ use argon2_creds::errors::CredsError;
use config::ConfigError as ConfigErrorInner;
use derive_more::{Display, Error};
use git2::Error as GitError;
use hmac::digest::InvalidLength;
use hmac::digest::MacError;
use serde::{Deserialize, Serialize};
use url::ParseError;
@ -88,10 +86,6 @@ pub enum ServiceError {
/// website not found
WebsiteNotFound,
#[display(fmt = "File not found")]
/// File not found
FileNotFound,
/// when the a path configured for a page is already taken
#[display(
fmt = "Path already used for another website. lhs: {:?} rhs: {:?}",
@ -180,24 +174,6 @@ pub enum ServiceError {
#[display(fmt = "Passwords don't match")]
/// passwords don't match
PasswordsDontMatch,
/// Webhook not found
#[display(fmt = "Webhook not found")]
WebhookNotFound,
}
impl From<InvalidLength> for ServiceError {
#[cfg(not(tarpaulin_include))]
fn from(_: InvalidLength) -> ServiceError {
ServiceError::InternalServerError
}
}
impl From<MacError> for ServiceError {
#[cfg(not(tarpaulin_include))]
fn from(_: MacError) -> ServiceError {
ServiceError::WebhookNotFound
}
}
impl From<ParseError> for ServiceError {
@ -260,7 +236,6 @@ impl ResponseError for ServiceError {
ServiceError::EmailTaken => StatusCode::BAD_REQUEST,
ServiceError::UsernameTaken => StatusCode::BAD_REQUEST,
ServiceError::AccountNotFound => StatusCode::NOT_FOUND,
ServiceError::FileNotFound => StatusCode::NOT_FOUND,
ServiceError::ProfanityError => StatusCode::BAD_REQUEST, //BADREQUEST,
ServiceError::BlacklistError => StatusCode::BAD_REQUEST, //BADREQUEST,
@ -272,7 +247,6 @@ impl ResponseError for ServiceError {
ServiceError::ClosedForRegistration => StatusCode::FORBIDDEN, //FORBIDDEN,
ServiceError::NotAnEmail => StatusCode::BAD_REQUEST, //BADREQUEST,
ServiceError::WrongPassword => StatusCode::UNAUTHORIZED, //UNAUTHORIZED,
ServiceError::WebhookNotFound => StatusCode::NOT_FOUND, //NOT FOUND,
}
}
}

View file

@ -173,6 +173,8 @@ fn read_file_inner(
}
let inner = |repo: &git2::Repository, tree: &git2::Tree| -> ServiceResult<FileInfo> {
// let head = repo.head().unwrap();
// let tree = head.peel_to_tree().unwrap();
let mut path = path;
if path == "/" {
let content = get_index_file(tree.id(), repo);
@ -185,14 +187,8 @@ fn read_file_inner(
if path.starts_with('/') {
path = path.trim_start_matches('/');
}
fn file_not_found(e: git2::Error) -> ServiceError {
if e.code() == ErrorCode::NotFound && e.class() == ErrorClass::Tree {
return ServiceError::FileNotFound;
}
e.into()
}
let entry = tree.get_path(Path::new(path)).map_err(file_not_found)?;
let entry = tree.get_path(Path::new(path)).unwrap();
//FileType::Dir(items)
let mode: GitFileMode = entry.clone().into();
if let Some(name) = entry.name() {
@ -216,17 +212,17 @@ fn read_file_inner(
}
};
//let repo = git2::Repository::open(repo_path).unwrap();
inner(repo, tree)
}
#[cfg(test)]
pub mod tests {
use super::*;
use mktemp::Temp;
const FILE_CONTENT: &str = "foobar";
pub fn write_file_util(repo_path: &str, file_name: &str, content: Option<&str>) {
fn write_file_util(path: &str) {
// TODO change updated in DB
let inner = |repo: &mut Repository| -> ServiceResult<()> {
let mut tree_builder = match repo.head() {
@ -237,14 +233,10 @@ pub mod tests {
let odb = repo.odb().unwrap();
let content = if content.is_some() {
content.as_ref().unwrap()
} else {
FILE_CONTENT
};
let obj = odb.write(ObjectType::Blob, content.as_bytes()).unwrap();
tree_builder.insert(file_name, obj, 0o100644).unwrap();
let obj = odb
.write(ObjectType::Blob, FILE_CONTENT.as_bytes())
.unwrap();
tree_builder.insert("README.txt", obj, 0o100644).unwrap();
let tree_hash = tree_builder.write().unwrap();
let author = Signature::now("librepages", "admin@librepages.org").unwrap();
let committer = Signature::now("librepages", "admin@librepages.org").unwrap();
@ -276,34 +268,26 @@ pub mod tests {
Ok(())
};
if Repository::open(repo_path).is_err() {
let _ = Repository::init(repo_path);
if Repository::open(path).is_err() {
let _ = Repository::init(path);
}
let mut repo = Repository::open(repo_path).unwrap();
let mut repo = Repository::open(path).unwrap();
let _ = inner(&mut repo);
}
#[test]
fn test_git_write_read_works() {
const FILENAME: &str = "README.txt";
const PATH: &str = "/tmp/librepges/test_git_write_read_works";
let tmp_dir = Temp::new_dir().unwrap();
let path = tmp_dir.to_str().unwrap();
write_file_util(path, FILENAME, None);
let resp = read_file(&Path::new(path).into(), FILENAME).unwrap();
assert_eq!(resp.filename, FILENAME);
write_file_util(PATH);
let resp = read_file(&Path::new(PATH).into(), "README.txt").unwrap();
assert_eq!(resp.filename, "README.txt");
assert_eq!(resp.content.bytes(), FILE_CONTENT.as_bytes());
assert_eq!(resp.mime.first().unwrap(), "text/plain");
let resp = read_preview_file(&Path::new(path).into(), "master", FILENAME).unwrap();
assert_eq!(resp.filename, FILENAME);
let resp = read_preview_file(&Path::new(PATH).into(), "master", "README.txt").unwrap();
assert_eq!(resp.filename, "README.txt");
assert_eq!(resp.content.bytes(), FILE_CONTENT.as_bytes());
assert_eq!(resp.mime.first().unwrap(), "text/plain");
assert_eq!(
read_preview_file(&Path::new(path).into(), "master", "file-does-not-exist.txt"),
Err(ServiceError::FileNotFound)
);
}
}
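
For illustration, a minimal sketch (editor's addition) of how the new FileNotFound mapping can be consumed by serving code. It would live inside src/git.rs, where `read_file`, `ServiceError`, `ServiceResult`, and `Path` are already in scope; the name `try_serve` is hypothetical:

    // Read a file from the deployed tree, turning a missing path into a 404-style error.
    fn try_serve(repo_path: &str, rel_path: &str) -> ServiceResult<()> {
        match read_file(&Path::new(repo_path).into(), rel_path) {
            Ok(file) => {
                println!("serving {} ({:?})", file.filename, file.mime.first());
                Ok(())
            }
            // Distinguishing missing files lets callers answer 404 instead of 500.
            Err(ServiceError::FileNotFound) => Err(ServiceError::FileNotFound),
            Err(e) => Err(e),
        }
    }
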

View file

@ -27,13 +27,13 @@ use tracing::info;
use tracing_actix_web::TracingLogger;
mod api;
mod conductor;
mod ctx;
mod db;
mod deploy;
mod errors;
mod git;
mod meta;
mod page;
mod page_config;
mod pages;
mod preview;
mod serve;
@ -109,7 +109,6 @@ async fn main() -> std::io::Result<()> {
async fn serve(settings: Settings, ctx: AppCtx) -> std::io::Result<()> {
let ip = settings.server.get_ip();
let workers = settings.server.workers.unwrap_or_else(num_cpus::get);
info!("Starting server on: http://{}", ip);
HttpServer::new(move || {
@ -118,7 +117,6 @@ async fn serve(settings: Settings, ctx: AppCtx) -> std::io::Result<()> {
.wrap(actix_middleware::Compress::default())
.app_data(ctx.clone())
.app_data(get_json_err())
.wrap(get_identity_service(&(settings.clone())))
.wrap(
actix_middleware::DefaultHeaders::new()
.add(("Permissions-Policy", "interest-cohort=()")),
@ -128,7 +126,7 @@ async fn serve(settings: Settings, ctx: AppCtx) -> std::io::Result<()> {
))
.configure(services)
})
.workers(workers)
.workers(settings.server.workers.unwrap_or_else(num_cpus::get))
.bind(ip)
.unwrap()
.run()

View file

@ -26,7 +26,6 @@ use serde::Deserialize;
use serde::Serialize;
#[cfg(not(test))]
use tracing::{debug, error, info};
use uuid::Uuid;
use crate::db::Site;
use crate::errors::*;
@ -40,7 +39,6 @@ pub struct Page {
pub path: String,
pub branch: String,
pub domain: String,
pub pub_id: Uuid,
}
impl Page {
@ -54,7 +52,6 @@ impl Page {
.to_owned(),
domain: s.hostname,
branch: s.branch,
pub_id: s.pub_id,
}
}
pub fn open_repo(&self) -> ServiceResult<Repository> {
@ -268,19 +265,16 @@ mod tests {
use git2::Repository;
use mktemp::Temp;
use crate::tests;
#[actix_rt::test]
async fn pages_works() {
let tmp_dir = Temp::new_dir().unwrap();
assert!(tmp_dir.exists(), "tmp directory successfully created");
let mut page = Page {
secret: String::default(),
repo: tests::REPO_URL.into(),
repo: "https://github.com/mcaptcha/website".to_owned(),
path: tmp_dir.to_str().unwrap().to_string(),
branch: tests::BRANCH.to_string(),
branch: "gh-pages".to_string(),
domain: "mcaptcha.org".into(),
pub_id: Uuid::new_v4(),
};
assert!(

View file

@ -1,165 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::path::Path;
use libconfig::Config;
use serde::{Deserialize, Serialize};
use crate::git::{ContentType, GitFileMode};
#[derive(Deserialize, Debug, Serialize, PartialEq, Eq)]
struct Policy<'a> {
rel_path: &'a str,
format: SupportedFormat,
}
impl<'a> Policy<'a> {
const fn new(rel_path: &'a str, format: SupportedFormat) -> Self {
Self { rel_path, format }
}
}
#[derive(Deserialize, Debug, Serialize, PartialEq, Eq)]
enum SupportedFormat {
Json,
Yaml,
Toml,
}
pub fn load<P: AsRef<Path>>(repo_path: &P, branch: &str) -> Option<Config> {
const POLICIES: [Policy; 2] = [
Policy::new("librepages.toml", SupportedFormat::Toml),
Policy::new("librepages.json", SupportedFormat::Json),
];
if let Some(policy) = discover(repo_path, branch, &POLICIES) {
// let path = p.repo.as_ref().join(policy.rel_path);
//let contents = fs::read_to_string(path).await.unwrap();
let file =
crate::git::read_preview_file(&repo_path.as_ref().into(), branch, policy.rel_path)
.unwrap();
if let ContentType::Text(contents) = file.content {
let res = match policy.format {
SupportedFormat::Json => load_json(&contents),
SupportedFormat::Yaml => load_yaml(&contents),
SupportedFormat::Toml => load_toml(&contents),
};
return Some(res);
};
}
None
}
fn discover<'a, P: AsRef<Path>>(
repo_path: &P,
branch: &str,
policies: &'a [Policy<'a>],
) -> Option<&'a Policy<'a>> {
let repo = git2::Repository::open(repo_path).unwrap();
let branch = repo.find_branch(branch, git2::BranchType::Local).unwrap();
// let tree = head.peel_to_tree().unwrap();
let branch = branch.into_reference();
let tree = branch.peel_to_tree().unwrap();
for p in policies.iter() {
let file_exists = tree.iter().any(|x| {
if let Some(name) = x.name() {
if policies.iter().any(|p| p.rel_path == name) {
let mode: GitFileMode = x.into();
matches!(mode, GitFileMode::Executable | GitFileMode::Regular)
} else {
false
}
} else {
false
}
});
if file_exists {
return Some(p);
}
}
None
}
fn load_toml(c: &str) -> Config {
toml::from_str(c).unwrap()
}
fn load_yaml(c: &str) -> Config {
serde_yaml::from_str(c).unwrap()
}
fn load_json(c: &str) -> Config {
serde_json::from_str(c).unwrap()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::git::tests::write_file_util;
use mktemp::Temp;
use libconfig::*;
#[actix_rt::test]
async fn page_config_test() {
let tmp_dir = Temp::new_dir().unwrap();
let repo_path = tmp_dir.join("page_config_test");
let content = std::fs::read_to_string(
&Path::new("./tests/cases/contains-everything/toml/librepages.toml")
.canonicalize()
.unwrap(),
)
.unwrap();
write_file_util(
repo_path.to_str().unwrap(),
"librepages.toml",
Some(&content),
);
let config = load(&repo_path, "master").unwrap();
assert!(config.forms.as_ref().unwrap().enable);
assert!(config.image_compression.as_ref().unwrap().enable);
assert_eq!(config.source.production_branch, "librepages");
assert_eq!(config.source.staging.as_ref().unwrap(), "beta");
assert_eq!(
config.redirects.as_ref().unwrap(),
&vec![
Redirects {
from: "/from1".into(),
to: "/to1".into()
},
Redirects {
from: "/from2".into(),
to: "/to2".into()
},
]
);
assert_eq!(
config.domains.as_ref().unwrap(),
&vec!["example.org".to_string(), "example.com".to_string(),]
);
}
}
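
A usage sketch (editor's addition, not from the diff): the loader above can pick the branch a preview deploy should use. Field access follows the test assertions; the helper name `preview_branch` is hypothetical:

    use std::path::Path;

    // Prefer the staging branch from librepages.toml/json, fall back to the given default.
    fn preview_branch(repo_path: &Path, default_branch: &str) -> String {
        match crate::page_config::load(&repo_path, default_branch) {
            Some(config) => config
                .source
                .staging
                .clone()
                .unwrap_or_else(|| config.source.production_branch.clone()),
            None => default_branch.to_string(),
        }
    }
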

View file

@ -93,7 +93,7 @@ pub async fn login_submit(
.finish())
} else {
Ok(HttpResponse::Found()
.insert_header((http::header::LOCATION, PAGES.dash.home))
.insert_header((http::header::LOCATION, PAGES.home))
.finish())
}
}

View file

@ -71,7 +71,7 @@ async fn auth_works(ctx: ArcCtx) {
.await;
assert_eq!(resp.status(), StatusCode::FOUND);
let headers = resp.headers();
assert_eq!(headers.get(header::LOCATION).unwrap(), PAGES.dash.home);
assert_eq!(headers.get(header::LOCATION).unwrap(), PAGES.home);
let cookies = get_cookie!(resp);
// redirect after signin

View file

@ -1,193 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use tera::Context;
use tracing::info;
use super::get_auth_middleware;
use crate::api::v1::forgejo::AddWebhook;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_FORGEJO_WEBHOOK_ADD: TemplateFile =
TemplateFile::new("dash_forgejo_webhook_add", "pages/dash/forgejo/add.html");
pub struct Add {
ctx: RefCell<Context>,
}
impl CtxError for Add {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl Add {
pub fn new(settings: &Settings) -> Self {
let ctx = RefCell::new(context(settings));
Self { ctx }
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_FORGEJO_WEBHOOK_ADD.name, &self.ctx.borrow())
.unwrap()
}
}
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.forgejo_webhook.add",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Dashboard add forgejo webhook webpage", skip(ctx))]
pub async fn get_add_forgejo_webhook(ctx: AppCtx) -> PageResult<impl Responder, Add> {
let add = Add::new(&ctx.settings).render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
#[actix_web_codegen_const_routes::post(
path = "PAGES.dash.forgejo_webhook.add",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(
name = "Post Dashboard add Forgejo webhook webpage",
skip(ctx, id, payload)
)]
pub async fn post_add_forgejo_webhook(
ctx: AppCtx,
id: Identity,
payload: web::Form<AddWebhook>,
) -> PageResult<impl Responder, Add> {
let owner = id.identity().unwrap();
let payload = payload.into_inner();
info!(
"Adding webhook for Forgejo instance: {}",
payload.forgejo_url.as_str()
);
let hook = ctx
.db
.new_webhook(payload.forgejo_url, &owner)
.await
.map_err(|e| PageError::new(Add::new(&ctx.settings), e))?;
Ok(HttpResponse::Found()
.append_header((
http::header::LOCATION,
PAGES.dash.forgejo_webhook.get_view(&hook.auth_token),
))
.finish())
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_add_forgejo_webhook);
cfg.service(post_add_forgejo_webhook);
}
#[cfg(test)]
mod tests {
use actix_web::http::StatusCode;
use actix_web::test;
use url::Url;
use crate::api::v1::forgejo::AddWebhook;
use crate::ctx::ArcCtx;
use crate::tests;
use crate::*;
use super::PAGES;
#[actix_rt::test]
async fn postgres_dashboadr_add_forgejo_webhook_works() {
let (_, ctx) = tests::get_ctx().await;
dashboadr_add_forgejo_webhook_works(ctx.clone()).await;
}
async fn dashboadr_add_forgejo_webhook_works(ctx: ArcCtx) {
const NAME: &str = "testdashwebhookforgejoadduser";
const EMAIL: &str = "testdashwebhookforgejoadduser@foo.com";
const PASSWORD: &str = "longpassword";
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let cookies = get_cookie!(signin_resp);
let app = get_app!(ctx.clone()).await;
let resp = get_request!(&app, PAGES.dash.forgejo_webhook.add, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains("Add Forgejo Webhook"));
let payload = AddWebhook {
forgejo_url: Url::parse("https://git.batsense.net").unwrap(),
};
let add_webhook = test::call_service(
&app,
post_request!(&payload, PAGES.dash.forgejo_webhook.add, FORM)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(add_webhook.status(), StatusCode::FOUND);
let mut hooks = ctx.db.list_all_webhooks_with_owner(NAME).await.unwrap();
let hook = hooks.pop().unwrap();
// let mut event = ctx.db.list(&site.hostname).await.unwrap();
// let event = event.pop().unwrap();
let headers = add_webhook.headers();
let view_webhook_url = PAGES.dash.forgejo_webhook.get_view(&hook.auth_token);
assert_eq!(
headers.get(actix_web::http::header::LOCATION).unwrap(),
&view_webhook_url
);
// list webhooks
let resp = get_request!(&app, PAGES.dash.forgejo_webhook.list, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains(hook.forgejo_url.as_str()));
// view webhook
let resp = get_request!(&app, &view_webhook_url, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains("****"));
assert!(res.contains(
&crate::V1_API_ROUTES
.forgejo
.get_webhook_url(&ctx, &hook.auth_token)
));
let show_forgejo_webhook_secret =
format!("{view_webhook_url}?show_forgejo_webhook_secret=true");
let resp = get_request!(&app, &show_forgejo_webhook_secret, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains(&hook.forgejo_webhook_secret));
}
}
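
To summarize the flow exercised above, here is an editor's sketch written against the Database helpers shown earlier in src/db.rs; it assumes a connected `Database` handle and an existing user, and the Forgejo URL and the name `wire_up_webhook` are placeholders:

    use url::Url;

    use crate::db::Database;
    use crate::errors::ServiceResult;

    async fn wire_up_webhook(db: &Database, owner: &str, repo_url: &Url) -> ServiceResult<()> {
        // Register a hook for a Forgejo instance; the result carries the auth token and secret.
        let forgejo = Url::parse("https://git.example.org").unwrap();
        let hook = db.new_webhook(forgejo, owner).await?;

        // The auth token is how the dashboard view page and the callback URL identify the hook.
        let fetched = db.get_webhook_with_owner(&hook.auth_token, owner).await?;
        assert_eq!(hook, fetched);

        // Associate every site built from this repository with the hook.
        db.webhook_link_site(&hook.auth_token, repo_url).await?;
        Ok(())
    }
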

View file

@ -1,93 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use tera::Context;
use super::get_auth_middleware;
use crate::errors::ServiceResult;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_FORGEJO_WEBHOOK_LIST: TemplateFile =
TemplateFile::new("dash_forgejo_webhook_list", "pages/dash/forgejo/list.html");
pub struct List {
ctx: RefCell<Context>,
}
impl CtxError for List {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl List {
pub fn new(settings: &Settings, hooks: Option<&[TemplateForgejoWebhook]>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(hooks) = hooks {
ctx.borrow_mut().insert(PAYLOAD_KEY, hooks);
}
Self { ctx }
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_FORGEJO_WEBHOOK_LIST.name, &self.ctx.borrow())
.unwrap()
}
}
async fn get_webhook_data(
ctx: &AppCtx,
id: &Identity,
) -> ServiceResult<Vec<TemplateForgejoWebhook>> {
let db_hooks = ctx
.db
.list_all_webhooks_with_owner(&id.identity().unwrap())
.await?;
let mut hooks = Vec::with_capacity(db_hooks.len());
for hook in db_hooks {
hooks.push(TemplateForgejoWebhook::new(ctx, hook));
}
Ok(hooks)
}
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.forgejo_webhook.list",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "List all Forgejo webhooks", skip(ctx, id))]
pub async fn list_hooks(ctx: AppCtx, id: Identity) -> PageResult<impl Responder, List> {
let sites = get_webhook_data(&ctx, &id)
.await
.map_err(|e| PageError::new(List::new(&ctx.settings, None), e))?;
let home = List::new(&ctx.settings, Some(&sites)).render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(home))
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list_hooks);
}

View file

@ -1,69 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::*;
use serde::{Deserialize, Serialize};
use super::get_auth_middleware;
pub use super::home::TemplateSite;
pub use super::{context, Footer, TemplateFile, PAGES, PAYLOAD_KEY, TEMPLATES};
use crate::ctx::Ctx;
use crate::db::ForgejoWebhook;
pub mod add;
pub mod list;
pub mod view;
pub fn register_templates(t: &mut tera::Tera) {
add::DASH_FORGEJO_WEBHOOK_ADD
.register(t)
.expect(add::DASH_FORGEJO_WEBHOOK_ADD.name);
list::DASH_FORGEJO_WEBHOOK_LIST
.register(t)
.expect(list::DASH_FORGEJO_WEBHOOK_LIST.name);
view::DASH_FORGEJO_WEBHOOK_VIEW
.register(t)
.expect(view::DASH_FORGEJO_WEBHOOK_VIEW.name);
}
pub fn services(cfg: &mut web::ServiceConfig) {
add::services(cfg);
list::services(cfg);
view::services(cfg);
}
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
pub struct TemplateForgejoWebhook {
pub webhook: ForgejoWebhook,
pub view: String,
pub url: String,
}
impl TemplateForgejoWebhook {
pub fn new(ctx: &Ctx, hook: ForgejoWebhook) -> Self {
let view = PAGES.dash.forgejo_webhook.get_view(&hook.auth_token);
let url = crate::V1_API_ROUTES
.forgejo
.get_webhook_url(ctx, &hook.auth_token);
Self {
webhook: hook,
view,
url,
}
}
}

View file

@ -1,108 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use serde::{Deserialize, Serialize};
use tera::Context;
use super::get_auth_middleware;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_FORGEJO_WEBHOOK_VIEW: TemplateFile =
TemplateFile::new("dash_forgejo_webhook_view", "pages/dash/forgejo/view.html");
const SHOW_FORGEJO_WEBHOOK_SECRET_KEY: &str = "show_forgejo_webhook_secret";
pub struct View {
ctx: RefCell<Context>,
}
impl CtxError for View {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl View {
pub fn new(settings: &Settings, payload: Option<TemplateForgejoWebhook>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(payload) = payload {
ctx.borrow_mut().insert(PAYLOAD_KEY, &payload);
}
Self { ctx }
}
pub fn show_forgejo_webhook_secret(&mut self) {
self.ctx
.borrow_mut()
.insert(SHOW_FORGEJO_WEBHOOK_SECRET_KEY, &true);
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_FORGEJO_WEBHOOK_VIEW.name, &self.ctx.borrow())
.unwrap()
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct ViewOptions {
show_forgejo_webhook_secret: Option<bool>,
}
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.forgejo_webhook.view",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Dashboard Forgejo webhook webpage", skip(ctx, id))]
pub async fn get_view_site(
ctx: AppCtx,
id: Identity,
path: web::Path<String>,
query: web::Query<ViewOptions>,
) -> PageResult<impl Responder, View> {
let auth_token = path.into_inner();
let owner = id.identity().unwrap();
let hook = ctx
.db
.get_webhook_with_owner(&auth_token, &owner)
.await
.map_err(|e| PageError::new(View::new(&ctx.settings, None), e))?;
let payload = TemplateForgejoWebhook::new(&ctx, hook);
let mut page = View::new(&ctx.settings, Some(payload));
if let Some(true) = query.show_forgejo_webhook_secret {
page.show_forgejo_webhook_secret();
}
let add = page.render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_view_site);
}

View file

@ -16,20 +16,14 @@
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use serde::{Deserialize, Serialize};
use tera::Context;
use super::get_auth_middleware;
use crate::db::Site;
use crate::errors::ServiceResult;
use crate::ctx::api::v1::auth::Login as LoginPayload;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
use super::TemplateSiteEvent;
pub use super::*;
pub const DASH_HOME: TemplateFile = TemplateFile::new("dash_home", "pages/dash/index.html");
@ -38,24 +32,6 @@ pub struct Home {
ctx: RefCell<Context>,
}
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
pub struct TemplateSite {
pub site: Site,
pub view: String,
pub last_update: Option<TemplateSiteEvent>,
}
impl TemplateSite {
pub fn new(site: Site, last_update: Option<TemplateSiteEvent>) -> Self {
let view = PAGES.dash.site.get_view(site.pub_id);
Self {
site,
last_update,
view,
}
}
}
impl CtxError for Home {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
@ -64,10 +40,10 @@ impl CtxError for Home {
}
impl Home {
pub fn new(settings: &Settings, sites: Option<&[TemplateSite]>) -> Self {
pub fn new(settings: &Settings, payload: Option<&LoginPayload>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(sites) = sites {
ctx.borrow_mut().insert(PAYLOAD_KEY, sites);
if let Some(payload) = payload {
ctx.borrow_mut().insert(PAYLOAD_KEY, payload);
}
Self { ctx }
}
@ -77,81 +53,21 @@ impl Home {
.render(DASH_HOME.name, &self.ctx.borrow())
.unwrap()
}
}
async fn get_site_data(ctx: &AppCtx, id: &Identity) -> ServiceResult<Vec<TemplateSite>> {
let db_sites = ctx.db.list_all_sites(&id.identity().unwrap()).await?;
let mut sites = Vec::with_capacity(db_sites.len());
for site in db_sites {
// TODO: impl method on DB to get latest "update" event
let last_update = ctx
.db
.get_latest_update_event(&site.hostname)
.await?
.map(|e| e.into());
sites.push(TemplateSite::new(site, last_update));
pub fn page(s: &Settings) -> String {
let p = Self::new(s, None);
p.render()
}
Ok(sites)
}
#[actix_web_codegen_const_routes::get(path = "PAGES.dash.home", wrap = "get_auth_middleware()")]
#[tracing::instrument(name = "Dashboard homepage", skip(ctx, id))]
pub async fn get_home(ctx: AppCtx, id: Identity) -> PageResult<impl Responder, Home> {
let sites = get_site_data(&ctx, &id)
.await
.map_err(|e| PageError::new(Home::new(&ctx.settings, None), e))?;
let home = Home::new(&ctx.settings, Some(&sites)).render();
#[actix_web_codegen_const_routes::get(path = "PAGES.dash.home")]
#[tracing::instrument(name = "Dashboard homepage", skip(ctx))]
pub async fn get_home(ctx: AppCtx) -> impl Responder {
let home = Home::page(&ctx.settings);
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(home))
HttpResponse::Ok().content_type(html).body(home)
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_home);
}
#[cfg(test)]
mod tests {
use actix_web::http::StatusCode;
use actix_web::test;
use crate::ctx::ArcCtx;
use crate::tests;
use crate::*;
use super::PAGES;
#[actix_rt::test]
async fn postgres_dash_home_works() {
let (_, ctx) = tests::get_ctx().await;
dashboard_home_works(ctx.clone()).await;
}
async fn dashboard_home_works(ctx: ArcCtx) {
const NAME: &str = "testdashuser";
const EMAIL: &str = "testdashuser@foo.com";
const PASSWORD: &str = "longpassword";
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let cookies = get_cookie!(signin_resp);
let app = get_app!(ctx).await;
let resp = get_request!(&app, PAGES.dash.home, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
println!("before adding site: {res}");
assert!(res.contains("Nothing to show"));
let page = ctx.add_test_site(NAME.into()).await;
let resp = get_request!(&app, PAGES.dash.home, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
println!("after adding site: {res}");
assert!(!res.contains("Nothing here"));
assert!(res.contains(&page.domain));
assert!(res.contains(&page.repo));
let _ = ctx.delete_user(NAME, PASSWORD).await;
}
}

View file

@ -15,46 +15,15 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::*;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
pub use super::get_auth_middleware;
pub use super::{context, Footer, TemplateFile, PAGES, PAYLOAD_KEY, TEMPLATES};
use crate::db::Event;
use crate::db::LibrePagesEvent;
pub mod forgejo;
pub mod home;
pub mod sites;
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct TemplateSiteEvent {
pub event_type: Event,
pub time: i64,
pub site: String,
pub id: Uuid,
}
impl From<LibrePagesEvent> for TemplateSiteEvent {
fn from(e: LibrePagesEvent) -> Self {
Self {
event_type: e.event_type,
time: e.time.unix_timestamp(),
site: e.site,
id: e.id,
}
}
}
mod home;
pub fn register_templates(t: &mut tera::Tera) {
home::DASH_HOME.register(t).expect(home::DASH_HOME.name);
sites::register_templates(t);
forgejo::register_templates(t);
}
pub fn services(cfg: &mut web::ServiceConfig) {
home::services(cfg);
sites::services(cfg);
forgejo::services(cfg);
}

View file

@ -1,230 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use serde::{Deserialize, Serialize};
use tera::Context;
use super::get_auth_middleware;
use crate::ctx::api::v1::pages::AddSite;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_SITE_ADD: TemplateFile =
TemplateFile::new("dash_site_add", "pages/dash/sites/add.html");
pub struct Add {
ctx: RefCell<Context>,
}
impl CtxError for Add {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl Add {
pub fn new(settings: &Settings) -> Self {
let ctx = RefCell::new(context(settings));
Self { ctx }
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_SITE_ADD.name, &self.ctx.borrow())
.unwrap()
}
}
#[actix_web_codegen_const_routes::get(path = "PAGES.dash.site.add", wrap = "get_auth_middleware()")]
#[tracing::instrument(name = "Dashboard add site webpage", skip(ctx))]
pub async fn get_add_site(ctx: AppCtx) -> PageResult<impl Responder, Add> {
let add = Add::new(&ctx.settings).render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
/// Data required to add site
pub struct TemplateAddSite {
pub repo_url: String,
pub branch: String,
}
#[actix_web_codegen_const_routes::post(
path = "PAGES.dash.site.add",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Post Dashboard add site webpage", skip(ctx, id))]
pub async fn post_add_site(
ctx: AppCtx,
id: Identity,
payload: web::Form<TemplateAddSite>,
) -> PageResult<impl Responder, Add> {
let owner = id.identity().unwrap();
let payload = payload.into_inner();
let msg = AddSite {
branch: payload.branch,
repo_url: payload.repo_url,
owner,
};
let page = ctx
.add_site(msg)
.await
.map_err(|e| PageError::new(Add::new(&ctx.settings), e))?;
Ok(HttpResponse::Found()
.append_header((
http::header::LOCATION,
PAGES.dash.site.get_view(page.pub_id),
))
.finish())
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_add_site);
cfg.service(post_add_site);
}
#[cfg(test)]
mod tests {
use actix_web::http::StatusCode;
use actix_web::test;
use crate::ctx::api::v1::auth::Password;
use crate::ctx::ArcCtx;
use crate::errors::ServiceError;
use crate::pages::dash::sites::add::TemplateAddSite;
use crate::tests;
use crate::*;
use super::PAGES;
#[actix_rt::test]
async fn postgres_dashboard_add_site_works() {
let (_, ctx) = tests::get_ctx().await;
dashboard_add_site_works(ctx.clone()).await;
}
async fn dashboard_add_site_works(ctx: ArcCtx) {
const NAME: &str = "testdashaddsiteuser";
const EMAIL: &str = "testdashaddsiteuser@foo.com";
const PASSWORD: &str = "longpassword";
let _ = ctx.delete_user(NAME, PASSWORD).await;
let (_, signin_resp) = ctx.register_and_signin(NAME, EMAIL, PASSWORD).await;
let cookies = get_cookie!(signin_resp);
let app = get_app!(ctx.clone()).await;
let resp = get_request!(&app, PAGES.dash.site.add, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains("Add Site"));
let payload = TemplateAddSite {
repo_url: tests::REPO_URL.into(),
branch: tests::BRANCH.into(),
};
let add_site = test::call_service(
&app,
post_request!(&payload, PAGES.dash.site.add, FORM)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(add_site.status(), StatusCode::FOUND);
let mut site = ctx.db.list_all_sites(NAME).await.unwrap();
let site = site.pop().unwrap();
let mut event = ctx.db.list_all_site_events(&site.hostname).await.unwrap();
let event = event.pop().unwrap();
let headers = add_site.headers();
let view_site = &PAGES.dash.site.get_view(site.pub_id);
assert_eq!(
headers.get(actix_web::http::header::LOCATION).unwrap(),
view_site
);
// view site
let resp = get_request!(&app, view_site, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains("****"));
assert!(res.contains(&site.hostname));
assert!(res.contains(&site.repo_url));
assert!(res.contains(&site.branch));
assert!(res.contains(&event.event_type.name));
assert!(res.contains(&event.id.to_string()));
let show_deploy_secret_route = format!("{view_site}?show_deploy_secret=true");
let resp = get_request!(&app, &show_deploy_secret_route, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains(&site.site_secret));
// delete site
let delete_site = &PAGES.dash.site.get_delete(site.pub_id);
let resp = get_request!(&app, delete_site, cookies.clone());
assert_eq!(resp.status(), StatusCode::OK);
let res = String::from_utf8(test::read_body(resp).await.to_vec()).unwrap();
assert!(res.contains(&site.hostname));
let msg = Password {
password: PASSWORD.into(),
};
let resp = test::call_service(
&app,
post_request!(&msg, delete_site, FORM)
.cookie(cookies.clone())
.to_request(),
)
.await;
// delete_request!(&app, delete_site, cookies.clone(), &msg, FORM);
assert_eq!(resp.status(), StatusCode::FOUND);
let headers = resp.headers();
assert_eq!(
headers.get(actix_web::http::header::LOCATION).unwrap(),
PAGES.dash.home,
);
assert!(!utils::get_website_path(&ctx.settings, &site.hostname).exists());
assert_eq!(
ctx.db
.get_site_from_pub_id(site.pub_id, NAME.into())
.await
.err(),
Some(ServiceError::WebsiteNotFound)
);
let mut events = ctx.db.list_all_site_events(&site.hostname).await.unwrap();
let possible_delete = events.pop().unwrap();
assert_eq!(&possible_delete.event_type, &*crate::db::EVENT_TYPE_DELETE);
let _ = ctx.delete_user(NAME, PASSWORD).await;
}
}

View file

@ -1,185 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use serde::{Deserialize, Serialize};
use tera::Context;
use uuid::Uuid;
use super::get_auth_middleware;
use crate::ctx::api::v1::auth::{Login, Password};
use crate::db::Site;
use crate::pages::dash::TemplateSiteEvent;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_SITE_DELETE: TemplateFile =
TemplateFile::new("dash_site_delete", "pages/dash/sites/delete.html");
const SHOW_DEPLOY_SECRET_KEY: &str = "show_deploy_secret";
pub struct Delete {
ctx: RefCell<Context>,
}
impl CtxError for Delete {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl Delete {
pub fn new(settings: &Settings, payload: Option<TemplateSiteWithEvents>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(payload) = payload {
ctx.borrow_mut().insert(PAYLOAD_KEY, &payload);
}
Self { ctx }
}
pub fn show_deploy_secret(&mut self) {
self.ctx.borrow_mut().insert(SHOW_DEPLOY_SECRET_KEY, &true);
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_SITE_DELETE.name, &self.ctx.borrow())
.unwrap()
}
}
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
pub struct TemplateSiteWithEvents {
pub site: Site,
pub delete: String,
pub last_update: Option<TemplateSiteEvent>,
pub events: Vec<TemplateSiteEvent>,
}
impl TemplateSiteWithEvents {
pub fn new(
site: Site,
last_update: Option<TemplateSiteEvent>,
events: Vec<TemplateSiteEvent>,
) -> Self {
let delete = PAGES.dash.site.get_delete(site.pub_id);
Self {
site,
last_update,
delete,
events,
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct DeleteOptions {
show_deploy_secret: Option<bool>,
}
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.site.delete",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Dashboard delete site webpage", skip(ctx, id))]
pub async fn get_delete_site(
ctx: AppCtx,
id: Identity,
path: web::Path<Uuid>,
query: web::Query<DeleteOptions>,
) -> PageResult<impl Responder, Delete> {
let site_id = path.into_inner();
let owner = id.identity().unwrap();
let site = ctx
.db
.get_site_from_pub_id(site_id, owner)
.await
.map_err(|e| PageError::new(Delete::new(&ctx.settings, None), e))?;
let last_update = ctx
.db
.get_latest_update_event(&site.hostname)
.await
.map_err(|e| PageError::new(Delete::new(&ctx.settings, None), e))?;
let last_update = last_update.map(|e| e.into());
let mut db_events = ctx
.db
.list_all_site_events(&site.hostname)
.await
.map_err(|e| PageError::new(Delete::new(&ctx.settings, None), e))?;
let mut events = Vec::with_capacity(db_events.len());
for e in db_events.drain(0..) {
events.push(e.into());
}
let payload = TemplateSiteWithEvents::new(site, last_update, events);
let mut page = Delete::new(&ctx.settings, Some(payload));
if let Some(true) = query.show_deploy_secret {
page.show_deploy_secret();
}
let add = page.render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
#[actix_web_codegen_const_routes::post(
path = "PAGES.dash.site.delete",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Delete site from webpage", skip(ctx, id))]
pub async fn post_delete_site(
ctx: AppCtx,
id: Identity,
path: web::Path<Uuid>,
payload: web::Form<Password>,
) -> PageResult<impl Responder, Delete> {
let site_id = path.into_inner();
let owner = id.identity().unwrap();
let payload = payload.into_inner();
let msg = Login {
login: owner,
password: payload.password,
};
ctx.login(&msg)
.await
.map_err(|e| PageError::new(Delete::new(&ctx.settings, None), e))?;
ctx.delete_site(msg.login, site_id)
.await
.map_err(|e| PageError::new(Delete::new(&ctx.settings, None), e))?;
Ok(HttpResponse::Found()
.append_header((http::header::LOCATION, PAGES.dash.home))
.finish())
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_delete_site);
cfg.service(post_delete_site);
}
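
The removed post_delete_site handler above follows a confirm-access pattern: the submitted password is first run through the normal login check for the account owner, and only then is the deployment deleted. Below is a compressed, synchronous sketch of that ordering with stub types; it is illustrative only — the real handler is async, works against AppCtx, and responds with a redirect to the dashboard.

#[derive(Debug, PartialEq)]
enum Error { WrongPassword }

struct Ctx { password: String }

impl Ctx {
    // Stand-in for the application's login check.
    fn login(&self, _user: &str, password: &str) -> Result<(), Error> {
        if password == self.password { Ok(()) } else { Err(Error::WrongPassword) }
    }
    // Stand-in for the destructive delete.
    fn delete_site(&self, user: &str, site: &str) {
        println!("deleting {site} owned by {user}");
    }
}

fn post_delete_site(ctx: &Ctx, owner: &str, password: &str, site: &str) -> Result<(), Error> {
    // Re-authenticate before the destructive operation, mirroring the removed handler.
    ctx.login(owner, password)?;
    ctx.delete_site(owner, site);
    Ok(())
}

fn main() {
    let ctx = Ctx { password: "longpassword".into() };
    assert!(post_delete_site(&ctx, "alice", "longpassword", "example.org").is_ok());
    assert_eq!(post_delete_site(&ctx, "alice", "wrong", "example.org"), Err(Error::WrongPassword));
}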

View file

@ -1,43 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::*;
use super::get_auth_middleware;
pub use super::home::TemplateSite;
pub use super::{context, Footer, TemplateFile, PAGES, PAYLOAD_KEY, TEMPLATES};
pub mod add;
pub mod delete;
pub mod view;
pub fn register_templates(t: &mut tera::Tera) {
add::DASH_SITE_ADD
.register(t)
.expect(add::DASH_SITE_ADD.name);
view::DASH_SITE_VIEW
.register(t)
.expect(view::DASH_SITE_VIEW.name);
delete::DASH_SITE_DELETE
.register(t)
.expect(delete::DASH_SITE_DELETE.name);
}
pub fn services(cfg: &mut web::ServiceConfig) {
add::services(cfg);
view::services(cfg);
delete::services(cfg);
}

View file

@ -1,154 +0,0 @@
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header::ContentType;
use serde::{Deserialize, Serialize};
use tera::Context;
use uuid::Uuid;
use super::get_auth_middleware;
use crate::db::Site;
use crate::pages::dash::TemplateSiteEvent;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::AppCtx;
pub use super::*;
pub const DASH_SITE_VIEW: TemplateFile =
TemplateFile::new("dash_site_view", "pages/dash/sites/view.html");
const SHOW_DEPLOY_SECRET_KEY: &str = "show_deploy_secret";
pub struct View {
ctx: RefCell<Context>,
}
impl CtxError for View {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl View {
pub fn new(settings: &Settings, payload: Option<TemplateSiteWithEvents>) -> Self {
let ctx = RefCell::new(context(settings));
if let Some(payload) = payload {
ctx.borrow_mut().insert(PAYLOAD_KEY, &payload);
}
Self { ctx }
}
pub fn show_deploy_secret(&mut self) {
self.ctx.borrow_mut().insert(SHOW_DEPLOY_SECRET_KEY, &true);
}
pub fn render(&self) -> String {
TEMPLATES
.render(DASH_SITE_VIEW.name, &self.ctx.borrow())
.unwrap()
}
}
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
pub struct TemplateSiteWithEvents {
pub site: Site,
pub view: String,
pub delete: String,
pub last_update: Option<TemplateSiteEvent>,
pub events: Vec<TemplateSiteEvent>,
}
impl TemplateSiteWithEvents {
pub fn new(
site: Site,
last_update: Option<TemplateSiteEvent>,
events: Vec<TemplateSiteEvent>,
) -> Self {
let view = PAGES.dash.site.get_view(site.pub_id);
let delete = PAGES.dash.site.get_delete(site.pub_id);
Self {
site,
last_update,
view,
delete,
events,
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct ViewOptions {
show_deploy_secret: Option<bool>,
}
#[actix_web_codegen_const_routes::get(
path = "PAGES.dash.site.view",
wrap = "get_auth_middleware()"
)]
#[tracing::instrument(name = "Dashboard add site webpage", skip(ctx, id))]
pub async fn get_view_site(
ctx: AppCtx,
id: Identity,
path: web::Path<Uuid>,
query: web::Query<ViewOptions>,
) -> PageResult<impl Responder, View> {
let site_id = path.into_inner();
let owner = id.identity().unwrap();
let site = ctx
.db
.get_site_from_pub_id(site_id, owner)
.await
.map_err(|e| PageError::new(View::new(&ctx.settings, None), e))?;
let last_update = ctx
.db
.get_latest_update_event(&site.hostname)
.await
.map_err(|e| PageError::new(View::new(&ctx.settings, None), e))?;
let last_update = last_update.map(|e| e.into());
let mut db_events = ctx
.db
.list_all_site_events(&site.hostname)
.await
.map_err(|e| PageError::new(View::new(&ctx.settings, None), e))?;
let mut events = Vec::with_capacity(db_events.len());
for e in db_events.drain(0..) {
events.push(e.into());
}
let payload = TemplateSiteWithEvents::new(site, last_update, events);
let mut page = View::new(&ctx.settings, Some(payload));
if let Some(true) = query.show_deploy_secret {
page.show_deploy_secret();
}
let add = page.render();
let html = ContentType::html();
Ok(HttpResponse::Ok().content_type(html).body(add))
}
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_view_site);
}

View file

@ -14,6 +14,8 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::cell::RefCell;
use actix_identity::Identity;
use actix_web::http::header;
use actix_web::*;
@ -22,8 +24,10 @@ use rust_embed::RustEmbed;
use serde::*;
use tera::*;
use crate::pages::errors::*;
use crate::settings::Settings;
use crate::static_assets::ASSETS;
use crate::AppCtx;
use crate::{GIT_COMMIT_HASH, VERSION};
pub mod auth;
@ -73,6 +77,7 @@ lazy_static! {
tera.autoescape_on(vec![".html", ".sql"]);
auth::register_templates(&mut tera);
dash::register_templates(&mut tera);
HOME.register(&mut tera).expect(HOME.name);
tera
};
}
@ -99,7 +104,7 @@ pub fn context(s: &Settings) -> Context {
ctx
}
pub fn auth_ctx(_username: Option<&str>, s: &Settings) -> Context {
pub fn auth_ctx(username: Option<&str>, s: &Settings) -> Context {
let mut ctx = Context::new();
let footer = Footer::new(s);
ctx.insert("footer", &footer);
@ -130,20 +135,53 @@ impl<'a> Footer<'a> {
}
}
pub async fn home(id: &Identity) -> HttpResponse {
let location = if id.identity().is_some() {
PAGES.home
pub const HOME: TemplateFile = TemplateFile::new("home", "pages/index.html");
pub struct Home {
ctx: RefCell<Context>,
}
impl CtxError for Home {
fn with_error(&self, e: &ReadableError) -> String {
self.ctx.borrow_mut().insert(ERROR_KEY, e);
self.render()
}
}
impl Home {
pub fn new(settings: &Settings) -> Self {
let ctx = RefCell::new(context(settings));
Self { ctx }
}
pub fn render(&self) -> String {
TEMPLATES.render(HOME.name, &self.ctx.borrow()).unwrap()
}
pub fn page(s: &Settings) -> String {
let p = Self::new(s);
p.render()
}
}
#[actix_web_codegen_const_routes::get(path = "PAGES.home")]
#[tracing::instrument(name = "Dashboard homepage", skip(id, ctx))]
pub async fn home(ctx: AppCtx, id: Identity) -> impl Responder {
if id.identity().is_none() {
let home = Home::page(&ctx.settings);
let html = header::ContentType::html();
HttpResponse::Ok().content_type(html).body(home)
} else {
PAGES.dash.home
};
HttpResponse::Found()
.append_header((header::LOCATION, location))
.finish()
HttpResponse::Found()
.append_header((header::LOCATION, PAGES.dash.home))
.finish()
}
}
pub fn services(cfg: &mut web::ServiceConfig) {
dash::services(cfg);
auth::services(cfg);
dash::services(cfg);
cfg.service(home);
}
#[cfg(test)]
@ -165,8 +203,7 @@ mod tests {
auth::login::LOGIN,
auth::register::REGISTER,
errors::ERROR_TEMPLATE,
super::dash::home::DASH_HOME,
super::dash::sites::add::DASH_SITE_ADD,
HOME,
]
.iter()
{

View file

@ -16,7 +16,6 @@
*/
use actix_auth_middleware::{Authentication, GetLoginRoute};
use serde::*;
use uuid::Uuid;
/// constant [Pages](Pages) instance
pub const PAGES: Pages = Pages::new();
@ -36,7 +35,7 @@ impl Pages {
const fn new() -> Pages {
let auth = Auth::new();
let dash = Dash::new();
let home = auth.login;
let home = "/";
Pages { auth, home, dash }
}
}
@ -71,81 +70,13 @@ impl Auth {
pub struct Dash {
/// home route
pub home: &'static str,
pub site: DashSite,
pub forgejo_webhook: ForgejoWebhook,
}
impl Dash {
/// create new instance of Dash route
pub const fn new() -> Dash {
let home = "/dash";
let site = DashSite::new();
let forgejo_webhook = ForgejoWebhook::new();
Dash {
home,
site,
forgejo_webhook,
}
}
}
#[derive(Serialize)]
/// Dashboard ForgejoWebhook routes
pub struct ForgejoWebhook {
/// add forgejo webhook route
pub add: &'static str,
/// view forgejo webhook route
pub view: &'static str,
/// list forgejo webhooks route
pub list: &'static str,
}
impl ForgejoWebhook {
/// create new instance of ForgejoWebhook route
pub const fn new() -> ForgejoWebhook {
let add = "/dash/forgejo/webhook/add";
let list = "/dash/forgejo/webhook/list";
let view = "/dash/forgejo/webhook/view/{auth_token}";
ForgejoWebhook { add, view, list }
}
pub fn get_view(&self, auth_token: &str) -> String {
self.view.replace("{auth_token}", auth_token)
}
}
#[derive(Serialize)]
/// Dashboard Site routes
pub struct DashSite {
/// add site route
pub add: &'static str,
/// view site route
pub view: &'static str,
/// delete site route
pub delete: &'static str,
}
impl DashSite {
/// create new instance of DashSite route
pub const fn new() -> DashSite {
let add = "/dash/site/add";
let view = "/dash/site/view/{deployment_pub_id}";
let delete = "/dash/site/delete/{deployment_pub_id}";
DashSite { add, view, delete }
}
pub fn get_view(&self, deployment_pub_id: Uuid) -> String {
self.view.replace(
"{deployment_pub_id}",
deployment_pub_id.to_string().as_ref(),
)
}
pub fn get_delete(&self, deployment_pub_id: Uuid) -> String {
self.delete.replace(
"{deployment_pub_id}",
deployment_pub_id.to_string().as_ref(),
)
Dash { home }
}
}
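
The DashSite and ForgejoWebhook helpers removed above build concrete dashboard paths by substituting an identifier into a const route template. A minimal sketch of that substitution, assuming only the uuid crate; the free function and the literal route string below are illustrative, not part of the crate:

use uuid::Uuid;

/// Substitute a deployment's public id into a route template such as
/// "/dash/site/view/{deployment_pub_id}".
fn fill_route(template: &str, deployment_pub_id: Uuid) -> String {
    template.replace("{deployment_pub_id}", deployment_pub_id.to_string().as_str())
}

fn main() {
    let id = Uuid::nil();
    // prints "/dash/site/view/00000000-0000-0000-0000-000000000000"
    println!("{}", fill_route("/dash/site/view/{deployment_pub_id}", id));
}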

View file

@ -38,9 +38,9 @@ impl<'a> Preview<'a> {
}
pub fn extract(&self, hostname: &'a str) -> Option<&'a str> {
if !hostname.contains(self.delimiter)
|| !hostname.contains(self.prefix)
|| !hostname.contains(self.base)
if !hostname.contains(&self.delimiter)
|| !hostname.contains(&self.prefix)
|| !hostname.contains(&self.base)
{
return None;
}
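
The hunk above only toggles borrows inside the guard clause of Preview::extract: a hostname must contain the configured delimiter, prefix, and base domain before any preview branch is extracted. A rough, self-contained sketch of that guard with hypothetical field values; the final extraction step here is a simplified stand-in, since the real parsing is not shown in this diff:

struct Preview<'a> {
    delimiter: &'a str,
    prefix: &'a str,
    base: &'a str,
}

impl<'a> Preview<'a> {
    fn extract(&self, hostname: &'a str) -> Option<&'a str> {
        // Guard clause mirrored from the diff: bail out unless all three markers are present.
        if !hostname.contains(self.delimiter)
            || !hostname.contains(self.prefix)
            || !hostname.contains(self.base)
        {
            return None;
        }
        // Simplified stand-in for the real parsing: return the label before the first delimiter.
        hostname.split(self.delimiter).next()
    }
}

fn main() {
    let p = Preview { delimiter: ".", prefix: "preview", base: "example.org" };
    assert_eq!(p.extract("preview-main.example.org"), Some("preview-main"));
    assert_eq!(p.extract("example.com"), None);
}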

View file

@ -1,4 +1,3 @@
use actix_identity::Identity;
/*
* Copyright (C) 2022 Aravinth Manivannan <realaravinth@batsense.net>
*
@ -15,10 +14,9 @@ use actix_identity::Identity;
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use actix_web::{web, HttpRequest, HttpResponse, Responder};
use actix_web::{http::header::ContentType, web, HttpRequest, HttpResponse, Responder};
use crate::errors::*;
use crate::pages;
use crate::AppCtx;
pub mod routes {
@ -36,27 +34,27 @@ pub mod routes {
}
#[actix_web_codegen_const_routes::get(path = "crate::V1_API_ROUTES.serve.catch_all")]
#[tracing::instrument(name = "Serve webpages", skip(req, ctx, id))]
async fn index(req: HttpRequest, ctx: AppCtx, id: Identity) -> ServiceResult<impl Responder> {
#[tracing::instrument(name = "Serve webpages", skip(req, ctx))]
async fn index(req: HttpRequest, ctx: AppCtx) -> ServiceResult<impl Responder> {
let c = req.connection_info();
let mut host = c.host();
if host.contains(':') {
host = host.split(':').next().unwrap();
}
tracing::debug!("Current host {host}");
// serve meta page
if host == ctx.settings.server.domain || host == "localhost" {
tracing::debug!("Into home");
return Ok(pages::home(&id).await);
return Ok(HttpResponse::Ok()
.content_type(ContentType::html())
.body("Welcome to Librepages!"));
}
// serve default hostname content
if host.contains(&ctx.settings.page.base_domain) {
let extractor = crate::preview::Preview::new(&ctx);
if let Some(preview_branch) = extractor.extract(host) {
let res = if ctx.db.hostname_exists(host).await? {
let path = crate::utils::get_website_path(&ctx.settings, host);
let res = if ctx.db.hostname_exists(&host).await? {
let path = crate::utils::get_website_path(&ctx.settings, &host);
let content =
crate::git::read_preview_file(&path, preview_branch, req.uri().path())?;
let mime = if let Some(mime) = content.mime.first_raw() {
@ -77,7 +75,7 @@ async fn index(req: HttpRequest, ctx: AppCtx, id: Identity) -> ServiceResult<imp
// TODO: custom domains.
if ctx.db.hostname_exists(host).await? {
let path = crate::utils::get_website_path(&ctx.settings, host);
let path = crate::utils::get_website_path(&ctx.settings, &host);
let content = crate::git::read_file(&path, req.uri().path())?;
let mime = if let Some(mime) = content.mime.first_raw() {
mime

View file

@ -16,20 +16,22 @@
*/
use std::env;
use std::path::Path;
use std::sync::Arc;
use config::{Config, ConfigError, Environment, File};
use derive_more::Display;
#[cfg(not(test))]
use tracing::warn;
use tracing::{error, warn};
#[cfg(test)]
use std::println as warn;
use std::{println as warn, println as error};
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use crate::errors::*;
use crate::page::Page;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Server {
@ -82,14 +84,6 @@ pub struct Settings {
pub source_code: String,
pub database: Database,
pub page: PageConfig,
pub conductors: Vec<Conductor>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conductor {
pub username: String,
pub api_key: String,
pub url: Url,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -136,6 +130,15 @@ impl Settings {
Err(e) => warn!("couldn't interpret PORT: {}", e),
}
if let Ok(val) = env::var("DATABASE_URL") {
let url = Url::parse(&val).expect("couldn't parse Database URL");
s = s.set_override("database.url", url.to_string()).unwrap();
let database_type = DBType::from_url(&url).unwrap();
s = s
.set_override("database.database_type", database_type.to_string())
.unwrap();
}
let intermediate_config = s.build_cloned().unwrap();
s = s
@ -162,15 +165,6 @@ impl Settings {
)
.expect("Couldn't set database url");
if let Ok(val) = env::var("DATABASE_URL") {
let url = Url::parse(&val).expect("couldn't parse Database URL");
s = s.set_override("database.url", url.to_string()).unwrap();
let database_type = DBType::from_url(&url).unwrap();
s = s
.set_override("database.database_type", database_type.to_string())
.unwrap();
}
let settings = s.build()?.try_deserialize::<Settings>()?;
settings.check_url();
@ -184,7 +178,7 @@ impl Settings {
}
if !path.exists() {
std::fs::create_dir_all(path).unwrap();
std::fs::create_dir_all(&path).unwrap();
}
}
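
The settings hunks above move the DATABASE_URL override earlier in the config build: when the variable is set, its value is parsed as a URL and written into both database.url and database.database_type. A minimal sketch of that parsing step, assuming the url crate and a hypothetical connection string:

use url::Url;

fn main() {
    // Hypothetical value; in the application this comes from env::var("DATABASE_URL").
    let val = "postgres://postgres:password@localhost:5432/postgres";
    let url = Url::parse(val).expect("couldn't parse Database URL");
    // The scheme is what a DBType::from_url-style helper would inspect.
    println!("database.url = {url}");
    println!("database scheme = {}", url.scheme());
}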

View file

@ -35,21 +35,6 @@ pub mod routes {
lazy_static! {
pub static ref ASSETS: Assets = Assets::new();
}
#[derive(Serialize)]
pub struct Svg {
pub eye_off: &'static str,
pub eye: &'static str,
}
impl Svg {
/// create new instance of Routes
pub fn new() -> Svg {
Svg {
eye: &static_files::assets::CSS,
eye_off: &static_files::assets::CSS,
}
}
}
#[derive(Serialize)]
/// Top-level routes data structure for V1 API
@ -57,7 +42,6 @@ pub mod routes {
/// Authentication routes
pub css: &'static str,
pub mobile_css: &'static str,
pub svg: Svg,
}
impl Assets {
@ -66,7 +50,6 @@ pub mod routes {
Assets {
css: &static_files::assets::CSS,
mobile_css: &static_files::assets::CSS,
svg: Svg::new(),
}
}
}

View file

@ -29,9 +29,6 @@ pub mod assets {
lazy_static! {
pub static ref CSS: &'static str = FILES.get("./static/cache/css/main.css").unwrap();
pub static ref EYE: &'static str = FILES.get("./static/cache/img/svg/eye.svg").unwrap();
pub static ref EYE_OFF: &'static str =
FILES.get("./static/cache/img/svg/eye-off.svg").unwrap();
pub static ref MOBILE_CSS: &'static str =
FILES.get("./static/cache/css/mobile.css").unwrap();
}

View file

@ -109,18 +109,6 @@ macro_rules! delete_request {
)
.await
};
($app:expr, $route:expr, $cookies:expr, $serializable:expr, FORM) => {
test::call_service(
&$app,
test::TestRequest::delete()
.uri($route)
.set_form($serializable)
.cookie($cookies)
.to_request(),
)
.await
};
}
#[macro_export]

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="52" height="52" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" id="icon" class="feather feather-eye-off"><path d="M17.94 17.94A10.07 10.07 0 0 1 12 20c-7 0-11-8-11-8a18.45 18.45 0 0 1 5.06-5.94M9.9 4.24A9.12 9.12 0 0 1 12 4c7 0 11 8 11 8a18.5 18.5 0 0 1-2.16 3.19m-6.72-1.07a3 3 0 1 1-4.24-4.24"></path><line x1="1" y1="1" x2="23" y2="23"></line></svg>


View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="52" height="52" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" id="icon" class="feather feather-eye"><path d="M1 12s4-8 11-8 11 8 11 8-4 8-11 8-11-8-11-8z"></path><circle cx="12" cy="12" r="3"></circle></svg>


View file

@ -4,9 +4,10 @@
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="{{ assets.css }}" />
<title>{% block title %} {% endblock %} | LibrePages</title>
<title>LibrePages</title>
<title>{% block title %} {% endblock %}</title>
</head>
<body class="default-body">
<body>
<header>{% block nav %} {% endblock %}</header>
{% block main %} {% endblock %}
{% include "footer" %}

View file

@ -2,7 +2,7 @@
<input type="checkbox" class="nav__toggle" id="nav__toggle" />
<div class="nav__header">
<a class="nav__logo-container" href="{{page.dash.home}}">
<a class="nav__logo-container" href="/">
<p class="nav__home-btn">LibrePages</p>
</a>
<label class="nav__hamburger-menu" for="nav__toggle">
@ -14,13 +14,13 @@
<div class="nav__link-group">
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ page.dash.site.add }}">New Site</a>
<a class="nav__link" rel="noreferrer" href="{{ page.gist.new }}">New Paste</a>
</div>
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ page.dash.forgejo_webhook.list }}">Webhooks</a>
</div>
{% if loggedin_user %}
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ loggedin_user }}">Profile</a>
</div>
{% endif %}
<div class="nav__link-container">
<a class="nav__link" rel="noreferrer" href="{{ page.auth.logout }}">Log out</a>
</div>

View file

@ -2,7 +2,7 @@
<input type="checkbox" class="nav__toggle" id="nav__toggle" />
<div class="nav__header">
<a class="nav__logo-container" href="{{ page.dash.home }}">
<a class="nav__logo-container" href="/">
<p class="nav__home-btn">LibrePages</p>
</a>
<label class="nav__hamburger-menu" for="nav__toggle">

View file

@ -1,5 +1,5 @@
@mixin fullscreen {
height: 100vh;
min-height: 500px;
// max-height: 800px;
max-height: 800px;
}

View file

@ -1,14 +1,5 @@
@import "defaults.scss";
@import "pages/auth/sass/main.scss";
@import "pages/auth/sass/form/main.scss";
@import "pages/dash/main.scss";
@import "pages/dash/sites/main.scss";
@import "components/sass/footer/main.scss";
@import "components/nav/sass/main.scss";
.default-body {
display: flex;
@include fullscreen;
flex-direction: column;
justify-content: space-between;
}

View file

@ -1,23 +0,0 @@
{% extends 'base' %}{% block title %} Add Forgejo Webhook{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<form class="auth-form" action="{{ page.dash.forgejo_webhook.add }}" method="POST">
<label class="auth-form__label" for="forgejo_url">
Forgejo instance URL
<input
type="url"
name="forgejo_url"
id="forgejo_url"
class="auth-form__input"
/>
</label>
<div class="auth-form__action-container">
<button class="auth-form__submit" type="submit">Add Webhook</button>
</div>
</form>
</div>
</main>
{% endblock main %}

View file

@ -1,33 +0,0 @@
{% extends 'base' %}{% block title %} Forgejo Webhooks{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="sites__collection">
<div class="sites__actions">
<a class="sites__actions__new-site" href="{{ page.dash.forgejo_webhook.add }}">
<button>Add New Forgejo Webhook</button>
</a>
</div>
{% if payload|length > 0 %}
{% for hook in payload %}
<a href="{{ hook.view }}" class="site__container">
<div class="site__info--head">
<img
class="site__container--preview"
src="{{ hook.webhook.forgejo_url }}/favicon.ico"
alt="Webhook icon"
/>
<div class="site__info--column">
<p><b>{{ hook.webhook.forgejo_url }}</b></p>
<p>Forgejo instance {{ hook.webhook.forgejo_url }}</p>
</div>
</div>
</a>
{% endfor %}
{% else %}
<p class="sites__banner">Nothing to show, click <a href="{{page.dash.forgejo_webhook.add}}">here</a> to add new Forgejo webhook!</p>
{% endif %}
</div>
</main>
{% endblock main %}

View file

@ -1,32 +0,0 @@
{% extends 'base' %}{% block title %} {{ payload.webhook.forgejo_url }}{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<section>
<table>
<tr>
<th>Webhook URL</th>
<td>{{ payload.url }}</td>
</tr>
<tr>
<th>Secret</th>
<td>
{% if show_forgejo_webhook_secret %}
{{ payload.webhook.forgejo_webhook_secret }} <a href="{{ payload.view }}">Hide</a>
{% else %}
****
<a href="{{ payload.view }}?show_forgejo_webhook_secret=true">
Show
</a>
{% endif %}
</td>
</tr>
</table>
</section>
</div>
</main>
{% endblock main %}

View file

@ -1,38 +1,160 @@
{% extends 'base' %}{% block title %} Dashboard{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="sites__collection">
<div class="sites__actions">
<a class="sites__actions__new-site" href="{{ page.dash.site.add }}">
<button>Add New Site</button>
</a>
</div>
{% if payload|length > 0 %}
{% for deployment in payload %}
<a href="{{ deployment.view }}" class="site__container">
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="{{ assets.css }}" />
<title>LibrePages</title>
</head>
<body class="auth__body">
<header>
<nav>
<p>LibrePages</p>
<span class="nav__spacer"></span>
<ul class="nav__links">
<li class="nav__item">Help</li>
<li class="nav__item">Settings</li>
<li class="nav__item">Logout</li>
</ul>
</nav>
</header>
<main>
<div class="sites__collection">
<div class="sites__actions">
<a class="sites__actions__new-site" href="/add/new/site">
<button>Add new site</button>
</a>
</div>
<a href="/sites/mcaptcha.org" class="site__container">
<div class="site__info--head">
<img
class="site__container--preview"
src="{{ deployment.site.hostname }}/favicon.ico"
alt="Website icon"
src="https://mcaptcha.org/favicon.ico"
/>
<div class="site__info--column">
<p><b>{{ deployment.site.hostname }}</b></p>
<p>Deploys from {{ deployment.site.repo_url }}</p>
<p href="https://mcaptcha.org"><b>mcaptcha.org</b></p>
<p>Deploys from {{ source_url }}</p>
</div>
</div>
{% if deployment.last_update %}
<div class="site__info--tail">
<p>Last update {{ deployment.last_update.time }}</p>
</div>
{% endif %}
</a>
{% endfor %}
{% else %}
<p class="sites__banner">Nothing to show, click <a href="{{page.dash.site.add}}">here</a> to get started!</p>
{% endif %}
</div>
</main>
<div class="site__info--tail">
<p>Last update {{ last_update }}</p>
</div></a
>
</div>
</main>
{% include "footer" %}
</body>
{% endblock main %}
<style>
header {
width: 100%;
}
nav {
width: 100%;
margin: auto;
display: flex;
}
.nav__spacer {
flex: 4;
}
.nav__links {
display: flex;
list-style: none;
}
.nav__item {
margin: 0 20px;
}
body {
display: flex;
flex-direction: column;
align-items: center;
}
main {
width: 100%;
margin: 20px;
}
.sites__collection {
margin: auto;
width: 70%;
border: 1px solid #e8ebed;
border-radius: 8px;
}
.sites__actions {
width: 100%;
height: 50px;
display: flex;
flex-direction: row-reverse;
align-items: center;
box-sizing: border-box;
padding: 0px 20px;
margin: 10px 0;
}
.sites__actions__new-site {
min-height: 36px;
background: green;
display: flex;
align-items: center;
padding: 0px 8px;
}
.sites__actions__new-site > button {
margin: 0;
padding: 0;
height: 100%;
border: none;
width: 100%;
color: white;
background: none;
}
.site__container {
box-sizing: border-box;
margin: 10px 0;
width: 100%;
display: flex;
justify-content: space-between;
padding: 10px 20px;
align-items: center;
}
.site__container:hover {
background: #f7f8f8;
}
.site__info--head {
display: flex;
align-items: center;
}
.site__info--column {
margin-left: 20px;
}
.site__info--column > p,
.site__info--column > a {
margin: 0;
padding: 0;
}
.site__container:visited,
.site__container {
color: black;
text-decoration: none;
}
.site__container--preview {
width: 50px;
height: 50px;
border-radius: 50%;
}
</style>
</html>

View file

@ -1,85 +0,0 @@
.sites__main {
width: 100%;
margin: 20px;
}
.sites__collection {
margin: auto;
width: 70%;
border: 1px solid #e8ebed;
border-radius: 8px;
}
.sites__actions {
width: 100%;
height: 50px;
display: flex;
flex-direction: row-reverse;
align-items: center;
box-sizing: border-box;
padding: 0px 20px;
margin: 10px 0;
}
.sites__actions__new-site {
min-height: 36px;
background: green;
display: flex;
align-items: center;
padding: 0px 8px;
}
.sites__actions__new-site > button {
margin: 0;
padding: 0;
height: 100%;
border: none;
width: 100%;
color: white;
background: none;
}
.site__container {
box-sizing: border-box;
margin: 10px 0;
width: 100%;
display: flex;
justify-content: space-between;
padding: 10px 20px;
align-items: center;
}
.site__container:hover {
background: #f7f8f8;
}
.site__info--head {
display: flex;
align-items: center;
}
.site__info--column {
margin-left: 20px;
}
.site__info--column > p,
.site__info--column > a {
margin: 0;
padding: 0;
}
.site__container:visited,
.site__container {
color: black;
text-decoration: none;
}
.site__container--preview {
width: 50px;
height: 50px;
border-radius: 50%;
}
.sites__banner {
text-align: center;
}

View file

@ -1,29 +0,0 @@
{% extends 'base' %}{% block title %} Add Site{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<form class="auth-form" action="{{ page.dash.site.add }}" method="POST">
<label class="auth-form__label" for="repo_url">
Repository URL
<input
type="url"
name="repo_url"
id="repo_url"
class="auth-form__input"
/>
</label>
<label class="auth-form__label" for="branch">
Deployment branch
<input type="text" name="branch" id="branch" class="auth-form__input" />
</label>
<div class="auth-form__action-container">
<button class="auth-form__submit" type="submit">Add Site</button>
</div>
</form>
</div>
</main>
{% endblock main %}

View file

@ -1,37 +0,0 @@
{% extends 'base' %}{% block title %} Delete {{ payload.site.hostname }}{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<form class="auth-form" action="{{ payload.delete }}" method="post">
<h1>Confirm access</h1>
<p>
Please confirm access to your account to delete deployment at
<a href="http://{{ payload.site.hostname }}">
{{ payload.site.hostname }}
</a>
</p>
<label class="auth-form__label" for="password">
Password
<input
class="auth-form__input"
name="password"
required
id="password"
type="password"
{% if payload.password %}
value={{ payload.password }}
{% endif %}
/>
</label>
<div class="auth-form__action-container">
<button class="auth-form__submit--danger" type="submit">Delete</button>
</div>
</form>
</div>
</main>
{% endblock main %}

View file

@ -1,4 +0,0 @@
.add-site__container {
max-width: 50%;
margin: auto;
}

View file

@ -1,72 +0,0 @@
{% extends 'base' %}{% block title %} {{ payload.site.hostname }}{% endblock title %} {% block nav
%} {% include "auth_nav" %} {% endblock nav %} {% block main %}
<main class="sites__main">
<div class="add-site__container">
<section>
<table>
<tr>
<th>Hostname</th>
<td>{{ payload.site.hostname }}</td>
</tr>
<tr>
<th>Repository</th>
<td>{{ payload.site.repo_url }}</td>
</tr>
<tr>
<th>Secret</th>
<td>
{% if show_deploy_secret %}
{{ payload.site.site_secret }} <a href="{{ payload.view }}">Hide</a>
{% else %}
****
<a href="{{ payload.view }}?show_deploy_secret=true">
Show
</a>
{% endif %}
</td>
</tr>
<tr>
<th>Branch</th>
<td>{{ payload.site.branch }}</td>
</tr>
<tr>
<th>Last Updated</th>
{% if payload.last_updated %}
<td>{{ payload.last_updated.time }}</td>
{% else %}
<td>N/A</td>
{% endif %}
</tr>
</table>
<a href="{{ payload.delete }}"><button>Delete</button></a>
</section>
<section>
<h2>Events</h2>
<table>
<thead>
<tr>
<th>ID</th>
<th>Event Type</th>
<th>Time</th>
</tr>
</thead>
<tbody>
{% for event in payload.events %}
<tr>
<td>{{ event.id }}</td>
<td>{{ event.event_type.name }}</td>
<td>{{ event.time }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</section>
</div>
</main>
{% endblock main %}

templates/pages/index.html Normal file
View file

@ -0,0 +1,138 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="{{ assets.css }}" />
<title>LibrePages</title>
</head>
<body class="auth__body">
<header>
<nav>
<p>LibrePages</p>
<span class="nav__spacer"></span>
<ul class="nav__links">
<li class="nav__item">Help</li>
</ul>
</nav>
</header>
<main>
<h1>LibrePages: FOSS static site hosting</h1>
<p>Welcome to LibrePages. Homepage.</p>
</main>
{% include "footer" %}
</body>
<style>
header {
width: 100%;
}
nav {
width: 100%;
margin: auto;
display: flex;
}
.nav__spacer {
flex: 4;
}
.nav__links {
display: flex;
list-style: none;
}
.nav__item {
margin: 0 20px;
}
body {
display: flex;
flex-direction: column;
align-items: center;
}
main {
width: 100%;
margin: 20px;
}
.sites__collection {
margin: auto;
width: 70%;
border: 1px solid #e8ebed;
border-radius: 8px;
}
.sites__actions {
width: 100%;
height: 50px;
display: flex;
flex-direction: row-reverse;
align-items: center;
box-sizing: border-box;
padding: 0px 20px;
margin: 10px 0;
}
.sites__actions__new-site {
min-height: 36px;
background: green;
display: flex;
align-items: center;
padding: 0px 8px;
}
.sites__actions__new-site > button {
margin: 0;
padding: 0;
height: 100%;
border: none;
width: 100%;
color: white;
background: none;
}
.site__container {
box-sizing: border-box;
margin: 10px 0;
width: 100%;
display: flex;
justify-content: space-between;
padding: 10px 20px;
align-items: center;
}
.site__container:hover {
background: #f7f8f8;
}
.site__info--head {
display: flex;
align-items: center;
}
.site__info--column {
margin-left: 20px;
}
.site__info--column > p,
.site__info--column > a {
margin: 0;
padding: 0;
}
.site__container:visited,
.site__container {
color: black;
text-decoration: none;
}
.site__container--preview {
width: 50px;
height: 50px;
border-radius: 50%;
}
</style>
</html>

View file

@ -1,18 +0,0 @@
domains = [
"example.org",
"example.com",
]
redirects = [
{from = "/from1", to = "/to1"},
{from = "/from2", to = "/to2"},
]
[source]
production_branch = "librepages"
staging = "beta"
[forms]
enable = true
[image_compression]
enable = true

View file

@ -167,15 +167,6 @@ version = "0.2.125"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
[[package]]
name = "librepages"
version = "0.1.0"
dependencies = [
"cache-buster",
"serde",
"serde_json",
]
[[package]]
name = "mime"
version = "0.3.16"
@ -192,6 +183,15 @@ dependencies = [
"unicase",
]
[[package]]
name = "pages"
version = "0.1.0"
dependencies = [
"cache-buster",
"serde",
"serde_json",
]
[[package]]
name = "proc-macro2"
version = "1.0.38"

View file

@ -1,11 +1,11 @@
[package]
name = "librepages"
name = "pages"
version = "0.1.0"
edition = "2021"
homepage = "https://git.batsense.net/LibrePages/librepages"
repository = "https://git.batsense.net/LibrePages/librepages"
documentation = "https://github.con/realaravinth/librepages"
readme = "https://git.batsense.net/LibrePages/librepages/blob/master/README.md"
homepage = "https://github.com/realaravinth/pages"
repository = "https://github.com/realaravinth/pages"
documentation = "https://github.con/realaravinth/pages"
readme = "https://github.com/realaravinth/pages/blob/master/README.md"
license = "AGPLv3 or later version"
authors = ["realaravinth <realaravinth@batsense.net>"]