Compare commits

..

1 commit

Author SHA1 Message Date
Aravinth Manivannan 77d4720e7d
feat: actix-web and tokio_tungstenite based webrtc impl
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-19 17:43:34 +05:30
107 changed files with 2173 additions and 9886 deletions

1
.gitignore vendored
View file

@ -156,4 +156,3 @@ keys
htmlcov/
tmp/
static/
nohup.out

View file

@ -2,26 +2,11 @@ steps:
backend:
image: rust
commands:
- apt update
- apt-get install -y --no-install-recommends protobuf-compiler
- cargo build
- cargo test --lib
# - make migrate
# - make
# - make release
# - make test // requires Docker-in-Docker
integration_tests:
image: python
commands:
- pip install virtualenv && virtualenv venv
- . venv/bin/activate && pip install -r requirements.txt
- nohup ./target/debug/main --id 1 --http-addr 127.0.0.1:9001 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
- sleep 1
- nohup ./target/debug/main --id 2 --http-addr 127.0.0.1:9002 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
- sleep 1
- nohup ./target/debug/main --id 3 --http-addr 127.0.0.1:9003 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
- mv dcache_py/ tests/
- . venv/bin/activate && python tests/test.py
build_docker_img:
image: plugins/docker

1926
Cargo.lock generated

File diff suppressed because it is too large

View file

@ -2,7 +2,6 @@
name = "dcache"
version = "0.1.0"
edition = "2021"
build = "build.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,40 +12,38 @@ openraft = { version = "0.8.8", features = ["serde", "single-term-leader"]}
#libmcaptcha = { path="/src/atm/code/mcaptcha/libmcaptcha", features=["full"] }
libmcaptcha = { git = "https://github.com/mcaptcha/libmcaptcha", branch = "feat-dcache", features = ["full"]}
tracing = { version = "0.1.37", features = ["log"] }
serde_json = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.96"
serde = { version = "1.0.163", features = ["derive"] }
byteorder = "1.4.3"
actix-web = "4"
actix-web-httpauth = "0.8.0"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
lazy_static = "1.4.0"
pretty_env_logger = "0.4.0"
uuid = { version = "1", features = ["v4"] }
actix-web-codegen-const-routes = { version = "0.1.0", tag = "0.1.0", git = "https://github.com/realaravinth/actix-web-codegen-const-routes" }
derive_builder = "0.11.2"
config = { version = "0.11", features = ["toml"] }
derive_more = "0.99.17"
url = { version = "2.2.2", features = ["serde"]}
async-trait = "0.1.36"
clap = { version = "4.1.11", features = ["derive", "env"] }
tokio = { version = "1.0", default-features = false, features = ["sync", "macros", "rt-multi-thread", "time"] }
reqwest = { version = "0.11.9", features = ["json"] }
tokio = { version = "1.0", default-features = false, features = ["sync"] }
tracing-subscriber = { version = "0.3.0", features = ["env-filter"] }
actix = "0.13.0"
tonic = { version = "0.10.2", features = ["transport", "channel"] }
prost = "0.12.3"
tokio-stream = "0.1.14"
async-stream = "0.3.5"
actix-rt = "2.9.0"
futures = "0.3.30"
tower-service = "0.3.2"
dashmap = { version = "5.5.3", features = ["serde"] }
actix-web-actors = "4.2.0"
awc = "3.2.0"
futures = "0.3.29"
actix-codec = "0.5.1"
tokio-tungstenite = "0.21.0"
[build-dependencies]
serde_json = "1"
tonic-build = "0.10.2"
[dev-dependencies]
actix-rt = "2.7.0"
base64 = "0.13.0"
anyhow = "1.0.63"
maplit = "1.0.2"
#[profile.release]
#debug = true

View file

@ -4,7 +4,6 @@
FROM rust:latest as rust
WORKDIR /src
RUN apt update && apt-get install -y --no-install-recommends protobuf-compiler
COPY . .
RUN cargo build --release

View file

@ -1,6 +0,0 @@
python.gen:
. venv/bin/activate && cd dcache_py
python -m grpc_tools.protoc \
-I=./proto/dcache/ --python_out=dcache_py/ \
--pyi_out=dcache_py/ \
--grpc_python_out=dcache_py/ ./proto/dcache/dcache.proto

View file

@ -1,44 +0,0 @@
[![status-badge](https://ci.batsense.net/api/badges/105/status.svg)](https://ci.batsense.net/repos/105)
---
# dcache: Distributed, Highly Available cache implementation for mCaptcha
## Overview
- Uses Raft consensus algorithm via [openraft](https://crates.io/crates/openraft)
- GRPC via [tonic](https://crates.io/crates/tonic)
## Tips
We recommend running at least three instances of dcache in your
deployment.
**NOTE: Catastrophic failure will occur when n/2 + 1 instances are
down (for example, 2 of 3 nodes), since the cluster loses its quorum.**
## Usage
## Firewall configuration
dcache uses a single, configurable port for both server-to-server and client-to-server
communications. Please open that port on your server.
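For example, if dcache is configured to listen on port 9001 (the port used in the examples below), open it with whichever firewall frontend your host uses; a minimal sketch, assuming ufw:

```bash
# assumption: dcache listens on 9001 and the host uses ufw
sudo ufw allow 9001/tcp
```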
## Launch
```bash
dcache --id 1 \
--http-addr 127.0.0.1:9001 \
--introducer-addr 127.0.0.1:9001 \
--introducer-id 1 \
--cluster-size 3
```
### Options
| Name | Purpose |
| ----------------- | ----------------------------------------------------------- |
| --id | Unique integer to identify node in network |
| --http-addr | Socket address to bind and listen for connections |
| --introducer-addr | Socket address of introducer node; required to join network |
| --introducer-id | ID of the introducer node; required to join network |
| --cluster-size | Total size of the cluster |
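Following the flags above, a local three-node cluster can be brought up roughly the way the CI integration tests do it; this is a sketch, assuming the binary is on `PATH` as `dcache` and node 1 acts as its own introducer:

```bash
# node 1 introduces itself; nodes 2 and 3 join the cluster through it
dcache --id 1 --http-addr 127.0.0.1:9001 \
  --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
sleep 1
dcache --id 2 --http-addr 127.0.0.1:9002 \
  --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
sleep 1
dcache --id 3 --http-addr 127.0.0.1:9003 \
  --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
```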

View file

@ -1,64 +0,0 @@
import time
from typing import Any, Callable
import grpc
import grpc.experimental.gevent as grpc_gevent
from grpc_interceptor import ClientInterceptor
from locust import User
from locust.exception import LocustError
# patch grpc so that it uses gevent instead of asyncio
grpc_gevent.init_gevent()
class LocustInterceptor(ClientInterceptor):
def __init__(self, environment, *args, **kwargs):
super().__init__(*args, **kwargs)
self.env = environment
def intercept(
self,
method: Callable,
request_or_iterator: Any,
call_details: grpc.ClientCallDetails,
):
response = None
exception = None
start_perf_counter = time.perf_counter()
response_length = 0
try:
response = method(request_or_iterator, call_details)
response_length = response.result().ByteSize()
except grpc.RpcError as e:
exception = e
self.env.events.request.fire(
request_type="grpc",
name=call_details.method,
response_time=(time.perf_counter() - start_perf_counter) * 1000,
response_length=response_length,
response=response,
context=None,
exception=exception,
)
return response
class GrpcUser(User):
abstract = True
stub_class = None
def __init__(self, environment):
super().__init__(environment)
for attr_value, attr_name in (
(self.host, "host"),
(self.stub_class, "stub_class"),
):
if attr_value is None:
raise LocustError(f"You must specify the {attr_name}.")
self._channel = grpc.insecure_channel(self.host)
interceptor = LocustInterceptor(environment=environment)
self._channel = grpc.intercept_channel(self._channel, interceptor)
self.stub = self.stub_class(self._channel)

View file

@ -1,69 +1,95 @@
import json
import time
import grpc
import gevent
from pprint import pprint
from locust import FastHttpUser, between, task, events
from locust import FastHttpUser, between, task
from dcache_py import dcache_pb2 as dcache
from dcache_py.dcache_pb2 import RaftRequest
from dcache_py.dcache_pb2_grpc import DcacheServiceStub
import adaptor
host = "localhost:9001"
captcha_id = "locust"
password = "fooobarasdfasdf"
username = "realaravinth"
def add_captcha(stub: DcacheServiceStub, captcha_id: str):
msg = dcache.AddCaptchaRequest(
id=captcha_id,
mcaptcha=dcache.MCaptcha(
duration=30,
defense=dcache.Defense(
levels=[
dcache.Level(visitor_threshold=50, difficulty_factor=500),
dcache.Level(visitor_threshold=5000, difficulty_factor=50000),
]
),
),
)
resp = stub.AddCaptcha(msg)
pprint(f"Captcha added {captcha_id}: {resp}")
class Unprotected(FastHttpUser):
# wait_time = between(5, 15)
peers = [
"http://localhost:9001",
"http://localhost:9002",
"http://localhost:9003",
]
leader = "http://localhost:9001"
host = leader
captcha_id="locust"
# pipeline_vote = []
# for _ in range(0,100):
# pipeline_vote.append({"AddVisitor": captcha_id})
# def on_start(self):
# resp = self.client.get(f"{self.leader}/metrics")
# data = resp.json()
# leader = data["Ok"]["membership_config"]["log_id"]["leader_id"]["node_id"]
# self.leader = self.peers[leader - 1]
# self.host = self.leader
# print(f"Leader: {self.host}")
# self.add_captcha(captcha_id="locust")
with grpc.insecure_channel(host) as channel:
stub = DcacheServiceStub(channel)
add_captcha(stub=stub, captcha_id=captcha_id)
def write(self, data):
resp = self.client.post(f"{self.host}/write", json=data)
# print(f"RPC Status: {resp.status_code}")
resp = resp.json()
if "Err" in resp:
leader = resp["Err"]["APIError"]["ForwardToLeader"]["leader_node"]["addr"]
print(f"Forwarding write to leader {leader}")
return write(leader, data)
return resp["Ok"]["data"]
def pipeline_write(self, data):
resp = self.client.post(f"{self.host}/pipeline/write", json=data)
# print(f"RPC Status: {resp.status_code}")
resp = resp.json()
if "Err" in resp:
leader = resp["Err"]["APIError"]["ForwardToLeader"]["leader_node"]["addr"]
print(f"Forwarding write to leader {leader}")
return write(leader, data)
return resp
pipeline_msgs = []
for _ in range(0,10):
pipeline_msgs.append(dcache.DcacheRequest(addVisitor=dcache.CaptchaID(id=captcha_id)))
pipeline_msgs = dcache.DcacheBatchRequest(requests=pipeline_msgs)
#def pipeline_generate_messages():
# for msg in pipeline_msgs:
# yield msg
class HelloGrpcUser(adaptor.GrpcUser):
stub_class = DcacheServiceStub
host = host
captcha_id = captcha_id
msg = dcache.CaptchaID(id=captcha_id)
def add_vote(self, captcha_id: str):
resp = self.stub.AddVisitor(self.msg)
resp = self.write(data={"AddVisitor": captcha_id})
# pprint(resp)
def add_vote_pipeline(self):
res = self.stub.PipelineDcacheOps(pipeline_msgs)
def add_vote_pipeline(self, captcha_id: str):
resp = self.pipeline_write(data=self.pipeline_vote)
# pprint(resp)
def add_captcha(self, captcha_id: str):
params = {
"AddCaptcha": {
"id": captcha_id,
"mcaptcha": {
"visitor_threshold": 0,
"defense": {
"levels": [
{"visitor_threshold": 50, "difficulty_factor": 500},
{"visitor_threshold": 5000, "difficulty_factor": 50000},
],
"current_visitor_threshold": 0,
},
"duration": 30,
},
}
}
resp = self.write(data=params)
pprint(f"Captcha added {captcha_id}: {resp}")
# @task
# def addVote(self):
# self.add_vote(self.captcha_id)
@task
def addVotePipeline(self):
self.add_vote_pipeline()
def unprotected(self):
#self.add_vote_pipeline(captcha_id=self.captcha_id)
self.add_vote(captcha_id="locust")
# data = {
# "username": username,
# "password": username,
# "confirm_password": username,
# }
# self.client.post("/unprotected", data=data)

View file

@ -1,212 +0,0 @@
# Benchmark Report
Benchmarks were run at various stages of development to keep track of
performance. Tech stacks were changed and the implementation was optimized
to increase throughput. This report summarizes the findings of those
benchmarks.
Ultimately, we were able to identify a bottleneck that was previously
hidden in mCaptcha (hidden because another bottleneck, such as DB access,
eclipsed it :p) [and were able to increase performance of the critical
path by ~147 times](https://git.batsense.net/mCaptcha/dcache/pulls/3)
through a trivial optimization.
## Environment
These benchmarks were run on a noisy development laptop and should be
used for guidance only.
- CPU: AMD Ryzen 5 5600U with Radeon Graphics (12) @ 4.289GHz
- Memory: 22849MiB
- OS: Arch Linux x86_64
- Kernel: 6.6.7-arch1-1
- rustc: 1.73.0 (cc66ad468 2023-10-03)
## Baseline: Tech stack version 1
Actix Web-based networking with JSON as the message format. It was chosen
for prototyping, and was later used to set the baseline.
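For orientation, the two request shapes the load tests exercise against this version look roughly as follows; the endpoints and bodies are inferred from `locustfile.py`, so treat them as illustrative rather than a stable API:

```bash
# single application request: one write per HTTP request
curl -X POST http://localhost:9001/write \
  -H 'Content-Type: application/json' \
  -d '{"AddVisitor": "locust"}'

# batched (pipelined) requests: one HTTP request carrying many operations
curl -X POST http://localhost:9001/pipeline/write \
  -H 'Content-Type: application/json' \
  -d '[{"AddVisitor": "locust"}, {"AddVisitor": "locust"}]'
```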
## Without connection pooling in server-to-server communications
### Single requests (no batching)
<details>
<summary>Peak throughput observed was 1,117 requests/second (please click
to see the charts)</summary>
#### Total number of requests vs time
![number of requests](./v1/nopooling/nopipelining/total_requests_per_second_1703969194.png)
#### Response times (ms) vs time
![response times (ms)](<./v1/nopooling/nopipelining/response_times_(ms)_1703969194.png>)
#### Number of concurrent users vs time
![number of concurrent
users](./v1/nopooling/nopipelining/number_of_users_1703969194.png)
</details>
### Batched requests
<details>
<summary>
Each network request contained 1,000 application requests, so peak throughput observed was 1,800 requests/second.
Please click to see the charts.</summary>
#### Total number of requests vs time
![number of requests](./v1/pooling/pipelining/total_requests_per_second_1703968582.png)
#### Response times (ms) vs time
![response times (ms)](<./v1/pooling/pipelining/response_times_(ms)_1703968582.png>)
#### Number of concurrent users vs time
![number of concurrent
users](./v1/pooling/pipelining/number_of_users_1703968582.png)
</details>
## With connection pooling in server-to-server communications
### Single requests (no batching)
<details>
<summary>
Peak throughput observed was 3,904 requests/second. Please click to see
the charts.</summary>
#### Total number of requests vs time
![number of requests](./v1/pooling/nopipelining/total_requests_per_second_1703968214.png)
#### Response times (ms) vs time
![response times (ms)](<./v1/pooling/nopipelining/response_times_(ms)_1703968215.png>)
#### Number of concurrent users vs time
![number of concurrent
users](./v1/pooling/nopipelining/number_of_users_1703968215.png)
</details>
### Batched requests
<details>
<summary>
Each network request contained 1,000 application requests, so peak throughput observed was 15,800 requests/second.
Please click to see charts.
</summary>
#### Total number of requests vs time
![number of requests](./v1/pooling/pipelining/total_requests_per_second_1703968582.png)
#### Response times (ms) vs time
![response times (ms)](<./v1/pooling/pipelining/response_times_(ms)_1703968582.png>)
#### Number of concurrent users vs time
![number of concurrent
users](./v1/pooling/pipelining/number_of_users_1703968582.png)
</details>
## Tech stack version 2
Tonic for the network stack and gRPC as the wire format. We ran over a
dozen benchmarks with this tech stack. The trend was similar to the one
observed above: throughput was higher when connection pooling was used and
even higher when requests were batched. _But_ the throughput of all of these
benchmarks was lower than the baseline benchmarks!
The CPU was busier. We put it through
[flamegraph](https://github.com/flamegraph-rs/flamegraph) and hit it with
the same test suite to identify compute-heavy areas. The result was
unexpected:
![flamegraph indicating libmcaptcha being
slow](./v2/libmcaptcha-bottleneck/problem/flamegraph.svg)
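A profile like the one above can be captured with [cargo-flamegraph](https://github.com/flamegraph-rs/flamegraph); the exact invocation used for these runs was not recorded, so the following is only a sketch (binary name taken from the CI pipeline, flags as in the launch example):

```bash
# assumes `cargo install flamegraph` and perf are available
cargo flamegraph --bin main -- \
  --id 1 --http-addr 127.0.0.1:9001 \
  --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3
# drive load with the locust/ghz suite, stop the node, then open flamegraph.svg
```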
libmCaptcha's [AddVisitor
handler](https://github.com/mCaptcha/libmcaptcha/blob/e3f456f35b2c9e55e0475b01b3e05d48b21fd51f/src/master/embedded/counter.rs#L124)
was taking up 59% of the CPU time of the entire test run. It is a
critical part of the variable-difficulty-factor PoW algorithm that
mCaptcha uses. We never ran into this bottleneck before because, in other
cache implementations, it was always preceded by a database request.
It surfaced here because dcache uses in-memory data sources.
libmCaptcha uses an actor-based approach with message passing for clean
concurrent state management. Message passing generally performs well, but
in our case, sharing memory using the CPU's concurrency primitives turned
out to be significantly faster:
![flamegraph after replacing message passing with shared
memory](./v2/libmcaptcha-bottleneck/solution/flamegraph.svg)
CPU time was reduced from 59% to 0.4%, roughly a 147-fold reduction!
With this fix in place:
### Connection pooled server-to-server communications, single requests (no batching)
Peak throughput observed was 4,816 requests/second, ~1,000 requests/second
more than the baseline.
#### Total number of requests vs time
![number of requests](./v2/grpc-conn-pool-post-bottleneck/single/total_requests_per_second_1703970940.png)
#### Response times (ms) vs time
![response times (ms)](./v2/grpc-conn-pool-post-bottleneck/single/response_times_(ms)_1703970940.png)
#### Number of concurrent users vs time
![number of concurrent
users](./v2/grpc-conn-pool-post-bottleneck/single/number_of_users_1703970940.png)
### Connection pooled server-to-server communications, batched requests
Each network request contained 1,000 application requests, so peak throughput observed was 95,700 requests/second. This is six times higher than the baseline.
#### Total number of requests vs time
![number of requests](./v2/grpc-conn-pool-post-bottleneck/pipeline/total_requests_per_second_1703971082.png)
#### Response times (ms) vs time
![response times (ms)](./v2/grpc-conn-pool-post-bottleneck/pipeline/response_times_(ms)_1703971082.png)
#### Number of concurrent users vs time
![number of concurrent
users](./v2/grpc-conn-pool-post-bottleneck/pipeline/number_of_users_1703971082.png)

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info">
<p>During: <span class="l10n datetime">2023-12-26 13:03:21</span> - <span class="l10n datetime">2023-12-26 13:03:59</span></p>
<p>During: <span class="l10n datetime">2023-12-17 14:30:25</span> - <span class="l10n datetime">2023-12-17 14:32:43</span></p>
<p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p>
</div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/write</td>
<td>148740</td>
<td>348265</td>
<td>0</td>
<td>7</td>
<td>133</td>
<td>0</td>
<td>485</td>
<td>544</td>
<td>98</td>
<td>3904.8</td>
<td>2517.2</td>
<td>0.0</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>148740</td>
<td>348265</td>
<td>0</td>
<td>7</td>
<td>133</td>
<td>0</td>
<td>485</td>
<td>544</td>
<td>98</td>
<td>3904.8</td>
<td>2517.2</td>
<td>0.0</td>
</tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/write</td>
<td>6</td>
<td>7</td>
<td>9</td>
<td>10</td>
<td>12</td>
<td>15</td>
<td>25</td>
<td>480</td>
<td>130</td>
<td>150</td>
<td>160</td>
<td>190</td>
<td>310</td>
<td>350</td>
<td>390</td>
<td>540</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>6</td>
<td>7</td>
<td>9</td>
<td>10</td>
<td>12</td>
<td>15</td>
<td>25</td>
<td>480</td>
<td>130</td>
<td>150</td>
<td>160</td>
<td>190</td>
<td>310</td>
<td>350</td>
<td>390</td>
<td>540</td>
</tr>
</tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script>
var stats_history = {
"time": ["13:03:24", "13:03:29", "13:03:34", "13:03:39", "13:03:44", "13:03:49", "13:03:54"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 200}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}],
"current_rps": [{"users": 200, "value": 1874.0}, {"users": 400, "value": 3968.5}, {"users": 400, "value": 4127.4}, {"users": 400, "value": 3895.7}, {"users": 400, "value": 3895.4}, {"users": 400, "value": 3811.2}, {"users": 400, "value": 3801.2}],
"current_fail_per_sec": [{"users": 200, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}],
"response_time_percentile_1": [{"users": 200, "value": 5}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}],
"response_time_percentile_2": [{"users": 200, "value": 11}, {"users": 400, "value": 14}, {"users": 400, "value": 15}, {"users": 400, "value": 15}, {"users": 400, "value": 16}, {"users": 400, "value": 16}, {"users": 400, "value": 15}],
"time": ["14:30:27", "14:30:32", "14:30:37", "14:30:42", "14:30:47", "14:30:52", "14:30:57", "14:31:02", "14:31:07", "14:31:12", "14:31:17", "14:31:22", "14:31:27", "14:31:32", "14:31:37", "14:31:42", "14:31:47", "14:31:52", "14:31:57", "14:32:02", "14:32:07", "14:32:12", "14:32:17", "14:32:22", "14:32:27", "14:32:32", "14:32:37", "14:32:42"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 20}, {"value": 70}, {"value": 120}, {"value": 170}, {"value": 220}, {"value": 270}, {"value": 320}, {"value": 200}, {"value": 200}, {"value": 450}, {"value": 500}, {"value": 500}, {"value": 500}, {"value": 500}, {"value": 500}, {"value": 450}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 550}, {"value": 800}, {"value": 1000}, {"value": 1000}, {"value": 1000}, {"value": 1000}],
"current_rps": [{"users": 20, "value": 0}, {"users": 70, "value": 2461.4}, {"users": 120, "value": 2658.2}, {"users": 170, "value": 2756.2}, {"users": 220, "value": 2621.4}, {"users": 270, "value": 2564.6}, {"users": 320, "value": 2504.7}, {"users": 200, "value": 2470.7}, {"users": 200, "value": 2534.8}, {"users": 450, "value": 2588.5}, {"users": 500, "value": 2507.5}, {"users": 500, "value": 2472.9}, {"users": 500, "value": 2518.8}, {"users": 500, "value": 2491.6}, {"users": 500, "value": 2455.1}, {"users": 450, "value": 2479.9}, {"users": 400, "value": 2495.8}, {"users": 400, "value": 2513.2}, {"users": 400, "value": 2533.0}, {"users": 400, "value": 2482.6}, {"users": 400, "value": 2492.1}, {"users": 400, "value": 2496.6}, {"users": 550, "value": 2471.3}, {"users": 800, "value": 2440.5}, {"users": 1000, "value": 2363.0}, {"users": 1000, "value": 2438.9}, {"users": 1000, "value": 2390.1}, {"users": 1000, "value": 2401.2}],
"current_fail_per_sec": [{"users": 20, "value": 0}, {"users": 70, "value": 0}, {"users": 120, "value": 0}, {"users": 170, "value": 0}, {"users": 220, "value": 0}, {"users": 270, "value": 0}, {"users": 320, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 450, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 450, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 550, "value": 0}, {"users": 800, "value": 0}, {"users": 1000, "value": 0}, {"users": 1000, "value": 0}, {"users": 1000, "value": 0}, {"users": 1000, "value": 0}],
"response_time_percentile_1": [{"users": 20, "value": 4}, {"users": 70, "value": 11}, {"users": 120, "value": 20}, {"users": 170, "value": 40}, {"users": 220, "value": 58}, {"users": 270, "value": 75}, {"users": 320, "value": 93}, {"users": 200, "value": 94}, {"users": 200, "value": 70}, {"users": 450, "value": 74}, {"users": 500, "value": 140}, {"users": 500, "value": 170}, {"users": 500, "value": 170}, {"users": 500, "value": 170}, {"users": 500, "value": 170}, {"users": 450, "value": 170}, {"users": 400, "value": 150}, {"users": 400, "value": 140}, {"users": 400, "value": 140}, {"users": 400, "value": 140}, {"users": 400, "value": 130}, {"users": 400, "value": 140}, {"users": 550, "value": 140}, {"users": 800, "value": 170}, {"users": 1000, "value": 260}, {"users": 1000, "value": 330}, {"users": 1000, "value": 340}, {"users": 1000, "value": 340}],
"response_time_percentile_2": [{"users": 20, "value": 9}, {"users": 70, "value": 23}, {"users": 120, "value": 42}, {"users": 170, "value": 62}, {"users": 220, "value": 82}, {"users": 270, "value": 100}, {"users": 320, "value": 120}, {"users": 200, "value": 130}, {"users": 200, "value": 110}, {"users": 450, "value": 140}, {"users": 500, "value": 200}, {"users": 500, "value": 210}, {"users": 500, "value": 200}, {"users": 500, "value": 210}, {"users": 500, "value": 210}, {"users": 450, "value": 200}, {"users": 400, "value": 200}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 550, "value": 180}, {"users": 800, "value": 280}, {"users": 1000, "value": 360}, {"users": 1000, "value": 390}, {"users": 1000, "value": 400}, {"users": 1000, "value": 390}],
"markers": [],
};
var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,348265,0,130,133.02968716351054,0,544,98.68140640029863,2517.2083812580477,0.0,130,160,180,190,310,350,370,390,420,530,540
,Aggregated,348265,0,130,133.02968716351054,0,544,98.68140640029863,2517.2083812580477,0.0,130,160,180,190,310,350,370,390,420,530,540

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info">
<p>During: <span class="l10n datetime">2023-12-26 13:14:12</span> - <span class="l10n datetime">2023-12-26 13:15:15</span></p>
<p>During: <span class="l10n datetime">2023-12-18 20:08:43</span> - <span class="l10n datetime">2023-12-18 20:09:30</span></p>
<p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p>
</div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/write</td>
<td>70718</td>
<td>47348</td>
<td>0</td>
<td>303</td>
<td>344</td>
<td>5</td>
<td>939</td>
<td>1034</td>
<td>97</td>
<td>1117.2</td>
<td>993.0</td>
<td>0.0</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>70718</td>
<td>47348</td>
<td>0</td>
<td>303</td>
<td>344</td>
<td>5</td>
<td>939</td>
<td>1034</td>
<td>97</td>
<td>1117.2</td>
<td>993.0</td>
<td>0.0</td>
</tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/write</td>
<td>300</td>
<td>320</td>
<td>350</td>
<td>390</td>
<td>380</td>
<td>400</td>
<td>430</td>
<td>470</td>
<td>550</td>
<td>940</td>
<td>480</td>
<td>520</td>
<td>660</td>
<td>1000</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>300</td>
<td>320</td>
<td>350</td>
<td>390</td>
<td>380</td>
<td>400</td>
<td>430</td>
<td>470</td>
<td>550</td>
<td>940</td>
<td>480</td>
<td>520</td>
<td>660</td>
<td>1000</td>
</tr>
</tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script>
var stats_history = {
"time": ["13:14:16", "13:14:21", "13:14:26", "13:14:31", "13:14:36", "13:14:41", "13:14:46", "13:14:51", "13:14:56", "13:15:01", "13:15:06", "13:15:11"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 250}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}],
"current_rps": [{"users": 250, "value": 707.5}, {"users": 400, "value": 832.8571428571429}, {"users": 400, "value": 1047.2}, {"users": 400, "value": 1291.9}, {"users": 400, "value": 1333.4}, {"users": 400, "value": 1302.2}, {"users": 400, "value": 1211.4}, {"users": 400, "value": 1040.7}, {"users": 400, "value": 1094.1}, {"users": 400, "value": 1131.6}, {"users": 400, "value": 1027.1}, {"users": 400, "value": 1027.8}],
"current_fail_per_sec": [{"users": 250, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}],
"response_time_percentile_1": [{"users": 250, "value": 140}, {"users": 400, "value": 250}, {"users": 400, "value": 280}, {"users": 400, "value": 260}, {"users": 400, "value": 280}, {"users": 400, "value": 280}, {"users": 400, "value": 320}, {"users": 400, "value": 350}, {"users": 400, "value": 320}, {"users": 400, "value": 330}, {"users": 400, "value": 360}, {"users": 400, "value": 380}],
"response_time_percentile_2": [{"users": 250, "value": 250}, {"users": 400, "value": 450}, {"users": 400, "value": 430}, {"users": 400, "value": 340}, {"users": 400, "value": 360}, {"users": 400, "value": 380}, {"users": 400, "value": 440}, {"users": 400, "value": 450}, {"users": 400, "value": 470}, {"users": 400, "value": 480}, {"users": 400, "value": 490}, {"users": 400, "value": 500}],
"time": ["20:08:45", "20:08:50", "20:08:55", "20:09:00", "20:09:05", "20:09:10", "20:09:15", "20:09:20", "20:09:25", "20:09:30"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 100}, {"value": 350}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}],
"current_rps": [{"users": 100, "value": 0}, {"users": 350, "value": 758.25}, {"users": 400, "value": 883.4}, {"users": 400, "value": 962.3}, {"users": 400, "value": 1018.2}, {"users": 400, "value": 1096.2}, {"users": 400, "value": 1069.3}, {"users": 400, "value": 1076.7}, {"users": 400, "value": 956.9}, {"users": 400, "value": 921.9}],
"current_fail_per_sec": [{"users": 100, "value": 0}, {"users": 350, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}],
"response_time_percentile_1": [{"users": 100, "value": 130}, {"users": 350, "value": 200}, {"users": 400, "value": 320}, {"users": 400, "value": 360}, {"users": 400, "value": 360}, {"users": 400, "value": 320}, {"users": 400, "value": 320}, {"users": 400, "value": 400}, {"users": 400, "value": 400}, {"users": 400, "value": 360}],
"response_time_percentile_2": [{"users": 100, "value": 170}, {"users": 350, "value": 380}, {"users": 400, "value": 510}, {"users": 400, "value": 520}, {"users": 400, "value": 500}, {"users": 400, "value": 480}, {"users": 400, "value": 550}, {"users": 400, "value": 560}, {"users": 400, "value": 490}, {"users": 400, "value": 480}],
"markers": [],
};
var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000
,Aggregated,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000
,Aggregated,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000

Binary file not shown.


View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,70718,0,300,303.8036002149382,5,939,97.84323651686982,1117.209290974752,0.0,300,340,370,390,430,470,510,550,770,930,940
,Aggregated,70718,0,300,303.8036002149382,5,939,97.84323651686982,1117.209290974752,0.0,300,340,370,390,430,470,510,550,770,930,940

Binary file not shown.


Binary file not shown.


Binary file not shown.


View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,70,0,9700,9854.628571428571,7898,12282,98842.68571428572,0.9866591262400619,0.0,9700,10000,11000,11000,12000,12000,12000,12000,12000,12000,12000
,Aggregated,70,0,9700,9854.628571428571,7898,12282,98842.68571428572,0.9866591262400619,0.0,9700,10000,11000,11000,12000,12000,12000,12000,12000,12000,12000

Binary file not shown.


Binary file not shown.


View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info">
<p>During: <span class="l10n datetime">2023-12-26 13:12:13</span> - <span class="l10n datetime">2023-12-26 13:13:24</span></p>
<p>During: <span class="l10n datetime">2023-12-17 14:25:53</span> - <span class="l10n datetime">2023-12-17 14:27:01</span></p>
<p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p>
</div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/pipeline/write</td>
<td>70</td>
<td>10968</td>
<td>0</td>
<td>9854</td>
<td>7898</td>
<td>12282</td>
<td>98842</td>
<td>1.0</td>
<td>761</td>
<td>7</td>
<td>1293</td>
<td>9999</td>
<td>161.4</td>
<td>0.0</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>70</td>
<td>10968</td>
<td>0</td>
<td>9854</td>
<td>7898</td>
<td>12282</td>
<td>98842</td>
<td>1.0</td>
<td>761</td>
<td>7</td>
<td>1293</td>
<td>9999</td>
<td>161.4</td>
<td>0.0</td>
</tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/pipeline/write</td>
<td>9700</td>
<td>10000</td>
<td>10000</td>
<td>11000</td>
<td>12000</td>
<td>12000</td>
<td>12000</td>
<td>12000</td>
<td>620</td>
<td>860</td>
<td>1100</td>
<td>1100</td>
<td>1200</td>
<td>1200</td>
<td>1200</td>
<td>1300</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>9700</td>
<td>10000</td>
<td>10000</td>
<td>11000</td>
<td>12000</td>
<td>12000</td>
<td>12000</td>
<td>12000</td>
<td>620</td>
<td>860</td>
<td>1100</td>
<td>1100</td>
<td>1200</td>
<td>1200</td>
<td>1200</td>
<td>1300</td>
</tr>
</tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script>
var stats_history = {
"time": ["13:12:14", "13:12:19", "13:12:24", "13:12:29", "13:12:34", "13:12:39", "13:12:44", "13:12:49", "13:12:54", "13:12:59", "13:13:04", "13:13:09", "13:13:14", "13:13:19"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}],
"current_rps": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 0.2}, {"users": 10, "value": 0.2}, {"users": 10, "value": 1.1}, {"users": 10, "value": 0.8}, {"users": 10, "value": 1.1}, {"users": 10, "value": 1.0}],
"current_fail_per_sec": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}],
"response_time_percentile_1": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 8800}, {"users": 10, "value": 8900}, {"users": 10, "value": 9700}, {"users": 10, "value": 9700}, {"users": 10, "value": 10000}, {"users": 10, "value": 10000}, {"users": 10, "value": 12000}, {"users": 10, "value": 12000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 9900}, {"users": 10, "value": 9400}],
"response_time_percentile_2": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 9800}, {"users": 10, "value": 9800}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 12000}, {"users": 10, "value": 12000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 10000}],
"time": ["14:25:56", "14:26:01", "14:26:06", "14:26:11", "14:26:16", "14:26:21", "14:26:26", "14:26:31", "14:26:36", "14:26:41", "14:26:46", "14:26:51", "14:26:56", "14:27:01"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 3}, {"value": 36}, {"value": 86}, {"value": 100}, {"value": 100}, {"value": 100}, {"value": 100}, {"value": 110}, {"value": 160}, {"value": 200}, {"value": 200}, {"value": 200}, {"value": 200}, {"value": 200}],
"current_rps": [{"users": 3, "value": 8.0}, {"users": 36, "value": 48.0}, {"users": 86, "value": 109.9}, {"users": 100, "value": 164.0}, {"users": 100, "value": 172.1}, {"users": 100, "value": 172.8}, {"users": 100, "value": 170.5}, {"users": 110, "value": 169.9}, {"users": 160, "value": 168.3}, {"users": 200, "value": 172.3}, {"users": 200, "value": 169.9}, {"users": 200, "value": 172.4}, {"users": 200, "value": 171.7}, {"users": 200, "value": 170.5}],
"current_fail_per_sec": [{"users": 3, "value": 0}, {"users": 36, "value": 0}, {"users": 86, "value": 0}, {"users": 100, "value": 0}, {"users": 100, "value": 0}, {"users": 100, "value": 0}, {"users": 100, "value": 0}, {"users": 110, "value": 0}, {"users": 160, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}],
"response_time_percentile_1": [{"users": 3, "value": 57}, {"users": 36, "value": 120}, {"users": 86, "value": 250}, {"users": 100, "value": 520}, {"users": 100, "value": 580}, {"users": 100, "value": 580}, {"users": 100, "value": 580}, {"users": 110, "value": 590}, {"users": 160, "value": 620}, {"users": 200, "value": 870}, {"users": 200, "value": 1100}, {"users": 200, "value": 1100}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}],
"response_time_percentile_2": [{"users": 3, "value": 62}, {"users": 36, "value": 220}, {"users": 86, "value": 480}, {"users": 100, "value": 610}, {"users": 100, "value": 620}, {"users": 100, "value": 620}, {"users": 100, "value": 670}, {"users": 110, "value": 670}, {"users": 160, "value": 860}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}],
"markers": [],
};
var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,10968,0,620,761.5634573304158,7,1293,9999.697757111597,161.3685696386145,0.0,620,1100,1100,1100,1200,1200,1200,1200,1300,1300,1300
,Aggregated,10968,0,620,761.5634573304158,7,1293,9999.697757111597,161.3685696386145,0.0,620,1100,1100,1100,1200,1200,1200,1200,1300,1300,1300

View file

@ -0,0 +1,3 @@
Design:
Actix Web and reqwest with request pipelining

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info">
<p>During: <span class="l10n datetime">2023-12-26 13:09:24</span> - <span class="l10n datetime">2023-12-26 13:10:06</span></p>
<p>During: <span class="l10n datetime">2023-12-17 14:19:00</span> - <span class="l10n datetime">2023-12-17 14:20:11</span></p>
<p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p>
</div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/pipeline/write</td>
<td>673</td>
<td>1141</td>
<td>0</td>
<td>625</td>
<td>448</td>
<td>734</td>
<td>99835</td>
<td>15.8</td>
<td>681</td>
<td>69</td>
<td>858</td>
<td>100027</td>
<td>16.1</td>
<td>0.0</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>673</td>
<td>1141</td>
<td>0</td>
<td>625</td>
<td>448</td>
<td>734</td>
<td>99835</td>
<td>15.8</td>
<td>681</td>
<td>69</td>
<td>858</td>
<td>100027</td>
<td>16.1</td>
<td>0.0</td>
</tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr>
<td>POST</td>
<td>http://localhost:9001/pipeline/write</td>
<td>620</td>
<td>630</td>
<td>640</td>
<td>650</td>
<td>680</td>
<td>700</td>
<td>720</td>
<td>730</td>
<td>740</td>
<td>760</td>
<td>780</td>
<td>790</td>
<td>820</td>
<td>860</td>
</tr>
<tr class="total">
<td></td>
<td>Aggregated</td>
<td>620</td>
<td>630</td>
<td>640</td>
<td>650</td>
<td>680</td>
<td>700</td>
<td>720</td>
<td>730</td>
<td>740</td>
<td>760</td>
<td>780</td>
<td>790</td>
<td>820</td>
<td>860</td>
</tr>
</tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script>
var stats_history = {
"time": ["13:09:26", "13:09:31", "13:09:36", "13:09:41", "13:09:46", "13:09:51", "13:09:56", "13:10:01", "13:10:06"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}],
"current_rps": [{"users": 10, "value": 0}, {"users": 10, "value": 14.2}, {"users": 10, "value": 15.6}, {"users": 10, "value": 16.1}, {"users": 10, "value": 15.7}, {"users": 10, "value": 16.2}, {"users": 10, "value": 15.8}, {"users": 10, "value": 15.3}, {"users": 10, "value": 15.6}],
"current_fail_per_sec": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}],
"response_time_percentile_1": [{"users": 10, "value": 630}, {"users": 10, "value": 600}, {"users": 10, "value": 600}, {"users": 10, "value": 620}, {"users": 10, "value": 640}, {"users": 10, "value": 640}, {"users": 10, "value": 640}, {"users": 10, "value": 640}, {"users": 10, "value": 630}],
"response_time_percentile_2": [{"users": 10, "value": 650}, {"users": 10, "value": 640}, {"users": 10, "value": 660}, {"users": 10, "value": 700}, {"users": 10, "value": 700}, {"users": 10, "value": 680}, {"users": 10, "value": 720}, {"users": 10, "value": 720}, {"users": 10, "value": 700}],
"time": ["14:19:01", "14:19:06", "14:19:11", "14:19:16", "14:19:21", "14:19:26", "14:19:31", "14:19:36", "14:19:41", "14:19:46", "14:19:51", "14:19:56", "14:20:01", "14:20:06"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 2}, {"value": 7}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}],
"current_rps": [{"users": 2, "value": 0}, {"users": 7, "value": 9.25}, {"users": 12, "value": 12.88888888888889}, {"users": 12, "value": 16.1}, {"users": 12, "value": 16.2}, {"users": 12, "value": 16.6}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.1}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.5}, {"users": 12, "value": 16.5}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.3}],
"current_fail_per_sec": [{"users": 2, "value": 0}, {"users": 7, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}],
"response_time_percentile_1": [{"users": 2, "value": 0}, {"users": 7, "value": 260}, {"users": 12, "value": 400}, {"users": 12, "value": 670}, {"users": 12, "value": 710}, {"users": 12, "value": 700}, {"users": 12, "value": 740}, {"users": 12, "value": 740}, {"users": 12, "value": 730}, {"users": 12, "value": 720}, {"users": 12, "value": 730}, {"users": 12, "value": 730}, {"users": 12, "value": 720}, {"users": 12, "value": 730}],
"response_time_percentile_2": [{"users": 2, "value": 0}, {"users": 7, "value": 380}, {"users": 12, "value": 640}, {"users": 12, "value": 770}, {"users": 12, "value": 800}, {"users": 12, "value": 800}, {"users": 12, "value": 800}, {"users": 12, "value": 790}, {"users": 12, "value": 780}, {"users": 12, "value": 770}, {"users": 12, "value": 820}, {"users": 12, "value": 830}, {"users": 12, "value": 790}, {"users": 12, "value": 790}],
"markers": [],
};
var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,1141,0,720,681.7598597721297,69,858,100027.22261174409,16.07784382812381,0.0,720,740,750,760,780,790,810,820,840,860,860
,Aggregated,1141,0,720,681.7598597721297,69,858,100027.22261174409,16.07784382812381,0.0,720,740,750,760,780,790,810,820,840,860,860

Binary file not shown.


View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,148740,0,6,7.030960064542154,0,485,98.25319349199947,3904.7975720558998,0.0,6,8,9,10,12,15,20,25,170,380,480
,Aggregated,148740,0,6,7.030960064542154,0,485,98.25319349199947,3904.7975720558998,0.0,6,8,9,10,12,15,20,25,170,380,480

Binary file not shown.


Binary file not shown.


Binary file not shown.


View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,673,0,620,625.5542347696879,448,734,99835.94056463595,15.80574909851346,0.0,620,640,650,650,680,700,720,720,730,730,730
,Aggregated,673,0,620,625.5542347696879,448,734,99835.94056463595,15.80574909851346,0.0,620,640,650,650,680,700,720,720,730,730,730

Binary file not shown.


Binary file not shown.


View file

@ -1 +0,0 @@
Count,Message,Traceback,Nodes

View file

@ -1 +0,0 @@
Method,Name,Error,Occurrences

Binary file not shown.


File diff suppressed because one or more lines are too long

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/PipelineDcacheOps,3480,0,98,104.35343347919283,85.40578499378171,842.1087349997833,14999.985632183909,95.67244900465325,0.0,98,99,100,100,100,110,120,360,840,840,840
,Aggregated,3480,0,98,104.35343347919283,85.40578499378171,842.1087349997833,14999.985632183909,95.67244900465325,0.0,98,99,100,100,100,110,120,360,840,840,840

Binary file not shown.


View file

@ -1 +0,0 @@
Count,Message,Traceback,Nodes

View file

@ -1 +0,0 @@
Method,Name,Error,Occurrences

Binary file not shown.


File diff suppressed because one or more lines are too long

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/AddVisitor,186109,0,79,74.60541254397303,3.7561320059467107,119.94536400015932,10.999731340236098,4816.33283284295,0.0,79,83,86,89,93,97,100,110,120,120,120
,Aggregated,186109,0,79,74.60541254397303,3.7561320059467107,119.94536400015932,10.999731340236098,4816.33283284295,0.0,79,83,86,89,93,97,100,110,120,120,120

Binary file not shown.


Binary file not shown.


View file

@ -1,7 +0,0 @@
#!/bin/bash
ghz --insecure --proto ./proto/dcache/dcache.proto --call dcache.DcacheService.Write \
-c 300 -n 30000 --rps 4000 -O html -o out.html \
-d '{"data":"{\"AddVisitor\": \"test_1\"}"}' \
localhost:9001
# -d '{"data":"{\"AddCaptcha\":{\"id\":\"test_1\",\"mcaptcha\":{\"defense\":{\"current_visitor_threshold\":0,\"levels\":[{\"difficulty_factor\":500,\"visitor_threshold\":50},{\"difficulty_factor\":50000,\"visitor_threshold\":5000}]},\"duration\":30,\"visitor_threshold\":0}}}"}' \

View file

@ -1 +0,0 @@
10 messages per batch request

View file

@ -1 +0,0 @@
Count,Message,Traceback,Nodes

View file

@ -1 +0,0 @@
Method,Name,Error,Occurrences

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/PipelineDcacheOps,41383,0,140.0,99.16818701259079,5.581609002547339,182.89305199868977,40.0,650.896214047811,0.0,140,150,150,150,160,160,160,170,180,180,180
,Aggregated,41383,0,140.0,99.16818701259079,5.581609002547339,182.89305199868977,40.0,650.896214047811,0.0,140,150,150,150,160,160,160,170,180,180,180

View file

@ -1 +0,0 @@
Count,Message,Traceback,Nodes

View file

@ -1 +0,0 @@
Method,Name,Error,Occurrences

File diff suppressed because one or more lines are too long

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/Write,96465,0,600.0,530.5241670541676,3.931416000114041,2860.153126999876,130.11822940963043,732.274667601832,0.0,600,720,830,880,1100,1200,1300,1500,2300,2900,2900
,Aggregated,96465,0,600.0,530.5241670541676,3.931416000114041,2860.153126999876,130.11822940963043,732.274667601832,0.0,600,720,830,880,1100,1200,1300,1500,2300,2900,2900


@ -1 +0,0 @@
Count,Message,Traceback,Nodes
1 Count Message Traceback Nodes


@ -1 +0,0 @@
Method,Name,Error,Occurrences
1 Method Name Error Occurrences

File diff suppressed because one or more lines are too long


@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/AddVisitor,358924,0,79,77.86313645947614,3.354386999944836,123.28810700000759,0.0,4842.970815301002,0.0,79,84,86,88,92,96,100,100,110,120,120
,Aggregated,358924,0,79,77.86313645947614,3.354386999944836,123.28810700000759,0.0,4842.970815301002,0.0,79,84,86,88,92,96,100,100,110,120,120

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown (removed image, 674 KiB).

File diff suppressed because one or more lines are too long

Binary file not shown (removed image, 1.2 MiB).


@ -1,10 +0,0 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
// tonic_build::configure()
// .out_dir("protoout")
// .compile(
// &["proto/dcache/dcache.proto"],
// &["proto/dcache"],
// )?;
tonic_build::compile_protos("proto/dcache/dcache.proto")?;
Ok(())
}


@ -1,9 +0,0 @@
#!/bin/bash
protoc \
--proto_path=${PWD}/proto \
--proto_path=${PWD}/bufbuild \
--go_out=${PWD} \
--go-grpc_out=${PWD} \
--validate_out="lang=rust:${PWD}" \
${PWD}/proto/dcache/dcache.proto

File diff suppressed because one or more lines are too long


@ -1,218 +0,0 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
DESCRIPTOR: _descriptor.FileDescriptor
class Level(_message.Message):
__slots__ = ("visitor_threshold", "difficulty_factor")
VISITOR_THRESHOLD_FIELD_NUMBER: _ClassVar[int]
DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
visitor_threshold: int
difficulty_factor: int
def __init__(self, visitor_threshold: _Optional[int] = ..., difficulty_factor: _Optional[int] = ...) -> None: ...
class Defense(_message.Message):
__slots__ = ("levels",)
LEVELS_FIELD_NUMBER: _ClassVar[int]
levels: _containers.RepeatedCompositeFieldContainer[Level]
def __init__(self, levels: _Optional[_Iterable[_Union[Level, _Mapping]]] = ...) -> None: ...
class MCaptcha(_message.Message):
__slots__ = ("duration", "defense")
DURATION_FIELD_NUMBER: _ClassVar[int]
DEFENSE_FIELD_NUMBER: _ClassVar[int]
duration: int
defense: Defense
def __init__(self, duration: _Optional[int] = ..., defense: _Optional[_Union[Defense, _Mapping]] = ...) -> None: ...
class AddCaptchaRequest(_message.Message):
__slots__ = ("id", "mcaptcha")
ID_FIELD_NUMBER: _ClassVar[int]
MCAPTCHA_FIELD_NUMBER: _ClassVar[int]
id: str
mcaptcha: MCaptcha
def __init__(self, id: _Optional[str] = ..., mcaptcha: _Optional[_Union[MCaptcha, _Mapping]] = ...) -> None: ...
class RenameCaptchaRequest(_message.Message):
__slots__ = ("name", "rename_to")
NAME_FIELD_NUMBER: _ClassVar[int]
RENAME_TO_FIELD_NUMBER: _ClassVar[int]
name: str
rename_to: str
def __init__(self, name: _Optional[str] = ..., rename_to: _Optional[str] = ...) -> None: ...
class CachePowRequest(_message.Message):
__slots__ = ("string", "difficulty_factor", "duration", "key")
STRING_FIELD_NUMBER: _ClassVar[int]
DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
DURATION_FIELD_NUMBER: _ClassVar[int]
KEY_FIELD_NUMBER: _ClassVar[int]
string: str
difficulty_factor: int
duration: int
key: str
def __init__(self, string: _Optional[str] = ..., difficulty_factor: _Optional[int] = ..., duration: _Optional[int] = ..., key: _Optional[str] = ...) -> None: ...
class CacheResultRequest(_message.Message):
__slots__ = ("token", "key", "duration")
TOKEN_FIELD_NUMBER: _ClassVar[int]
KEY_FIELD_NUMBER: _ClassVar[int]
DURATION_FIELD_NUMBER: _ClassVar[int]
token: str
key: str
duration: int
def __init__(self, token: _Optional[str] = ..., key: _Optional[str] = ..., duration: _Optional[int] = ...) -> None: ...
class DeleteCaptchaResultRequest(_message.Message):
__slots__ = ("token",)
TOKEN_FIELD_NUMBER: _ClassVar[int]
token: str
def __init__(self, token: _Optional[str] = ...) -> None: ...
class CaptchaID(_message.Message):
__slots__ = ("id",)
ID_FIELD_NUMBER: _ClassVar[int]
id: str
def __init__(self, id: _Optional[str] = ...) -> None: ...
class PoID(_message.Message):
__slots__ = ("id",)
ID_FIELD_NUMBER: _ClassVar[int]
id: str
def __init__(self, id: _Optional[str] = ...) -> None: ...
class AddVisitorResult(_message.Message):
__slots__ = ("duration", "difficulty_factor")
DURATION_FIELD_NUMBER: _ClassVar[int]
DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
duration: int
difficulty_factor: int
def __init__(self, duration: _Optional[int] = ..., difficulty_factor: _Optional[int] = ...) -> None: ...
class OptionAddVisitorResult(_message.Message):
__slots__ = ("result",)
RESULT_FIELD_NUMBER: _ClassVar[int]
result: AddVisitorResult
def __init__(self, result: _Optional[_Union[AddVisitorResult, _Mapping]] = ...) -> None: ...
class RaftRequest(_message.Message):
__slots__ = ("data",)
DATA_FIELD_NUMBER: _ClassVar[int]
data: str
def __init__(self, data: _Optional[str] = ...) -> None: ...
class RaftReply(_message.Message):
__slots__ = ("data", "error")
DATA_FIELD_NUMBER: _ClassVar[int]
ERROR_FIELD_NUMBER: _ClassVar[int]
data: str
error: str
def __init__(self, data: _Optional[str] = ..., error: _Optional[str] = ...) -> None: ...
class Learner(_message.Message):
__slots__ = ("id", "addr")
ID_FIELD_NUMBER: _ClassVar[int]
ADDR_FIELD_NUMBER: _ClassVar[int]
id: int
addr: str
def __init__(self, id: _Optional[int] = ..., addr: _Optional[str] = ...) -> None: ...
class CaptchaExistsResponse(_message.Message):
__slots__ = ("exists",)
EXISTS_FIELD_NUMBER: _ClassVar[int]
exists: bool
def __init__(self, exists: bool = ...) -> None: ...
class GetVisitorCountResponse(_message.Message):
__slots__ = ("visitors",)
VISITORS_FIELD_NUMBER: _ClassVar[int]
visitors: int
def __init__(self, visitors: _Optional[int] = ...) -> None: ...
class OptionGetVisitorCountResponse(_message.Message):
__slots__ = ("result",)
RESULT_FIELD_NUMBER: _ClassVar[int]
result: GetVisitorCountResponse
def __init__(self, result: _Optional[_Union[GetVisitorCountResponse, _Mapping]] = ...) -> None: ...
class DcacheRequest(_message.Message):
__slots__ = ("addCaptcha", "addVisitor", "renameCaptcha", "removeCaptcha", "cachePow", "cacheResult", "captchaExists", "getVisitorCount")
ADDCAPTCHA_FIELD_NUMBER: _ClassVar[int]
ADDVISITOR_FIELD_NUMBER: _ClassVar[int]
RENAMECAPTCHA_FIELD_NUMBER: _ClassVar[int]
REMOVECAPTCHA_FIELD_NUMBER: _ClassVar[int]
CACHEPOW_FIELD_NUMBER: _ClassVar[int]
CACHERESULT_FIELD_NUMBER: _ClassVar[int]
CAPTCHAEXISTS_FIELD_NUMBER: _ClassVar[int]
GETVISITORCOUNT_FIELD_NUMBER: _ClassVar[int]
addCaptcha: AddCaptchaRequest
addVisitor: CaptchaID
renameCaptcha: RenameCaptchaRequest
removeCaptcha: CaptchaID
cachePow: CachePowRequest
cacheResult: CacheResultRequest
captchaExists: CaptchaID
getVisitorCount: CaptchaID
def __init__(self, addCaptcha: _Optional[_Union[AddCaptchaRequest, _Mapping]] = ..., addVisitor: _Optional[_Union[CaptchaID, _Mapping]] = ..., renameCaptcha: _Optional[_Union[RenameCaptchaRequest, _Mapping]] = ..., removeCaptcha: _Optional[_Union[CaptchaID, _Mapping]] = ..., cachePow: _Optional[_Union[CachePowRequest, _Mapping]] = ..., cacheResult: _Optional[_Union[CacheResultRequest, _Mapping]] = ..., captchaExists: _Optional[_Union[CaptchaID, _Mapping]] = ..., getVisitorCount: _Optional[_Union[CaptchaID, _Mapping]] = ...) -> None: ...
class DcacheResponse(_message.Message):
__slots__ = ("option_add_visitor_result", "other", "captcha_exists", "get_visitor_count")
OPTION_ADD_VISITOR_RESULT_FIELD_NUMBER: _ClassVar[int]
OTHER_FIELD_NUMBER: _ClassVar[int]
CAPTCHA_EXISTS_FIELD_NUMBER: _ClassVar[int]
GET_VISITOR_COUNT_FIELD_NUMBER: _ClassVar[int]
option_add_visitor_result: OptionAddVisitorResult
other: RaftReply
captcha_exists: CaptchaExistsResponse
get_visitor_count: OptionGetVisitorCountResponse
def __init__(self, option_add_visitor_result: _Optional[_Union[OptionAddVisitorResult, _Mapping]] = ..., other: _Optional[_Union[RaftReply, _Mapping]] = ..., captcha_exists: _Optional[_Union[CaptchaExistsResponse, _Mapping]] = ..., get_visitor_count: _Optional[_Union[OptionGetVisitorCountResponse, _Mapping]] = ...) -> None: ...
class DcacheBatchRequest(_message.Message):
__slots__ = ("requests",)
REQUESTS_FIELD_NUMBER: _ClassVar[int]
requests: _containers.RepeatedCompositeFieldContainer[DcacheRequest]
def __init__(self, requests: _Optional[_Iterable[_Union[DcacheRequest, _Mapping]]] = ...) -> None: ...
class DcacheBatchResponse(_message.Message):
__slots__ = ("responses",)
RESPONSES_FIELD_NUMBER: _ClassVar[int]
responses: _containers.RepeatedCompositeFieldContainer[DcacheResponse]
def __init__(self, responses: _Optional[_Iterable[_Union[DcacheResponse, _Mapping]]] = ...) -> None: ...
class RetrievePowRequest(_message.Message):
__slots__ = ("token", "key")
TOKEN_FIELD_NUMBER: _ClassVar[int]
KEY_FIELD_NUMBER: _ClassVar[int]
token: str
key: str
def __init__(self, token: _Optional[str] = ..., key: _Optional[str] = ...) -> None: ...
class RetrievePowResponse(_message.Message):
__slots__ = ("difficulty_factor", "duration", "key")
DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
DURATION_FIELD_NUMBER: _ClassVar[int]
KEY_FIELD_NUMBER: _ClassVar[int]
difficulty_factor: int
duration: int
key: str
def __init__(self, difficulty_factor: _Optional[int] = ..., duration: _Optional[int] = ..., key: _Optional[str] = ...) -> None: ...
class CaptchaResultVerified(_message.Message):
__slots__ = ("verified",)
VERIFIED_FIELD_NUMBER: _ClassVar[int]
verified: bool
def __init__(self, verified: bool = ...) -> None: ...
class DeletePowRequest(_message.Message):
__slots__ = ("string",)
STRING_FIELD_NUMBER: _ClassVar[int]
string: str
def __init__(self, string: _Optional[str] = ...) -> None: ...
class OptionalRetrievePoWResponse(_message.Message):
__slots__ = ("result",)
RESULT_FIELD_NUMBER: _ClassVar[int]
result: RetrievePowResponse
def __init__(self, result: _Optional[_Union[RetrievePowResponse, _Mapping]] = ...) -> None: ...


@ -1,663 +0,0 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import dcache_py.dcache_pb2 as dcache__pb2
class DcacheServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.AddCaptcha = channel.unary_unary(
'/dcache.DcacheService/AddCaptcha',
request_serializer=dcache__pb2.AddCaptchaRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.AddVisitor = channel.unary_unary(
'/dcache.DcacheService/AddVisitor',
request_serializer=dcache__pb2.CaptchaID.SerializeToString,
response_deserializer=dcache__pb2.OptionAddVisitorResult.FromString,
)
self.RenameCaptcha = channel.unary_unary(
'/dcache.DcacheService/RenameCaptcha',
request_serializer=dcache__pb2.RenameCaptchaRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.RemoveCaptcha = channel.unary_unary(
'/dcache.DcacheService/RemoveCaptcha',
request_serializer=dcache__pb2.CaptchaID.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.CachePow = channel.unary_unary(
'/dcache.DcacheService/CachePow',
request_serializer=dcache__pb2.CachePowRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.RetrievePow = channel.unary_unary(
'/dcache.DcacheService/RetrievePow',
request_serializer=dcache__pb2.RetrievePowRequest.SerializeToString,
response_deserializer=dcache__pb2.OptionalRetrievePoWResponse.FromString,
)
self.DeletePow = channel.unary_unary(
'/dcache.DcacheService/DeletePow',
request_serializer=dcache__pb2.DeletePowRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.CacheResult = channel.unary_unary(
'/dcache.DcacheService/CacheResult',
request_serializer=dcache__pb2.CacheResultRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.VerifyCaptchaResult = channel.unary_unary(
'/dcache.DcacheService/VerifyCaptchaResult',
request_serializer=dcache__pb2.RetrievePowRequest.SerializeToString,
response_deserializer=dcache__pb2.CaptchaResultVerified.FromString,
)
self.DeleteCaptchaResult = channel.unary_unary(
'/dcache.DcacheService/DeleteCaptchaResult',
request_serializer=dcache__pb2.DeleteCaptchaResultRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.CaptchaExists = channel.unary_unary(
'/dcache.DcacheService/CaptchaExists',
request_serializer=dcache__pb2.CaptchaID.SerializeToString,
response_deserializer=dcache__pb2.CaptchaExistsResponse.FromString,
)
self.GetVisitorCount = channel.unary_unary(
'/dcache.DcacheService/GetVisitorCount',
request_serializer=dcache__pb2.CaptchaID.SerializeToString,
response_deserializer=dcache__pb2.OptionGetVisitorCountResponse.FromString,
)
self.PipelineDcacheOps = channel.unary_unary(
'/dcache.DcacheService/PipelineDcacheOps',
request_serializer=dcache__pb2.DcacheBatchRequest.SerializeToString,
response_deserializer=dcache__pb2.DcacheBatchResponse.FromString,
)
self.AddLearner = channel.unary_unary(
'/dcache.DcacheService/AddLearner',
request_serializer=dcache__pb2.Learner.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.Write = channel.unary_unary(
'/dcache.DcacheService/Write',
request_serializer=dcache__pb2.RaftRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.Forward = channel.unary_unary(
'/dcache.DcacheService/Forward',
request_serializer=dcache__pb2.RaftRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.AppendEntries = channel.unary_unary(
'/dcache.DcacheService/AppendEntries',
request_serializer=dcache__pb2.RaftRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.InstallSnapshot = channel.unary_unary(
'/dcache.DcacheService/InstallSnapshot',
request_serializer=dcache__pb2.RaftRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
self.vote = channel.unary_unary(
'/dcache.DcacheService/vote',
request_serializer=dcache__pb2.RaftRequest.SerializeToString,
response_deserializer=dcache__pb2.RaftReply.FromString,
)
class DcacheServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def AddCaptcha(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddVisitor(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RenameCaptcha(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RemoveCaptcha(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CachePow(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RetrievePow(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeletePow(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CacheResult(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def VerifyCaptchaResult(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteCaptchaResult(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CaptchaExists(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetVisitorCount(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PipelineDcacheOps(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddLearner(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Write(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Forward(self, request, context):
"""/ Forward a request to other
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AppendEntries(self, request, context):
"""raft RPC
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InstallSnapshot(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def vote(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_DcacheServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'AddCaptcha': grpc.unary_unary_rpc_method_handler(
servicer.AddCaptcha,
request_deserializer=dcache__pb2.AddCaptchaRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'AddVisitor': grpc.unary_unary_rpc_method_handler(
servicer.AddVisitor,
request_deserializer=dcache__pb2.CaptchaID.FromString,
response_serializer=dcache__pb2.OptionAddVisitorResult.SerializeToString,
),
'RenameCaptcha': grpc.unary_unary_rpc_method_handler(
servicer.RenameCaptcha,
request_deserializer=dcache__pb2.RenameCaptchaRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'RemoveCaptcha': grpc.unary_unary_rpc_method_handler(
servicer.RemoveCaptcha,
request_deserializer=dcache__pb2.CaptchaID.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'CachePow': grpc.unary_unary_rpc_method_handler(
servicer.CachePow,
request_deserializer=dcache__pb2.CachePowRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'RetrievePow': grpc.unary_unary_rpc_method_handler(
servicer.RetrievePow,
request_deserializer=dcache__pb2.RetrievePowRequest.FromString,
response_serializer=dcache__pb2.OptionalRetrievePoWResponse.SerializeToString,
),
'DeletePow': grpc.unary_unary_rpc_method_handler(
servicer.DeletePow,
request_deserializer=dcache__pb2.DeletePowRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'CacheResult': grpc.unary_unary_rpc_method_handler(
servicer.CacheResult,
request_deserializer=dcache__pb2.CacheResultRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'VerifyCaptchaResult': grpc.unary_unary_rpc_method_handler(
servicer.VerifyCaptchaResult,
request_deserializer=dcache__pb2.RetrievePowRequest.FromString,
response_serializer=dcache__pb2.CaptchaResultVerified.SerializeToString,
),
'DeleteCaptchaResult': grpc.unary_unary_rpc_method_handler(
servicer.DeleteCaptchaResult,
request_deserializer=dcache__pb2.DeleteCaptchaResultRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'CaptchaExists': grpc.unary_unary_rpc_method_handler(
servicer.CaptchaExists,
request_deserializer=dcache__pb2.CaptchaID.FromString,
response_serializer=dcache__pb2.CaptchaExistsResponse.SerializeToString,
),
'GetVisitorCount': grpc.unary_unary_rpc_method_handler(
servicer.GetVisitorCount,
request_deserializer=dcache__pb2.CaptchaID.FromString,
response_serializer=dcache__pb2.OptionGetVisitorCountResponse.SerializeToString,
),
'PipelineDcacheOps': grpc.unary_unary_rpc_method_handler(
servicer.PipelineDcacheOps,
request_deserializer=dcache__pb2.DcacheBatchRequest.FromString,
response_serializer=dcache__pb2.DcacheBatchResponse.SerializeToString,
),
'AddLearner': grpc.unary_unary_rpc_method_handler(
servicer.AddLearner,
request_deserializer=dcache__pb2.Learner.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'Write': grpc.unary_unary_rpc_method_handler(
servicer.Write,
request_deserializer=dcache__pb2.RaftRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'Forward': grpc.unary_unary_rpc_method_handler(
servicer.Forward,
request_deserializer=dcache__pb2.RaftRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'AppendEntries': grpc.unary_unary_rpc_method_handler(
servicer.AppendEntries,
request_deserializer=dcache__pb2.RaftRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'InstallSnapshot': grpc.unary_unary_rpc_method_handler(
servicer.InstallSnapshot,
request_deserializer=dcache__pb2.RaftRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
'vote': grpc.unary_unary_rpc_method_handler(
servicer.vote,
request_deserializer=dcache__pb2.RaftRequest.FromString,
response_serializer=dcache__pb2.RaftReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'dcache.DcacheService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class DcacheService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def AddCaptcha(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AddCaptcha',
dcache__pb2.AddCaptchaRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def AddVisitor(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AddVisitor',
dcache__pb2.CaptchaID.SerializeToString,
dcache__pb2.OptionAddVisitorResult.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RenameCaptcha(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/RenameCaptcha',
dcache__pb2.RenameCaptchaRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RemoveCaptcha(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/RemoveCaptcha',
dcache__pb2.CaptchaID.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CachePow(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/CachePow',
dcache__pb2.CachePowRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RetrievePow(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/RetrievePow',
dcache__pb2.RetrievePowRequest.SerializeToString,
dcache__pb2.OptionalRetrievePoWResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeletePow(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/DeletePow',
dcache__pb2.DeletePowRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CacheResult(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/CacheResult',
dcache__pb2.CacheResultRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def VerifyCaptchaResult(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/VerifyCaptchaResult',
dcache__pb2.RetrievePowRequest.SerializeToString,
dcache__pb2.CaptchaResultVerified.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteCaptchaResult(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/DeleteCaptchaResult',
dcache__pb2.DeleteCaptchaResultRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CaptchaExists(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/CaptchaExists',
dcache__pb2.CaptchaID.SerializeToString,
dcache__pb2.CaptchaExistsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetVisitorCount(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/GetVisitorCount',
dcache__pb2.CaptchaID.SerializeToString,
dcache__pb2.OptionGetVisitorCountResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def PipelineDcacheOps(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/PipelineDcacheOps',
dcache__pb2.DcacheBatchRequest.SerializeToString,
dcache__pb2.DcacheBatchResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def AddLearner(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AddLearner',
dcache__pb2.Learner.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Write(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/Write',
dcache__pb2.RaftRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Forward(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/Forward',
dcache__pb2.RaftRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def AppendEntries(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AppendEntries',
dcache__pb2.RaftRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def InstallSnapshot(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/InstallSnapshot',
dcache__pb2.RaftRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def vote(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/vote',
dcache__pb2.RaftRequest.SerializeToString,
dcache__pb2.RaftReply.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)


@ -1,8 +0,0 @@
#!/bin/bash
nohup ./target/release/main --id 1 --http-addr 127.0.0.1:9001 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
sleep 1
nohup ./target/release/main --id 2 --http-addr 127.0.0.1:9002 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
sleep 1
nohup ./target/release/main --id 3 --http-addr 127.0.0.1:9003 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
read -p "Continue? (Y/N): " confirm && killall main


@ -1,177 +0,0 @@
syntax = "proto3";
package dcache;
message Level {
uint32 visitor_threshold = 301;
uint32 difficulty_factor= 302;
}
message Defense {
repeated Level levels = 401;
}
message MCaptcha {
uint64 duration = 502;
Defense defense = 503;
}
message AddCaptchaRequest {
string id = 601;
MCaptcha mcaptcha = 602;
}
message RenameCaptchaRequest {
string name = 701;
string rename_to = 702;
}
message CachePowRequest {
string string= 801;
uint32 difficulty_factor = 802;
uint64 duration = 803;
string key = 804;
}
message CacheResultRequest {
string token = 817;
string key = 818;
uint64 duration= 819;
}
message DeleteCaptchaResultRequest {
string token = 821;
}
message CaptchaID{
string id = 1;
}
message PoID{
string id = 1;
}
message AddVisitorResult {
uint64 duration = 901;
uint32 difficulty_factor = 902;
}
message OptionAddVisitorResult {
optional AddVisitorResult result = 911;
}
message RaftRequest {
string data = 1;
}
message RaftReply {
string data = 1;
string error = 2;
}
message Learner {
uint64 id = 1;
string addr = 2;
}
message CaptchaExistsResponse {
bool exists = 1;
}
message GetVisitorCountResponse {
uint32 visitors = 1;
}
message OptionGetVisitorCountResponse {
optional GetVisitorCountResponse result = 1;
}
message DcacheRequest {
oneof DcacheRequest {
AddCaptchaRequest addCaptcha = 1;
CaptchaID addVisitor = 2;
RenameCaptchaRequest renameCaptcha = 3;
CaptchaID removeCaptcha = 4;
CachePowRequest cachePow = 5;
CacheResultRequest cacheResult = 6;
CaptchaID captchaExists = 7;
CaptchaID getVisitorCount = 8;
}
}
message DcacheResponse {
oneof DcacheResponse {
OptionAddVisitorResult option_add_visitor_result = 1;
RaftReply other = 2;
CaptchaExistsResponse captcha_exists = 3;
OptionGetVisitorCountResponse get_visitor_count = 4;
}
}
message DcacheBatchRequest {
repeated DcacheRequest requests = 1;
}
message DcacheBatchResponse {
repeated DcacheResponse responses = 1;
}
message RetrievePowRequest {
string token = 1;
string key = 2;
}
message RetrievePowResponse {
uint32 difficulty_factor = 1;
uint64 duration = 2;
string key = 3;
}
message CaptchaResultVerified {
bool verified = 1;
}
message DeletePowRequest {
string string = 1;
}
message OptionalRetrievePoWResponse {
optional RetrievePowResponse result = 1;
}
service DcacheService {
rpc AddCaptcha(AddCaptchaRequest) returns (RaftReply) {}
rpc AddVisitor(CaptchaID) returns (OptionAddVisitorResult) {}
rpc RenameCaptcha(RenameCaptchaRequest) returns (RaftReply) {}
rpc RemoveCaptcha(CaptchaID) returns (RaftReply) {}
rpc CachePow(CachePowRequest) returns (RaftReply) {}
rpc RetrievePow(RetrievePowRequest) returns (OptionalRetrievePoWResponse) {}
rpc DeletePow(DeletePowRequest) returns (RaftReply) {}
rpc CacheResult(CacheResultRequest) returns (RaftReply) {}
rpc VerifyCaptchaResult(RetrievePowRequest) returns (CaptchaResultVerified) {}
rpc DeleteCaptchaResult(DeleteCaptchaResultRequest) returns (RaftReply) {}
rpc CaptchaExists(CaptchaID) returns (CaptchaExistsResponse) {}
rpc GetVisitorCount(CaptchaID) returns (OptionGetVisitorCountResponse) {}
rpc PipelineDcacheOps(DcacheBatchRequest) returns (DcacheBatchResponse) {}
rpc AddLearner(Learner) returns (RaftReply) {}
rpc Write(RaftRequest) returns (RaftReply) {}
/// Forward a request to other
rpc Forward(RaftRequest) returns (RaftReply) {}
// raft RPC
rpc AppendEntries(RaftRequest) returns (RaftReply);
rpc InstallSnapshot(RaftRequest) returns (RaftReply);
rpc vote(RaftRequest) returns (RaftReply);
}


@ -1,32 +0,0 @@
asyncio==3.4.3
blinker==1.7.0
Brotli==1.1.0
certifi==2023.11.17
charset-normalizer==3.3.2
click==8.1.7
ConfigArgParse==1.7
Flask==3.0.0
Flask-BasicAuth==0.2.0
Flask-Cors==4.0.0
gevent==23.9.1
geventhttpclient==2.0.11
greenlet==3.0.2
grpc-interceptor==0.15.4
grpcio==1.60.0
grpcio-tools==1.60.0
idna==3.6
itsdangerous==2.1.2
Jinja2==3.1.2
locust==2.20.0
MarkupSafe==2.1.3
msgpack==1.0.7
protobuf==4.25.1
psutil==5.9.7
pyzmq==25.1.2
requests==2.31.0
roundrobin==0.0.4
six==1.16.0
urllib3==2.1.0
Werkzeug==3.0.1
zope.event==5.0
zope.interface==6.1


@ -17,6 +17,7 @@
*/
use std::collections::BTreeMap;
use std::sync::Arc;
use std::time::Duration;
use openraft::error::RaftError;
use openraft::BasicNode;


@ -16,7 +16,11 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use clap::Parser;
use dcache::network::raft_network_impl::DcacheNetwork;
use dcache::start_example_raft_node;
use dcache::store::DcacheStore;
use dcache::DcacheTypeConfig;
use openraft::Raft;
use tracing_subscriber::EnvFilter;
//pub type DcacheRaft = Raft<DcacheTypeConfig, DcacheNetwork, DcacheStore>;
@ -40,7 +44,7 @@ pub struct Opt {
pub cluster_size: usize,
}
#[actix_rt::main]
#[actix_web::main]
async fn main() -> std::io::Result<()> {
// Setup the logger
tracing_subscriber::fmt()


@ -20,26 +20,27 @@
use std::io::Cursor;
use std::sync::Arc;
use actix_web::middleware;
use actix_web::middleware::Logger;
use actix_web::web::Data;
use actix_web::App;
use actix_web::HttpServer;
use openraft::storage::Adaptor;
use openraft::BasicNode;
use openraft::Config;
use openraft::Raft;
use tonic::transport::Server;
use crate::app::DcacheApp;
use crate::network::api;
use crate::network::management;
use crate::network::raft;
use crate::network::raft_network_impl::DcacheNetwork;
use crate::protobuf::dcache::dcache_service_client::DcacheServiceClient;
use crate::protobuf::dcache::dcache_service_server::DcacheServiceServer;
use crate::protobuf::dcache::Learner;
use crate::store::DcacheRequest;
use crate::store::DcacheResponse;
use crate::store::DcacheStore;
pub mod app;
mod mcaptcha;
pub mod network;
mod pool;
mod protobuf;
pub mod store;
pub type DcacheNodeId = u64;
@ -97,6 +98,7 @@ pub async fn start_example_raft_node(
let store = Arc::new(DcacheStore::new(salt));
let (log_store, state_machine) = Adaptor::new(store.clone());
let client = reqwest::Client::new();
// Create the network layer that will connect and communicate the raft instances and
// will be used in conjunction with the store created above.
@ -104,7 +106,7 @@ pub async fn start_example_raft_node(
let (manager_tx, manager_rx) = tokio::sync::mpsc::channel(1000);
// let health = Arc::new(crate::network::raft_network_impl::HealthLedger::new(manager_tx));
// let network = Arc::new(DcacheNetwork::new(health));
let network = Arc::new(DcacheNetwork::new(manager_tx));
let network = Arc::new(DcacheNetwork::new(manager_tx, client.clone()));
// Create a local raft instance.
let raft = Raft::new(
@ -118,50 +120,68 @@ pub async fn start_example_raft_node(
.unwrap();
raft.enable_heartbeat(true);
raft.enable_elect(true);
// raft.enable_tick(true);
// Create an application that will store all the instances created above, this will
// be later used on the actix-web services.
let app = DcacheApp {
let app = Data::new(DcacheApp {
id: node_id,
addr: http_addr.clone(),
raft,
store,
config,
network,
};
let app = Arc::new(app);
let dcache_service = protobuf::MyDcacheImpl::new(app.clone());
});
if introducer_addr == http_addr {
app.init().await.unwrap();
}
let app_copy = app.clone();
// Start the actix-web server.
let server = HttpServer::new(move || {
App::new()
.wrap(Logger::default())
.wrap(Logger::new("%a %{User-Agent}i"))
.wrap(middleware::Compress::default())
.app_data(app.clone())
// raft internal RPC
.service(raft::append)
.service(raft::snapshot)
.service(raft::vote)
// admin API
.service(management::init)
.service(management::add_learner)
.service(management::change_membership)
.service(management::metrics)
// application API
.service(api::write)
.service(api::state)
.service(api::read)
.service(api::pipeline_read)
.service(api::pipeline_write)
.service(api::ws_write)
// .service(api::consistent_read)
});
let svc = DcacheServiceServer::new(dcache_service);
let x = Server::builder()
.add_service(svc)
.serve(http_addr.clone().parse().unwrap());
let server_fut = tokio::spawn(x);
let x = server.bind(&http_addr)?;
let server_fut = tokio::spawn(x.run());
tokio::time::sleep(std::time::Duration::new(3, 0)).await;
let url = format!("http://{}", introducer_addr);
let mut client = DcacheServiceClient::connect(url).await.unwrap();
client
.add_learner(Learner {
id: node_id,
addr: http_addr,
})
let req: (DcacheNodeId, String) = (node_id, http_addr);
let c = reqwest::Client::new();
c.post(format!("http://{}/add-learner", introducer_addr))
.json(&req)
.send()
.await
.unwrap();
// let health_job = tokio::spawn(DcacheApp::health_job(app_copy));
let health_metrics_handle =
crate::network::management::HealthMetrics::spawn(app_copy, 5, manager_rx).await;
server_fut.await?.unwrap();
server_fut.await??;
health_metrics_handle.abort();
// health_job.abort();
Ok(())
}


@ -1,330 +0,0 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2021 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
//! In-memory cache implementation that uses [HashMap]
use std::sync::Arc;
use std::time::Duration;
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use libmcaptcha::cache::messages::*;
use libmcaptcha::errors::*;
#[derive(Clone, Default, Serialize, Deserialize)]
/// cache datastructure implementing [Save]
pub struct HashCache {
difficulty_map: Arc<DashMap<String, CachedPoWConfig>>,
result_map: Arc<DashMap<String, (String, u64)>>,
}
impl HashCache {
// save [PoWConfig] to cache
fn save_pow_config(&self, config: CachePoW) -> CaptchaResult<()> {
let challenge = config.string;
let config: CachedPoWConfig = CachedPoWConfig {
key: config.key,
difficulty_factor: config.difficulty_factor,
duration: config.duration,
};
if self.difficulty_map.get(&challenge).is_none() {
self.difficulty_map.insert(challenge, config);
Ok(())
} else {
Err(CaptchaError::InvalidPoW)
}
}
pub async fn clean_all_after_cold_start(&self, updated: HashCache) {
updated.difficulty_map.iter().for_each(|x| {
self.difficulty_map
.insert(x.key().to_owned(), x.value().to_owned());
});
updated.result_map.iter().for_each(|x| {
self.result_map
.insert(x.key().to_owned(), x.value().to_owned());
});
let cache = self.clone();
let fut = async move {
for values in cache.result_map.iter() {
let inner_cache = cache.clone();
let duration = values.value().1;
let key = values.key().to_owned();
let inner_fut = async move {
tokio::time::sleep(Duration::new(duration, 0)).await;
inner_cache.remove_cache_result(&key);
};
tokio::spawn(inner_fut);
}
for values in cache.difficulty_map.iter() {
let inner_cache = cache.clone();
let duration = values.value().duration;
let key = values.key().to_owned();
let inner_fut = async move {
tokio::time::sleep(Duration::new(duration, 0)).await;
inner_cache.remove_pow_config(&key);
};
tokio::spawn(inner_fut);
}
};
tokio::spawn(fut);
}
    // retrieve [PoWConfig] from cache. Deletes config post retrieval
pub fn retrieve_pow_config(&self, msg: VerifyCaptchaResult) -> Option<CachedPoWConfig> {
if let Some(difficulty_factor) = self.remove_pow_config(&msg.token) {
Some(difficulty_factor)
} else {
None
}
}
// delete [PoWConfig] from cache
pub fn remove_pow_config(&self, string: &str) -> Option<CachedPoWConfig> {
self.difficulty_map.remove(string).map(|x| x.1)
}
// save captcha result
fn save_captcha_result(&self, res: CacheResult) {
self.result_map.insert(res.token, (res.key, res.duration));
}
// verify captcha result
pub fn verify_captcha_result(&self, challenge: VerifyCaptchaResult) -> bool {
if let Some(captcha_id) = self.remove_cache_result(&challenge.token) {
if captcha_id == challenge.key {
true
} else {
false
}
} else {
false
}
}
// delete cache result
pub fn remove_cache_result(&self, string: &str) -> Option<String> {
self.result_map.remove(string).map(|x| x.1 .0)
}
pub fn cache_pow(&self, msg: CachePoW) {
use std::time::Duration;
use tokio::time::sleep;
let duration: Duration = Duration::new(msg.duration, 0);
let string = msg.string.clone();
let cache = self.clone();
let wait_for = async move {
sleep(duration).await;
//delay_for(duration).await;
cache.remove_pow_config(&string);
};
let _ = self.save_pow_config(msg);
tokio::spawn(wait_for);
}
/// cache PoW result
pub fn cache_result(&self, msg: CacheResult) {
use std::time::Duration;
use tokio::time::sleep;
let token = msg.token.clone();
msg.token.clone();
msg.token.clone();
msg.token.clone();
let duration: Duration = Duration::new(msg.duration, 0);
let cache = self.clone();
let wait_for = async move {
sleep(duration).await;
//delay_for(duration).await;
cache.remove_cache_result(&token);
};
tokio::spawn(wait_for);
let _ = self.save_captcha_result(msg);
}
}
#[cfg(test)]
mod tests {
use super::*;
use libmcaptcha::master::AddVisitorResult;
use libmcaptcha::pow::PoWConfig;
use std::time::Duration;
#[actix_rt::test]
async fn merge_works() {
const DIFFICULTY_FACTOR: u32 = 54;
const RES: &str = "b";
const DURATION: u64 = 5;
const KEY: &str = "mcaptchakey";
let pow: PoWConfig = PoWConfig::new(DIFFICULTY_FACTOR, KEY.into()); //salt is dummy here
let cache = HashCache::default();
let new_cache = HashCache::default();
let visitor_result = AddVisitorResult {
difficulty_factor: DIFFICULTY_FACTOR,
duration: DURATION,
};
let string = pow.string.clone();
let msg = CachePoWBuilder::default()
.string(pow.string.clone())
.difficulty_factor(DIFFICULTY_FACTOR)
.duration(visitor_result.duration)
.key(KEY.into())
.build()
.unwrap();
cache.cache_pow(msg);
let add_cache = CacheResult {
key: KEY.into(),
token: RES.into(),
duration: DURATION,
};
cache.cache_result(add_cache.clone());
new_cache.clean_all_after_cold_start(cache.clone()).await;
let msg = VerifyCaptchaResult {
token: string.clone(),
key: KEY.into(),
};
let cache_difficulty_factor = cache.retrieve_pow_config(msg.clone()).unwrap();
let new_cache_difficulty_factor = new_cache.retrieve_pow_config(msg.clone()).unwrap();
assert_eq!(DIFFICULTY_FACTOR, cache_difficulty_factor.difficulty_factor);
assert_eq!(
DIFFICULTY_FACTOR,
new_cache_difficulty_factor.difficulty_factor
);
let verify_msg = VerifyCaptchaResult {
key: KEY.into(),
token: RES.into(),
};
assert!(new_cache.verify_captcha_result(verify_msg.clone()));
assert!(!new_cache.verify_captcha_result(verify_msg.clone()));
let duration: Duration = Duration::new(5, 0);
//sleep(DURATION + DURATION).await;
tokio::time::sleep(duration + duration).await;
let expired_string = cache.retrieve_pow_config(msg.clone());
assert_eq!(None, expired_string);
let expired_string = new_cache.retrieve_pow_config(msg);
assert_eq!(None, expired_string);
cache.cache_result(add_cache);
new_cache.clean_all_after_cold_start(cache.clone()).await;
tokio::time::sleep(duration + duration).await;
assert!(!new_cache.verify_captcha_result(verify_msg.clone()));
assert!(!cache.verify_captcha_result(verify_msg));
}
#[actix_rt::test]
async fn hashcache_pow_cache_works() {
const DIFFICULTY_FACTOR: u32 = 54;
const DURATION: u64 = 5;
const KEY: &str = "mcaptchakey";
let cache = HashCache::default();
let pow: PoWConfig = PoWConfig::new(DIFFICULTY_FACTOR, KEY.into()); //salt is dummy here
let visitor_result = AddVisitorResult {
difficulty_factor: DIFFICULTY_FACTOR,
duration: DURATION,
};
let string = pow.string.clone();
let msg = CachePoWBuilder::default()
.string(pow.string.clone())
.difficulty_factor(DIFFICULTY_FACTOR)
.duration(visitor_result.duration)
.key(KEY.into())
.build()
.unwrap();
cache.cache_pow(msg);
let msg = VerifyCaptchaResult {
token: string.clone(),
key: KEY.into(),
};
let cache_difficulty_factor = cache.retrieve_pow_config(msg.clone()).unwrap();
assert_eq!(DIFFICULTY_FACTOR, cache_difficulty_factor.difficulty_factor);
let duration: Duration = Duration::new(5, 0);
//sleep(DURATION + DURATION).await;
tokio::time::sleep(duration + duration).await;
let expired_string = cache.retrieve_pow_config(msg);
assert_eq!(None, expired_string);
}
#[actix_rt::test]
async fn hashcache_result_cache_works() {
const DURATION: u64 = 5;
const KEY: &str = "a";
const RES: &str = "b";
let cache = HashCache::default();
// send value to cache
// send another value to cache for auto delete
// verify_captcha_result
// delete
// wait for timeout and verify_captcha_result against second value
let add_cache = CacheResult {
key: KEY.into(),
token: RES.into(),
duration: DURATION,
};
cache.cache_result(add_cache);
let verify_msg = VerifyCaptchaResult {
key: KEY.into(),
token: RES.into(),
};
assert!(cache.verify_captcha_result(verify_msg.clone()));
// duplicate
assert!(!cache.verify_captcha_result(verify_msg));
let verify_msg = VerifyCaptchaResult {
key: "cz".into(),
token: RES.into(),
};
assert!(!cache.verify_captcha_result(verify_msg));
let duration: Duration = Duration::new(5, 0);
tokio::time::sleep(duration + duration).await;
let verify_msg = VerifyCaptchaResult {
key: KEY.into(),
token: RES.into(),
};
assert!(!cache.verify_captcha_result(verify_msg));
}
}


@ -1,398 +0,0 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2021 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use serde::{Deserialize, Serialize};
use libmcaptcha::defense::Level;
use libmcaptcha::errors::*;
//
///// Level struct that describes threshold-difficulty factor mapping
//#[derive(Debug, Deserialize, Serialize, Copy, Clone, PartialEq)]
//pub struct Level {
// pub visitor_threshold: u32,
// pub difficulty_factor: u32,
//}
//
///// Builder struct for [Level] to describe threshold-difficulty factor mapping
//#[derive(Debug, Copy, Clone, PartialEq)]
//pub struct LevelBuilder {
// visitor_threshold: Option<u32>,
// difficulty_factor: Option<u32>,
//}
//
//impl Default for LevelBuilder {
// fn default() -> Self {
// LevelBuilder {
// visitor_threshold: None,
// difficulty_factor: None,
// }
// }
//}
//
//impl LevelBuilder {
// /// set visitor count for level
// pub fn visitor_threshold(&mut self, visitor_threshold: u32) -> &mut Self {
// self.visitor_threshold = Some(visitor_threshold);
// self
// }
//
// /// set difficulty factor for level. difficulty_factor can't be zero because
// /// Difficulty is calculated as:
// /// ```no_run
// /// let difficulty_factor = 500;
// /// let difficulty = u128::max_value() - u128::max_value() / difficulty_factor;
// /// ```
// /// the higher the `difficulty_factor`, the higher the difficulty.
// pub fn difficulty_factor(&mut self, difficulty_factor: u32) -> CaptchaResult<&mut Self> {
// if difficulty_factor > 0 {
// self.difficulty_factor = Some(difficulty_factor);
// Ok(self)
// } else {
// Err(CaptchaError::DifficultyFactorZero)
// }
// }
//
// /// build Level struct
// pub fn build(&mut self) -> CaptchaResult<Level> {
// if self.visitor_threshold.is_none() {
// Err(CaptchaError::SetVisitorThreshold)
// } else if self.difficulty_factor.is_none() {
// Err(CaptchaError::SetDifficultyFactor)
// } else {
// Ok(Level {
// difficulty_factor: self.difficulty_factor.unwrap(),
// visitor_threshold: self.visitor_threshold.unwrap(),
// })
// }
// }
//}
//
/// Builder struct for [Defense]
#[derive(Debug, Clone, PartialEq)]
pub struct DefenseBuilder {
levels: Vec<Level>,
}
impl Default for DefenseBuilder {
fn default() -> Self {
DefenseBuilder { levels: vec![] }
}
}
impl DefenseBuilder {
/// add a level to [Defense]
pub fn add_level(&mut self, level: Level) -> CaptchaResult<&mut Self> {
for i in self.levels.iter() {
if i.visitor_threshold == level.visitor_threshold {
return Err(CaptchaError::DuplicateVisitorCount);
}
}
self.levels.push(level);
Ok(self)
}
/// Build [Defense]
pub fn build(&mut self) -> CaptchaResult<Defense> {
if !self.levels.is_empty() {
// sort levels to arrange in ascending order
self.levels.sort_by_key(|a| a.visitor_threshold);
for level in self.levels.iter() {
if level.difficulty_factor == 0 {
return Err(CaptchaError::DifficultyFactorZero);
}
}
            // as visitor count increases, difficulty_factor should increase too;
            // if it decreases, an error must be thrown
for i in 0..self.levels.len() - 1 {
if self.levels[i].difficulty_factor > self.levels[i + 1].difficulty_factor {
return Err(CaptchaError::DecreaseingDifficultyFactor);
}
}
Ok(Defense {
levels: self.levels.to_owned(),
})
} else {
Err(CaptchaError::LevelEmpty)
}
}
}
/// struct describes all the different [Level]s at which an mCaptcha system operates
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct Defense {
levels: Vec<Level>,
// index of current visitor threshold
}
impl From<Defense> for Vec<Level> {
fn from(d: Defense) -> Self {
d.levels
}
}
impl Defense {
///! Difficulty is calculated as:
///! ```rust
///! let difficulty = u128::max_value() - u128::max_value() / difficulty_factor;
///! ```
///! The higher the `difficulty_factor`, the higher the difficulty.
// /// Get difficulty factor of current level of defense
// pub fn get_difficulty(&self, current_visitor_threshold: usize) -> u32 {
// self.levels[current_visitor_threshold].difficulty_factor
// }
//
// /// tighten up defense. Increases defense level by a factor of one.
// /// When defense is at max level, calling this method will have no effect
// pub fn tighten_up(&mut self) {
// if self.current_visitor_threshold < self.levels.len() - 1 {
// self.current_visitor_threshold += 1;
// }
// }
// /// Loosen up defense. Decreases defense level by a factor of one.
// /// When defense is at the lowest level, calling this method will have no effect.
// pub fn loosen_up(&mut self) {
// if self.current_visitor_threshold > 0 {
// self.current_visitor_threshold -= 1;
// }
// }
//
// /// Set defense to maximum level
// pub fn max_defense(&mut self) {
// self.current_visitor_threshold = self.levels.len() - 1;
// }
//
// /// Set defense to minimum level
// pub fn min_defense(&mut self) {
// self.current_visitor_threshold = 0;
// }
//
pub fn get_levels(&self) -> Vec<Level> {
self.levels.clone()
}
/// Get the [Level] that applies to the given visitor count
pub fn current_level(&self, current_visitor_level: u32) -> &Level {
for level in self.levels.iter() {
if current_visitor_level <= level.visitor_threshold {
return level;
}
}
self.levels.last().unwrap()
// &self.levels[self.current_visitor_threshold]
}
//
// /// Get current level's visitor threshold
// pub fn visitor_threshold(&self) -> u32 {
// self.levels[self.current_visitor_threshold].difficulty_factor
// }
}
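// Illustrative sketch (not part of the committed change): minimal end-to-end use
// of the builder API above. `LevelBuilder` comes from libmcaptcha, as in the
// tests below; everything else is defined in this file.
#[allow(dead_code)]
fn example_defense() -> CaptchaResult<Defense> {
    use libmcaptcha::defense::LevelBuilder;

    let mut builder = DefenseBuilder::default();
    // levels may be added in any order; build() sorts them by visitor_threshold
    builder.add_level(
        LevelBuilder::default()
            .visitor_threshold(5_000)
            .difficulty_factor(50_000)?
            .build()?,
    )?;
    builder.add_level(
        LevelBuilder::default()
            .visitor_threshold(50)
            .difficulty_factor(500)?
            .build()?,
    )?;
    let defense = builder.build()?;
    // 30 concurrent visitors fall under the 50-visitor level, so difficulty is 500
    assert_eq!(defense.current_level(30).difficulty_factor, 500);
    Ok(defense)
}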
#[cfg(test)]
mod tests {
use super::*;
use libmcaptcha::defense::Level;
use libmcaptcha::LevelBuilder;
#[test]
fn defense_builder_duplicate_visitor_threshold() {
let mut defense_builder = DefenseBuilder::default();
let err = defense_builder
.add_level(
LevelBuilder::default()
.visitor_threshold(50)
.difficulty_factor(50)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(50)
.difficulty_factor(50)
.unwrap()
.build()
.unwrap(),
);
assert_eq!(err, Err(CaptchaError::DuplicateVisitorCount));
}
#[test]
fn defense_builder_decreasing_difficulty_factor() {
let mut defense_builder = DefenseBuilder::default();
let err = defense_builder
.add_level(
LevelBuilder::default()
.visitor_threshold(50)
.difficulty_factor(50)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(500)
.difficulty_factor(10)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.build();
assert_eq!(err, Err(CaptchaError::DecreaseingDifficultyFactor));
}
#[test]
fn checking_for_integer_overflow() {
let defense = DefenseBuilder::default()
.add_level(
LevelBuilder::default()
.visitor_threshold(5)
.difficulty_factor(5)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(10)
.difficulty_factor(50)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(20)
.difficulty_factor(60)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(30)
.difficulty_factor(65)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.build()
.unwrap();
// for _ in 0..500 {
// defense.tighten_up();
// }
//
// defense.get_difficulty();
// for _ in 0..500000 {
// defense.tighten_up();
// }
//
defense.current_level(10_000_000);
}
fn get_defense() -> Defense {
DefenseBuilder::default()
.add_level(
LevelBuilder::default()
.visitor_threshold(50)
.difficulty_factor(50)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(500)
.difficulty_factor(5000)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(5000)
.difficulty_factor(50000)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(50000)
.difficulty_factor(500000)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(500000)
.difficulty_factor(5000000)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.build()
.unwrap()
}
#[test]
fn defense_builder_works() {
let defense = get_defense();
assert_eq!(defense.levels[0].difficulty_factor, 50);
assert_eq!(defense.levels[1].difficulty_factor, 5000);
assert_eq!(defense.levels[2].difficulty_factor, 50_000);
assert_eq!(defense.levels[3].difficulty_factor, 500_000);
assert_eq!(defense.levels[4].difficulty_factor, 5_000_000);
}
#[test]
fn tighten_up_works() {
let defense = get_defense();
assert_eq!(defense.current_level(0).difficulty_factor, 50);
assert_eq!(defense.current_level(500).difficulty_factor, 5_000);
assert_eq!(defense.current_level(501).difficulty_factor, 50_000);
assert_eq!(defense.current_level(5_000).difficulty_factor, 50_000);
assert_eq!(defense.current_level(5_001).difficulty_factor, 500_000);
assert_eq!(defense.current_level(50_000).difficulty_factor, 500_000);
assert_eq!(defense.current_level(50_001).difficulty_factor, 5_000_000);
assert_eq!(defense.current_level(500_000).difficulty_factor, 5_000_000);
assert_eq!(defense.current_level(500_001).difficulty_factor, 5_000_000);
}
}

View file

@ -1,595 +0,0 @@
/* mCaptcha - A proof of work based DoS protection system
* Copyright © 2021 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::Arc;
use std::time::Duration;
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use super::defense::Defense;
use libmcaptcha::errors::*;
use libmcaptcha::master::messages as ManagerMessages;
/// Builder for [MCaptcha]
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct MCaptchaBuilder {
visitor_threshold: u32,
defense: Option<Defense>,
duration: Option<u64>,
}
impl Default for MCaptchaBuilder {
fn default() -> Self {
MCaptchaBuilder {
visitor_threshold: 0,
defense: None,
duration: None,
}
}
}
impl MCaptchaBuilder {
/// set defense
pub fn defense(&mut self, d: Defense) -> &mut Self {
self.defense = Some(d);
self
}
/// set duration
pub fn duration(&mut self, d: u64) -> &mut Self {
self.duration = Some(d);
self
}
/// Builds new [MCaptcha]
pub fn build(self: &mut MCaptchaBuilder) -> CaptchaResult<MCaptcha> {
if self.duration.is_none() {
Err(CaptchaError::PleaseSetValue("duration".into()))
} else if self.defense.is_none() {
Err(CaptchaError::PleaseSetValue("defense".into()))
} else if self.duration <= Some(0) {
Err(CaptchaError::CaptchaDurationZero)
} else {
let m = MCaptcha {
duration: self.duration.unwrap(),
defense: self.defense.clone().unwrap(),
visitor_threshold: Arc::new(AtomicU32::new(self.visitor_threshold)),
};
Ok(m)
}
}
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct MCaptcha {
visitor_threshold: Arc<AtomicU32>,
defense: Defense,
duration: u64,
}
impl MCaptcha {
/// increments the visitor count by one and returns the difficulty factor for the new count
#[inline]
pub fn add_visitor(&self) -> u32 {
// self.visitor_threshold += 1;
let current_visitor_level = self.visitor_threshold.fetch_add(1, Ordering::SeqCst) + 1;
let current_level = self.defense.current_level(current_visitor_level);
current_level.difficulty_factor
}
/// sets the visitor count to the specified value
#[inline]
pub fn set_visitor_count(&self, new_current: u32) {
self.visitor_threshold
.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |current| {
if current != new_current {
Some(new_current)
} else {
None
}
});
}
/// decrements the visitor count by specified count
#[inline]
pub fn decrement_visitor_by(&self, count: u32) {
self.visitor_threshold
.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |mut current| {
if current > 0 {
if current >= count {
current -= count;
} else {
current = 0;
}
Some(current)
} else {
None
}
});
}
/// get the current visitor count
pub fn get_visitors(&self) -> u32 {
self.visitor_threshold.load(Ordering::SeqCst)
}
}
#[derive(Clone, Serialize, Deserialize)]
pub struct Manager {
pub captchas: Arc<DashMap<String, Arc<MCaptcha>>>,
pub gc: u64,
}
impl Manager {
/// add an [MCaptcha] to the [Manager]
pub fn add_captcha(&self, m: Arc<MCaptcha>, id: String) {
self.captchas.insert(id, m);
}
/// create a new [Manager]
/// accepts a `u64` to configure garbage collection period
pub fn new(gc: u64) -> Self {
Manager {
captchas: Arc::new(DashMap::new()),
gc,
}
}
fn gc(captchas: Arc<DashMap<String, Arc<MCaptcha>>>) {
for captcha in captchas.iter() {
let visitor = { captcha.value().get_visitors() };
if visitor == 0 {
captchas.remove(captcha.key());
}
}
}
/// get an [MCaptcha] from the [Manager]
pub fn get_captcha(&self, id: &str) -> Option<Arc<MCaptcha>> {
if let Some(captcha) = self.captchas.get(id) {
Some(captcha.clone())
} else {
None
}
}
/// removes an [MCaptcha] from the [Manager]
pub fn rm_captcha(&self, id: &str) -> Option<(String, Arc<MCaptcha>)> {
self.captchas.remove(id)
}
/// renames an [MCaptcha]
pub fn rename(&self, current_id: &str, new_id: String) {
// If the captcha isn't present, it's okay not to throw an error
// since captchas are lazily initialized and are cleaned up when inactive
if let Some((_, captcha)) = self.captchas.remove(current_id) {
self.add_captcha(captcha, new_id);
}
}
pub async fn clean_all_after_cold_start(&self, updated: Manager) {
updated.captchas.iter().for_each(|x| {
self.captchas
.insert(x.key().to_owned(), x.value().to_owned());
});
let captchas = self.clone();
let keys: Vec<String> = captchas
.captchas
.clone()
.iter()
.map(|x| x.key().to_owned())
.collect();
let fut = async move {
tokio::time::sleep(Duration::new(captchas.gc, 0)).await;
for key in keys.iter() {
captchas.rm_captcha(key);
}
};
tokio::spawn(fut);
}
pub fn add_visitor(
&self,
msg: &ManagerMessages::AddVisitor,
) -> Option<libmcaptcha::master::AddVisitorResult> {
if let Some(captcha) = self.captchas.get(&msg.0) {
let difficulty_factor = captcha.add_visitor();
// let id = msg.0.clone();
let c = captcha.clone();
let captchas = self.captchas.clone();
let fut = async move {
tokio::time::sleep(Duration::new(c.duration, 0)).await;
c.decrement_visitor_by(1);
// Self::gc(captchas);
// if c.get_visitors() == 0 {
// println!("Removing captcha addvivi");
// captchas.remove(&id);
// }
};
tokio::spawn(fut);
Some(libmcaptcha::master::AddVisitorResult {
duration: captcha.duration,
difficulty_factor,
})
} else {
None
}
}
pub fn get_internal_data(&self) -> HashMap<String, libmcaptcha::mcaptcha::MCaptcha> {
let mut res = HashMap::with_capacity(self.captchas.len());
for value in self.captchas.iter() {
res.insert(value.key().to_owned(), value.value().as_ref().into());
}
res
}
pub fn set_internal_data(&self, mut map: HashMap<String, libmcaptcha::mcaptcha::MCaptcha>) {
for (id, captcha) in map.drain() {
let visitors = captcha.get_visitors();
let new_captcha: MCaptcha = (&captcha).into();
let new_captcha = Arc::new(new_captcha);
self.captchas.insert(id.clone(), new_captcha.clone());
let msg = ManagerMessages::AddVisitor(id);
for _ in 0..visitors {
self.add_visitor(&msg);
}
}
}
}
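// Illustrative note (not part of the committed change): `Manager::add_visitor`
// bumps the captcha's atomic counter and spawns a task that sleeps for the
// captcha's `duration` before decrementing it again, so `get_visitors()` tracks a
// sliding window of recent traffic and the returned difficulty follows
// `Defense::current_level`. Roughly, for a captcha registered under "my-site":
//
//     let res = manager.add_visitor(&ManagerMessages::AddVisitor("my-site".into()));
//     // res.unwrap().difficulty_factor reflects the incremented count;
//     // `duration` seconds later the spawned task decrements the count by one.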
impl From<&libmcaptcha::mcaptcha::MCaptcha> for MCaptcha {
fn from(value: &libmcaptcha::mcaptcha::MCaptcha) -> Self {
let mut defense = super::defense::DefenseBuilder::default();
for level in value.get_defense().get_levels() {
let _ = defense.add_level(level);
}
let defense = defense.build().unwrap();
let new_captcha = MCaptchaBuilder::default()
.defense(defense)
.duration(value.get_duration())
.build()
.unwrap();
// for _ in 0..value.get_visitors() {
// new_captcha.add_visitor();
// }
new_captcha
}
}
impl From<&MCaptcha> for libmcaptcha::mcaptcha::MCaptcha {
fn from(value: &MCaptcha) -> Self {
let mut defense = libmcaptcha::defense::DefenseBuilder::default();
for level in value.defense.get_levels().drain(0..) {
let _ = defense.add_level(level);
}
let defense = defense.build().unwrap();
let mut new_captcha = libmcaptcha::mcaptcha::MCaptchaBuilder::default()
.defense(defense)
.duration(value.duration)
.build()
.unwrap();
for _ in 0..value.get_visitors() {
new_captcha.add_visitor();
}
new_captcha
}
}
#[cfg(test)]
mod tests {
use super::*;
use libmcaptcha::defense::LevelBuilder;
use libmcaptcha::master::messages::*;
pub const LEVEL_1: (u32, u32) = (50, 50);
pub const LEVEL_2: (u32, u32) = (500, 500);
pub const DURATION: u64 = 5;
use crate::mcaptcha::defense::*;
pub fn get_defense() -> Defense {
DefenseBuilder::default()
.add_level(
LevelBuilder::default()
.visitor_threshold(LEVEL_1.0)
.difficulty_factor(LEVEL_1.1)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.add_level(
LevelBuilder::default()
.visitor_threshold(LEVEL_2.0)
.difficulty_factor(LEVEL_2.1)
.unwrap()
.build()
.unwrap(),
)
.unwrap()
.build()
.unwrap()
}
async fn race(manager: &Manager, id: String, count: (u32, u32)) {
let msg = ManagerMessages::AddVisitor(id);
for _ in 0..count.0 as usize - 1 {
manager.add_visitor(&msg);
}
}
// pub fn get_counter() -> Counter {
// get_mcaptcha().into()
// }
pub fn get_mcaptcha() -> MCaptcha {
MCaptchaBuilder::default()
.defense(get_defense())
.duration(DURATION)
.build()
.unwrap()
}
#[actix_rt::test]
async fn manager_works() {
let manager = Manager::new(1);
// let get_add_site_msg = |id: String, mcaptcha: MCaptcha| {
// AddSiteBuilder::default()
// .id(id)
// .mcaptcha(mcaptcha)
// .build()
// .unwrap()
// };
let id = "yo";
manager.add_captcha(Arc::new(get_mcaptcha()), id.into());
let mcaptcha_addr = manager.get_captcha(id);
assert!(mcaptcha_addr.is_some());
let mut mcaptcha_data = manager.get_internal_data();
mcaptcha_data.get_mut(id).unwrap().add_visitor();
mcaptcha_data.get_mut(id).unwrap().add_visitor();
mcaptcha_data.get_mut(id).unwrap().add_visitor();
// let mcaptcha_data: HashMap<String, libmcaptcha::mcaptcha::MCaptcha> = {
// let serialized = serde_json::to_string(&mcaptcha_data).unwrap();
// serde_json::from_str(&serialized).unwrap()
// };
// println!("{:?}", mcaptcha_data);
manager.set_internal_data(mcaptcha_data);
let mcaptcha_data = manager.get_internal_data();
assert_eq!(
manager.get_captcha(id).unwrap().get_visitors(),
mcaptcha_data.get(id).unwrap().get_visitors()
);
let new_id = "yoyo";
manager.rename(id, new_id.into());
{
let mcaptcha_addr = manager.get_captcha(new_id);
assert!(mcaptcha_addr.is_some());
let addr_doesnt_exist = manager.get_captcha(id);
assert!(addr_doesnt_exist.is_none());
let timer_expire = Duration::new(DURATION, 0);
tokio::time::sleep(timer_expire).await;
tokio::time::sleep(timer_expire).await;
}
// Manager::gc(manager.captchas.clone());
// let mcaptcha_addr = manager.get_captcha(new_id);
// assert_eq!(mcaptcha_addr.as_ref().unwrap().get_visitors(), 0);
// assert!(mcaptcha_addr.is_none());
//
// assert!(
// manager.rm_captcha(new_id.into()).is_some());
}
#[actix_rt::test]
async fn counter_defense_works() {
let manager = Manager::new(1);
let id = "yo";
manager.add_captcha(Arc::new(get_mcaptcha()), id.into());
let mut mcaptcha = manager
.add_visitor(&ManagerMessages::AddVisitor(id.to_string()))
.unwrap();
assert_eq!(mcaptcha.difficulty_factor, LEVEL_1.1);
race(&manager, id.to_string(), LEVEL_2).await;
mcaptcha = manager
.add_visitor(&ManagerMessages::AddVisitor(id.to_string()))
.unwrap();
assert_eq!(mcaptcha.difficulty_factor, LEVEL_2.1);
tokio::time::sleep(Duration::new(DURATION * 2, 0)).await;
assert_eq!(manager.get_captcha(id).unwrap().get_visitors(), 0);
}
}
//
//#[cfg(test)]
//pub mod tests {
// use super::*;
// use crate::defense::*;
// use crate::errors::*;
// use crate::mcaptcha;
// use crate::mcaptcha::MCaptchaBuilder;
//
// // constants for testing
// // (visitor count, level)
// pub const LEVEL_1: (u32, u32) = (50, 50);
// pub const LEVEL_2: (u32, u32) = (500, 500);
// pub const DURATION: u64 = 5;
//
// type MyActor = Addr<Counter>;
//
// pub fn get_defense() -> Defense {
// DefenseBuilder::default()
// .add_level(
// LevelBuilder::default()
// .visitor_threshold(LEVEL_1.0)
// .difficulty_factor(LEVEL_1.1)
// .unwrap()
// .build()
// .unwrap(),
// )
// .unwrap()
// .add_level(
// LevelBuilder::default()
// .visitor_threshold(LEVEL_2.0)
// .difficulty_factor(LEVEL_2.1)
// .unwrap()
// .build()
// .unwrap(),
// )
// .unwrap()
// .build()
// .unwrap()
// }
//
// async fn race(addr: Addr<Counter>, count: (u32, u32)) {
// for _ in 0..count.0 as usize - 1 {
// let _ = addr.send(AddVisitor).await.unwrap();
// }
// }
//
// pub fn get_counter() -> Counter {
// get_mcaptcha().into()
// }
//
// pub fn get_mcaptcha() -> MCaptcha {
// MCaptchaBuilder::default()
// .defense(get_defense())
// .duration(DURATION)
// .build()
// .unwrap()
// }
//
// #[test]
// fn mcaptcha_decrement_by_works() {
// let mut m = get_mcaptcha();
// for _ in 0..100 {
// m.add_visitor();
// }
// m.decrement_visitor_by(50);
// assert_eq!(m.get_visitors(), 50);
// m.decrement_visitor_by(500);
// assert_eq!(m.get_visitors(), 0);
// }
//
//
// #[actix_rt::test]
// async fn counter_defense_loosenup_works() {
// //use actix::clock::sleep;
// //use actix::clock::delay_for;
// let addr: MyActor = get_counter().start();
//
// race(addr.clone(), LEVEL_2).await;
// race(addr.clone(), LEVEL_2).await;
// let mut mcaptcha = addr.send(AddVisitor).await.unwrap();
// assert_eq!(mcaptcha.difficulty_factor, LEVEL_2.1);
//
// let duration = Duration::new(DURATION, 0);
// sleep(duration).await;
// //delay_for(duration).await;
//
// mcaptcha = addr.send(AddVisitor).await.unwrap();
// assert_eq!(mcaptcha.difficulty_factor, LEVEL_1.1);
// }
//
// #[test]
// fn test_mcatcptha_builder() {
// let defense = get_defense();
// let m = MCaptchaBuilder::default()
// .duration(0)
// .defense(defense.clone())
// .build();
//
// assert_eq!(m.err(), Some(CaptchaError::CaptchaDurationZero));
//
// let m = MCaptchaBuilder::default().duration(30).build();
// assert_eq!(
// m.err(),
// Some(CaptchaError::PleaseSetValue("defense".into()))
// );
//
// let m = MCaptchaBuilder::default().defense(defense).build();
// assert_eq!(
// m.err(),
// Some(CaptchaError::PleaseSetValue("duration".into()))
// );
// }
//
// #[actix_rt::test]
// async fn get_current_visitor_count_works() {
// let addr: MyActor = get_counter().start();
//
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// let count = addr.send(GetCurrentVisitorCount).await.unwrap();
//
// assert_eq!(count, 4);
// }
//
// #[actix_rt::test]
// #[should_panic]
// async fn stop_works() {
// let addr: MyActor = get_counter().start();
// addr.send(Stop).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// }
//
// #[actix_rt::test]
// async fn get_set_internal_data_works() {
// let addr: MyActor = get_counter().start();
// let mut mcaptcha = addr.send(GetInternalData).await.unwrap();
// mcaptcha.add_visitor();
// addr.send(SetInternalData(mcaptcha.clone())).await.unwrap();
// assert_eq!(
// addr.send(GetInternalData).await.unwrap().get_visitors(),
// mcaptcha.get_visitors()
// );
//
// let duration = Duration::new(mcaptcha.get_duration() + 3, 0);
// sleep(duration).await;
// assert_eq!(addr.send(GetCurrentVisitorCount).await.unwrap(), 0);
// }
//
// #[actix_rt::test]
// async fn bulk_delete_works() {
// let addr: MyActor = get_counter().start();
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// assert_eq!(addr.send(GetCurrentVisitorCount).await.unwrap(), 2);
// addr.send(BulkDecrement(3)).await.unwrap();
// assert_eq!(addr.send(GetCurrentVisitorCount).await.unwrap(), 0);
// }
//}

View file

@ -1,3 +0,0 @@
pub mod cache;
mod defense;
pub mod mcaptcha;

246
src/network/api.rs Normal file
View file

@ -0,0 +1,246 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::sync::{Arc, RwLock};
use actix::dev::channel::channel;
use actix::WrapFuture;
use actix_web::get;
use actix_web::post;
use actix_web::web;
use actix_web::web::Data;
use actix_web::Responder;
use libmcaptcha::cache::messages::{CachedPoWConfig, RetrivePoW, VerifyCaptchaResult};
use libmcaptcha::master::messages::GetInternalData;
use serde::Deserialize;
use serde::Serialize;
use web::Json;
use crate::app::DcacheApp;
use crate::store::DcacheRequest;
use crate::store::DcacheResponse;
#[post("/write")]
pub async fn write(
app: Data<DcacheApp>,
req: Json<DcacheRequest>,
) -> actix_web::Result<impl Responder> {
let response = app.raft.client_write(req.0).await;
Ok(Json(response))
}
#[get("/state")]
pub async fn state(app: Data<DcacheApp>) -> actix_web::Result<impl Responder> {
let sm = app.store.state_machine.read().await;
let resp = sm
.data
.master
.send(GetInternalData)
.await
.unwrap()
.await
.unwrap()
.unwrap();
Ok(Json(resp))
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum ReadRequest {
RetrivePoW(RetrivePoW), //Reader
VerifyCaptchaResult(VerifyCaptchaResult), //Reader
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum ReadResponse {
VerifyCaptchaResult(bool),
RetrivePoW(Option<CachedPoWConfig>),
}
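// Illustrative note (not part of the committed change): both enums use serde's
// default externally tagged representation, so a client POSTs bodies shaped like
//     {"VerifyCaptchaResult": {"token": "...", "key": "..."}}
// to /read and receives e.g. {"VerifyCaptchaResult": true} back; the inner field
// names follow the libmcaptcha message structs wrapped by each variant.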
#[post("/read")]
pub async fn read(
app: Data<DcacheApp>,
req: Json<ReadRequest>,
) -> actix_web::Result<impl Responder> {
let sm = app.store.state_machine.read().await;
let req = req.into_inner();
let res = match req {
ReadRequest::RetrivePoW(msg) => {
let cache_res = sm
.data
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
ReadResponse::RetrivePoW(cache_res)
}
ReadRequest::VerifyCaptchaResult(msg) => {
let cache_res = sm
.data
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
ReadResponse::VerifyCaptchaResult(cache_res)
}
};
Ok(Json(res))
}
#[post("/pipeline/read")]
pub async fn pipeline_read(
app: Data<DcacheApp>,
requests: Json<Vec<ReadRequest>>,
) -> actix_web::Result<impl Responder> {
let requests = requests.into_inner();
let mut responses = Vec::with_capacity(requests.len());
let sm = app.store.state_machine.read().await;
for request in requests {
let res = match request {
ReadRequest::RetrivePoW(msg) => {
let cache_res = sm
.data
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
ReadResponse::RetrivePoW(cache_res)
}
ReadRequest::VerifyCaptchaResult(msg) => {
let cache_res = sm
.data
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
ReadResponse::VerifyCaptchaResult(cache_res)
}
};
responses.push(res);
}
Ok(Json(responses))
}
#[post("/pipeline/write")]
pub async fn pipeline_write(
app: Data<DcacheApp>,
requests: Json<Vec<DcacheRequest>>,
) -> actix_web::Result<impl Responder> {
let mut responses = Vec::with_capacity(requests.len());
let mut requests = requests.into_inner();
for req in requests.drain(0..) {
responses.push(app.raft.client_write(req).await);
}
Ok(Json(responses))
}
use actix::prelude::*;
use actix::{Actor, StreamHandler};
use actix_web::{Error, HttpRequest, HttpResponse};
use actix_web_actors::ws;
/// WebSocket actor that serves incoming Raft RPCs
struct MyWs {
app: Data<DcacheApp>,
}
impl Actor for MyWs {
type Context = ws::WebsocketContext<Self>;
}
/// Handler for ws::Message message
impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
match msg {
Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
Ok(ws::Message::Text(text)) => {
    let msg: super::raft::RaftMessage = serde_json::from_str(&text).unwrap();
    let app = self.app.clone();
    // Run the Raft RPC off the actor, then reply on the socket once it resolves.
    // The reply is wrapped in RaftRes so that the peer's WSClient
    // (see raft_network_impl.rs) can deserialize it.
    let fut = async move {
        match msg {
            super::raft::RaftMessage::VoteRequest(msg) => {
                let res = app.raft.vote(msg).await;
                serde_json::to_string(&super::raft::RaftRes::VoteRes(res)).unwrap()
            }
            super::raft::RaftMessage::Append(msg) => {
                let res = app.raft.append_entries(msg).await;
                serde_json::to_string(&super::raft::RaftRes::AppendRes(res)).unwrap()
            }
            super::raft::RaftMessage::Snapshot(msg) => {
                let res = app.raft.install_snapshot(msg).await;
                serde_json::to_string(&super::raft::RaftRes::SnapshotRes(res)).unwrap()
            }
        }
    }
    .into_actor(self)
    .map(|res, _act, ctx| ctx.text(res));
    // `wait` pauses processing of further frames until the reply has been sent,
    // preserving request/response ordering on the connection.
    ctx.wait(fut);
}
_ => (),
}
}
}
#[get("/ws/write")]
async fn ws_write(
req: HttpRequest,
app: Data<DcacheApp>,
stream: web::Payload,
) -> Result<HttpResponse, Error> {
let resp = ws::start(MyWs { app }, &req, stream);
println!("{:?}", resp);
resp
}

View file

@ -15,15 +15,66 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::sync::Arc;
//use actix_web::web;
//use actix_web::web::Data;
use actix_web::get;
use actix_web::post;
use actix_web::web;
use actix_web::web::Data;
use actix_web::Responder;
use openraft::error::Infallible;
use openraft::BasicNode;
use openraft::RaftMetrics;
use web::Json;
use crate::app::DcacheApp;
use crate::DcacheNodeId;
#[post("/add-learner")]
pub async fn add_learner(
app: Data<DcacheApp>,
req: Json<(DcacheNodeId, String)>,
) -> actix_web::Result<impl Responder> {
let node_id = req.0 .0;
let node = BasicNode {
addr: req.0 .1.clone(),
};
let res = app.raft.add_learner(node_id, node, true).await;
Ok(Json(res))
}
#[post("/change-membership")]
pub async fn change_membership(
app: Data<DcacheApp>,
req: Json<BTreeSet<DcacheNodeId>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.change_membership(req.0, false).await;
Ok(Json(res))
}
#[post("/init")]
pub async fn init(app: Data<DcacheApp>) -> actix_web::Result<impl Responder> {
let mut nodes = BTreeMap::new();
nodes.insert(
app.id,
BasicNode {
addr: app.addr.clone(),
},
);
let res = app.raft.initialize(nodes).await;
Ok(Json(res))
}
#[get("/metrics")]
pub async fn metrics(app: Data<DcacheApp>) -> actix_web::Result<impl Responder> {
let metrics = app.raft.metrics().borrow().clone();
let res: Result<RaftMetrics<DcacheNodeId, BasicNode>, Infallible> = Ok(metrics);
Ok(Json(res))
}
use tokio::sync::mpsc;
#[derive(Debug)]
@ -36,7 +87,7 @@ pub struct HealthMetrics;
impl HealthMetrics {
pub async fn spawn(
app: Arc<DcacheApp>,
app: Data<DcacheApp>,
threshold: usize,
mut rx: mpsc::Receiver<HealthStatus>,
) -> tokio::task::JoinHandle<()> {
@ -63,7 +114,7 @@ impl HealthMetrics {
new_nodes.push(*node.0);
}
let _res =
let res =
app.raft.change_membership(new_nodes, false).await.unwrap();
}
} else {
@ -77,3 +128,20 @@ impl HealthMetrics {
tokio::spawn(fut)
}
}
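// Illustrative note (not part of the committed change): judging from the fragment
// above, this task consumes the HealthStatus reports that the network layer emits
// after each RPC; once a peer accumulates more than `threshold` Down reports, the
// remaining node ids are collected into `new_nodes` and the peer is voted out of
// the membership via `change_membership`.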
//#[get("/self/remove/{id}")]
//pub async fn remove_node(app: Data<DcacheApp>, id: web::Path<u64>) -> actix_web::Result<impl Responder> {
// let cluster_metrics = app.raft.metrics().borrow().clone();
// let remote_id: u64 = 3;
// let mut new_nodes: Vec<DcacheNodeId> = Vec::new();
// for node in cluster_metrics.membership_config.nodes() {
// if *node.0 == remote_id {
// continue;
// }
//
// new_nodes.push(*node.0);
// }
//
// let res = app.raft.change_membership(new_nodes, false).await;
// Ok(Json(res))
//}

View file

@ -15,7 +15,7 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
//pub mod api;
pub mod api;
pub mod management;
//pub mod raft;
pub mod raft;
pub mod raft_network_impl;

80
src/network/raft.rs Normal file
View file

@ -0,0 +1,80 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use actix_web::post;
use actix_web::web;
use actix_web::web::Data;
use actix_web::Responder;
use openraft::error::*;
use openraft::raft::AppendEntriesRequest;
use openraft::raft::InstallSnapshotRequest;
use openraft::raft::VoteRequest;
use openraft::raft::*;
use serde::*;
use web::Json;
use crate::app::DcacheApp;
use crate::DcacheNodeId;
use crate::DcacheTypeConfig;
// --- Raft communication
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum RaftMessage {
VoteRequest(VoteRequest<DcacheNodeId>),
Append(AppendEntriesRequest<DcacheTypeConfig>),
Snapshot(InstallSnapshotRequest<DcacheTypeConfig>),
}
#[derive(Debug, Serialize, Deserialize)]
pub enum RaftRes {
VoteRes(Result<VoteResponse<DcacheNodeId>, RaftError<DcacheNodeId>>),
SnapshotRes(
Result<
InstallSnapshotResponse<DcacheNodeId>,
RaftError<DcacheNodeId, InstallSnapshotError>,
>,
),
AppendRes(Result<AppendEntriesResponse<DcacheNodeId>, RaftError<DcacheNodeId>>),
}
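// Illustrative note (not part of the committed change): these two enums double as
// the wire format of the /ws/write WebSocket transport (see network/api.rs and
// raft_network_impl.rs). Each request is one JSON text frame in serde's externally
// tagged form, answered by one RaftRes frame, e.g.
//     {"VoteRequest": {...}}   ->   {"VoteRes": {"Ok": {...}}}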
#[post("/raft-vote")]
pub async fn vote(
app: Data<DcacheApp>,
req: Json<VoteRequest<DcacheNodeId>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.vote(req.0).await;
Ok(Json(res))
}
#[post("/raft-append")]
pub async fn append(
app: Data<DcacheApp>,
req: Json<AppendEntriesRequest<DcacheTypeConfig>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.append_entries(req.0).await;
Ok(Json(res))
}
#[post("/raft-snapshot")]
pub async fn snapshot(
app: Data<DcacheApp>,
req: Json<InstallSnapshotRequest<DcacheTypeConfig>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.install_snapshot(req.0).await;
Ok(Json(res))
}
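// Illustrative note (not part of the committed change): these plain HTTP routes
// expose the same three Raft RPCs that are multiplexed over the /ws/write
// WebSocket; the reqwest-based DcacheNetwork::send_rpc in raft_network_impl.rs can
// address them as "raft-vote", "raft-append" and "raft-snapshot", although the
// connection factory currently wires replication through the WebSocket client.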

View file

@ -1,3 +1,6 @@
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashSet;
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net>
@ -15,17 +18,24 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::RwLock;
use std::time::Duration;
use std::time::Instant;
use futures_util::{future, pin_mut, StreamExt};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio_tungstenite::{connect_async, tungstenite::protocol::Message};
use tokio::sync::mpsc;
use super::management::HealthStatus;
use crate::DcacheNodeId;
use crate::DcacheTypeConfig;
use async_trait::async_trait;
use openraft::error::InstallSnapshotError;
use openraft::error::NetworkError;
use openraft::error::RPCError;
use openraft::error::RaftError;
use openraft::error::RemoteError;
use openraft::raft::AppendEntriesRequest;
use openraft::raft::AppendEntriesResponse;
use openraft::raft::InstallSnapshotRequest;
@ -35,130 +45,70 @@ use openraft::raft::VoteResponse;
use openraft::BasicNode;
use openraft::RaftNetwork;
use openraft::RaftNetworkFactory;
use reqwest::Client;
use serde::de::DeserializeOwned;
use serde::Serialize;
use tokio::sync::mpsc::Sender;
use tonic::transport::channel::Channel;
use tower_service::Service;
use crate::pool::*;
use crate::protobuf::dcache::dcache_service_client::DcacheServiceClient;
use crate::protobuf::dcache::RaftRequest;
#[derive(Debug)]
struct ChannelManager {}
#[async_trait]
impl ItemManager for ChannelManager {
type Key = String;
type Item = Channel;
type Error = tonic::transport::Error;
async fn build(&self, addr: &Self::Key) -> Result<Channel, tonic::transport::Error> {
tonic::transport::Endpoint::new(addr.clone())?
.connect()
.await
}
async fn check(&self, mut ch: Channel) -> Result<Channel, tonic::transport::Error> {
futures::future::poll_fn(|cx| (&mut ch).poll_ready(cx)).await?;
Ok(ch)
}
}
use super::management::HealthStatus;
use super::raft::{RaftMessage, RaftRes};
use crate::store::DcacheRequest;
use crate::store::DcacheResponse;
use crate::DcacheNodeId;
use crate::DcacheTypeConfig;
#[derive(Clone)]
pub struct DcacheNetwork {
pub signal: Sender<HealthStatus>,
conn_pool: Pool<ChannelManager>,
}
pub enum RPCType {
Vote,
Snapshot,
Append,
pub client: Client,
}
impl DcacheNetwork {
pub fn new(signal: Sender<HealthStatus>) -> Self {
let mgr = ChannelManager {};
Self {
signal,
conn_pool: Pool::new(mgr, Duration::from_millis(50)),
}
pub fn new(signal: Sender<HealthStatus>, client: Client) -> Self {
Self { signal, client }
}
pub async fn send_rpc<Req, Resp, Err>(
&self,
target: DcacheNodeId,
target_node: &BasicNode,
uri: &str,
req: Req,
event: RPCType,
) -> Result<Resp, RPCError<DcacheNodeId, BasicNode, Err>>
where
Req: Serialize,
Err: std::error::Error + DeserializeOwned,
Resp: DeserializeOwned,
{
let mut client = self.make_client(&target, target_node).await;
let addr = &target_node.addr;
let res = match event {
RPCType::Vote => {
client
.vote(RaftRequest {
data: serde_json::to_string(&req).unwrap(),
})
.await
}
let url = format!("http://{}/{}", addr, uri);
RPCType::Snapshot => {
client
.install_snapshot(RaftRequest {
data: serde_json::to_string(&req).unwrap(),
})
.await
}
tracing::debug!("send_rpc to url: {}", url);
RPCType::Append => {
client
.append_entries(RaftRequest {
data: serde_json::to_string(&req).unwrap(),
})
.await
}
};
match res {
Ok(res) => {
let signal2 = self.signal.clone();
let fut = async move {
let _ = signal2.send(HealthStatus::Healthy(target)).await;
};
tokio::spawn(fut);
let res = res.into_inner();
Ok(serde_json::from_str(&res.data).unwrap())
}
let resp = match self.client.post(url).json(&req).send().await {
Ok(resp) => Ok(resp),
Err(e) => {
let _ = self.signal.send(HealthStatus::Down(target)).await;
self.signal.send(HealthStatus::Down(target)).await;
Err(RPCError::Network(NetworkError::new(&e)))
}
}?;
tracing::debug!("client.post() is sent");
let res: Result<Resp, Err> = resp
.json()
.await
.map_err(|e| RPCError::Network(NetworkError::new(&e)))?;
let res = res.map_err(|e| RPCError::RemoteError(RemoteError::new(target, e)));
if res.is_ok() {
let signal2 = self.signal.clone();
let fut = async move {
let _ = signal2.send(HealthStatus::Healthy(target)).await;
};
tokio::spawn(fut);
}
}
pub async fn make_client(
&self,
target: &DcacheNodeId,
target_node: &BasicNode,
) -> DcacheServiceClient<Channel> {
let addr = format!("http://{}", &target_node.addr);
tracing::debug!("connect: target={}: {}", target, addr);
let channel = self.conn_pool.get(&addr).await.unwrap();
let client = DcacheServiceClient::new(channel);
tracing::info!("connected: target={}: {}", target, addr);
client
res
}
}
@ -169,10 +119,20 @@ impl RaftNetworkFactory<DcacheTypeConfig> for Arc<DcacheNetwork> {
type Network = DcacheNetworkConnection;
async fn new_client(&mut self, target: DcacheNodeId, node: &BasicNode) -> Self::Network {
let addr = &node.addr;
let url = format!("ws://{}/{}", addr, "ws/write");
let (write, rx) = mpsc::channel(30);
let (tx, read) = mpsc::channel(30);
let ws_client = WSClient::spawn(rx, tx, url).await;
DcacheNetworkConnection {
owner: self.clone(),
target,
target_node: node.clone(),
// ws_client,
read,
write,
}
}
}
@ -181,6 +141,9 @@ pub struct DcacheNetworkConnection {
owner: Arc<DcacheNetwork>,
target: DcacheNodeId,
target_node: BasicNode,
// ws_client: WSClient,
write: mpsc::Sender<RaftMessage>,
read: mpsc::Receiver<RaftRes>,
}
#[async_trait]
@ -192,9 +155,16 @@ impl RaftNetwork<DcacheTypeConfig> for DcacheNetworkConnection {
AppendEntriesResponse<DcacheNodeId>,
RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId>>,
> {
self.owner
.send_rpc(self.target, &self.target_node, req, RPCType::Append)
.await
self.write.send(RaftMessage::Append(req)).await.unwrap();
match self.read.recv().await.unwrap() {
RaftRes::AppendRes(res) => {
res.map_err(|e| RPCError::RemoteError(RemoteError::new(self.target, e)))
}
_ => unimplemented!(),
}
// self.owner
// .send_rpc(self.target, &self.target_node, "raft-append", req)
// .await
}
async fn send_install_snapshot(
@ -204,9 +174,16 @@ impl RaftNetwork<DcacheTypeConfig> for DcacheNetworkConnection {
InstallSnapshotResponse<DcacheNodeId>,
RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId, InstallSnapshotError>>,
> {
self.owner
.send_rpc(self.target, &self.target_node, req, RPCType::Append)
.await
// self.owner
// .send_rpc(self.target, &self.target_node, "raft-snapshot", req)
// .await
self.write.send(RaftMessage::Snapshot(req)).await.unwrap();
match self.read.recv().await.unwrap() {
RaftRes::SnapshotRes(res) => {
res.map_err(|e| RPCError::RemoteError(RemoteError::new(self.target, e)))
}
_ => unimplemented!(),
}
}
async fn send_vote(
@ -216,8 +193,53 @@ impl RaftNetwork<DcacheTypeConfig> for DcacheNetworkConnection {
VoteResponse<DcacheNodeId>,
RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId>>,
> {
self.owner
.send_rpc(self.target, &self.target_node, req, RPCType::Vote)
// self.owner
// .send_rpc(self.target, &self.target_node, "raft-vote", req)
// .await
self.write
.send(RaftMessage::VoteRequest(req))
.await
.unwrap();
match self.read.recv().await.unwrap() {
RaftRes::VoteRes(res) => {
res.map_err(|e| RPCError::RemoteError(RemoteError::new(self.target, e)))
}
_ => unimplemented!(),
}
}
}
pub struct WSClient;
impl WSClient {
pub async fn spawn(
mut rx: mpsc::Receiver<RaftMessage>,
tx: mpsc::Sender<RaftRes>,
url: String,
) {
use futures_util::SinkExt;
let (ws_stream, _) = connect_async(url).await.expect("Failed to connect");
println!("WebSocket handshake has been successfully completed");
let (mut write, mut read) = ws_stream.split();
let fut = async move {
while let Some(msg) = rx.recv().await {
write
.send(Message::Text(serde_json::to_string(&msg).unwrap()))
.await
.unwrap();
match read.next().await.unwrap().unwrap() {
Message::Text(msg) => {
tx.send(serde_json::from_str(&msg).unwrap()).await;
}
_ => (),
}
}
};
tokio::spawn(fut);
}
}
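// Illustrative note (not part of the committed change): the loop above assumes a
// strict one-request/one-reply framing: each queued RaftMessage is written as a
// single text frame and the very next frame is treated as its reply. Non-text
// frames (pings, close) fall through the `_` arm, which would leave the pending
// caller in DcacheNetworkConnection waiting; a fuller implementation would skip
// control frames and handle reconnects.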

View file

@ -1,154 +0,0 @@
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
use async_trait::async_trait;
use tokio::time::sleep;
//use log::debug;
//use crate::base::tokio;
pub type PoolItem<T> = Arc<tokio::sync::Mutex<Option<T>>>;
/// To build or check an item.
///
/// When an item is requested, the ItemManager `build()`s one for the pool.
/// When an item is reused, the ItemManager `check()`s that it is still valid.
#[async_trait]
pub trait ItemManager {
type Key;
type Item;
type Error;
/// Make a new item to put into the pool.
///
/// An impl should guarantee that an item returned by `build()` passes `check()`.
async fn build(&self, key: &Self::Key) -> Result<Self::Item, Self::Error>;
/// Check if an existing item is still valid.
///
/// E.g.: check whether a TCP connection is still alive.
/// If the item is valid, `check` should return it in an Ok().
/// Otherwise, the item should be dropped and `check` returns an Err().
async fn check(&self, item: Self::Item) -> Result<Self::Item, Self::Error>;
}
/// Pool assumes the items in it is `Clone`, thus it keeps only one item for each key.
#[allow(clippy::type_complexity)]
#[derive(Debug, Clone)]
pub struct Pool<Mgr>
where
Mgr: ItemManager + Debug,
{
/// The first sleep time when `build()` fails.
/// Each subsequent sleep time is twice the previous one.
pub initial_retry_interval: Duration,
/// Pooled items indexed by key.
pub items: Arc<Mutex<HashMap<Mgr::Key, PoolItem<Mgr::Item>>>>,
manager: Mgr,
err_type: PhantomData<Mgr::Error>,
n_retries: u32,
}
impl<Mgr> Pool<Mgr>
where
Mgr: ItemManager + Debug,
Mgr::Key: Clone + Eq + Hash + Send + Debug,
Mgr::Item: Clone + Sync + Send + Debug,
Mgr::Error: Sync + Debug,
{
pub fn new(manager: Mgr, initial_retry_interval: Duration) -> Self {
Pool {
initial_retry_interval,
items: Default::default(),
manager,
err_type: Default::default(),
n_retries: 3,
}
}
pub fn with_retries(mut self, retries: u32) -> Self {
self.n_retries = retries;
self
}
pub fn item_manager(&self) -> &Mgr {
&self.manager
}
/// Return a raw pool item.
///
/// The returned one may be uninitialized, i.e., it contains a None.
/// The lock for `items` should not be held for long; `build()`ing a new connection, for example, can take dozens of ms.
fn get_pool_item(&self, key: &Mgr::Key) -> PoolItem<Mgr::Item> {
let mut items = self.items.lock().unwrap();
if let Some(item) = items.get(key) {
item.clone()
} else {
let item = PoolItem::default();
items.insert(key.clone(), item.clone());
item
}
}
/// Return an item, by cloning an existing one or making a new one.
///
/// When returning an existing one, `check()` will be called on it to ensure it is still valid,
/// e.g. when returning a TCP connection.
// #[logcall::logcall(err = "debug")]
// #[minitrace::trace]
pub async fn get(&self, key: &Mgr::Key) -> Result<Mgr::Item, Mgr::Error> {
let pool_item = self.get_pool_item(key);
let mut guard = pool_item.lock().await;
let item_opt = (*guard).clone();
if let Some(ref item) = item_opt {
let check_res = self.manager.check(item.clone()).await;
// debug!("check reused item res: {:?}", check_res);
if let Ok(itm) = check_res {
return Ok(itm);
} else {
// mark broken conn as deleted
*guard = None;
}
}
let mut interval = self.initial_retry_interval;
for i in 0..self.n_retries {
// debug!("build new item of key: {:?}", key);
let new_item = self.manager.build(key).await;
// debug!("build new item of key res: {:?}", new_item);
match new_item {
Ok(x) => {
*guard = Some(x.clone());
return Ok(x);
}
Err(err) => {
if i == self.n_retries - 1 {
return Err(err);
}
}
}
sleep(interval).await;
interval *= 2;
}
unreachable!("the loop should always return!");
}
}

View file

@ -1,546 +0,0 @@
use std::sync::Arc;
use libmcaptcha::cache::messages as CacheMessages;
use libmcaptcha::defense;
use libmcaptcha::master::messages as MasterMessages;
use libmcaptcha::mcaptcha;
use openraft::BasicNode;
use serde::de::DeserializeOwned;
use serde::Serialize;
use tonic::Response;
use dcache::dcache_request::DcacheRequest as PipelineReq;
use dcache::dcache_response::DcacheResponse as InnerPipelineRes;
use dcache::dcache_service_server::DcacheService;
use dcache::DcacheResponse as OuterPipelineRes;
use dcache::{Learner, RaftReply, RaftRequest};
use crate::app::DcacheApp;
use crate::store::{DcacheRequest, DcacheResponse};
pub mod dcache {
tonic::include_proto!("dcache"); // The string specified here must match the proto package name
}
#[derive(Clone)]
pub struct MyDcacheImpl {
app: Arc<DcacheApp>,
}
impl MyDcacheImpl {
pub fn new(app: Arc<DcacheApp>) -> Self {
Self { app }
}
}
#[tonic::async_trait]
impl DcacheService for MyDcacheImpl {
async fn add_learner(
&self,
request: tonic::Request<Learner>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
let req = request.into_inner();
let node_id = req.id;
let node = BasicNode {
addr: req.addr.clone(),
};
println!("Learner added: {:?}", &req.addr);
let res = self.app.raft.add_learner(node_id, node, true).await;
Ok(Response::new(res.into()))
}
async fn add_captcha(
&self,
request: tonic::Request<dcache::AddCaptchaRequest>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::AddCaptcha(req.into()))
.await;
Ok(Response::new(res.into()))
}
async fn add_visitor(
&self,
request: tonic::Request<dcache::CaptchaId>,
) -> std::result::Result<tonic::Response<dcache::OptionAddVisitorResult>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::AddVisitor(MasterMessages::AddVisitor(
req.id,
)))
.await
.map_err(|e| {
tonic::Status::new(tonic::Code::Internal, serde_json::to_string(&e).unwrap())
})?;
match res.data {
DcacheResponse::AddVisitorResult(res) => {
Ok(Response::new(dcache::OptionAddVisitorResult {
result: res.map(|f| f.into()),
}))
}
_ => unimplemented!(),
}
}
async fn rename_captcha(
&self,
request: tonic::Request<dcache::RenameCaptchaRequest>,
) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::RenameCaptcha(req.into()))
.await;
Ok(Response::new(res.into()))
}
async fn remove_captcha(
&self,
request: tonic::Request<dcache::CaptchaId>,
) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::RemoveCaptcha(MasterMessages::RemoveCaptcha(
req.id,
)))
.await;
Ok(Response::new(res.into()))
}
async fn cache_pow(
&self,
request: tonic::Request<dcache::CachePowRequest>,
) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::CachePoW(req.into()))
.await;
Ok(Response::new(res.into()))
}
async fn retrieve_pow(
&self,
request: tonic::Request<dcache::RetrievePowRequest>,
) -> std::result::Result<tonic::Response<dcache::OptionalRetrievePoWResponse>, tonic::Status>
{
let req = request.into_inner();
let sm = self.app.store.state_machine.read().await;
let res = sm.results.retrieve_pow_config(req.into());
Ok(Response::new(dcache::OptionalRetrievePoWResponse {
result: res.map(|x| x.into()),
}))
}
async fn delete_pow(
&self,
request: tonic::Request<dcache::DeletePowRequest>,
) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::DeletePoW(CacheMessages::DeletePoW(
req.string,
)))
.await;
Ok(Response::new(res.into()))
}
async fn cache_result(
&self,
request: tonic::Request<dcache::CacheResultRequest>,
) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::CacheResult(req.into()))
.await;
Ok(Response::new(res.into()))
}
async fn verify_captcha_result(
&self,
request: tonic::Request<dcache::RetrievePowRequest>,
) -> std::result::Result<tonic::Response<dcache::CaptchaResultVerified>, tonic::Status> {
let req = request.into_inner();
let sm = self.app.store.state_machine.read().await;
let verified = sm.results.verify_captcha_result(req.into());
Ok(Response::new(dcache::CaptchaResultVerified { verified }))
}
async fn delete_captcha_result(
&self,
request: tonic::Request<dcache::DeleteCaptchaResultRequest>,
) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
let req = request.into_inner();
let res = self
.app
.raft
.client_write(DcacheRequest::DeleteCaptchaResult(
CacheMessages::DeleteCaptchaResult { token: req.token },
))
.await;
Ok(Response::new(res.into()))
}
async fn captcha_exists(
&self,
request: tonic::Request<dcache::CaptchaId>,
) -> std::result::Result<tonic::Response<dcache::CaptchaExistsResponse>, tonic::Status> {
let req = request.into_inner();
let sm = self.app.store.state_machine.read().await;
let exists = sm.counter.get_captcha(&req.id).is_some();
Ok(Response::new(dcache::CaptchaExistsResponse { exists }))
}
async fn get_visitor_count(
&self,
request: tonic::Request<dcache::CaptchaId>,
) -> std::result::Result<tonic::Response<dcache::OptionGetVisitorCountResponse>, tonic::Status>
{
let req = request.into_inner();
let sm = self.app.store.state_machine.read().await;
if let Some(captcha) = sm.counter.get_captcha(&req.id) {
let res = captcha.get_visitors();
Ok(Response::new(dcache::OptionGetVisitorCountResponse {
result: Some(dcache::GetVisitorCountResponse { visitors: res }),
}))
} else {
Ok(Response::new(dcache::OptionGetVisitorCountResponse {
result: None,
}))
}
}
// type PipelineDcacheOpsStream =
// Pin<Box<dyn Stream<Item = Result<OuterPipelineRes, tonic::Status>> + Send + 'static>>;
// async fn pipeline_dcache_ops(
// &self,
// request: tonic::Request<tonic::Streaming<dcache::DcacheRequest>>,
// ) -> std::result::Result<tonic::Response<Self::PipelineDcacheOpsStream>, tonic::Status> {
async fn pipeline_dcache_ops(
&self,
request: tonic::Request<dcache::DcacheBatchRequest>,
) -> Result<Response<dcache::DcacheBatchResponse>, tonic::Status> {
let mut reqs = request.into_inner();
let mut responses = Vec::with_capacity(reqs.requests.len());
for req in reqs.requests.drain(0..) {
let res = match req.dcache_request.unwrap() {
PipelineReq::AddCaptcha(add_captcha_req) => {
let res = self
.app
.raft
.client_write(DcacheRequest::AddCaptcha(add_captcha_req.into()))
.await;
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::Other(res.into())),
}
}
PipelineReq::AddVisitor(add_visitor_req) => {
let res = self
.app
.raft
.client_write(DcacheRequest::AddVisitor(MasterMessages::AddVisitor(
add_visitor_req.id,
)))
.await;
match res {
Err(_) => OuterPipelineRes {
dcache_response: None,
},
Ok(res) => match res.data {
DcacheResponse::AddVisitorResult(res) => {
let res = dcache::OptionAddVisitorResult {
result: res.map(|f| f.into()),
};
OuterPipelineRes {
dcache_response: Some(
InnerPipelineRes::OptionAddVisitorResult(res),
),
}
}
_ => unimplemented!(),
},
}
}
PipelineReq::RenameCaptcha(rename_captcha_req) => {
let res = self
.app
.raft
.client_write(DcacheRequest::RenameCaptcha(rename_captcha_req.into()))
.await;
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::Other(res.into())),
}
}
PipelineReq::RemoveCaptcha(remove_captcha_req) => {
let res = self
.app
.raft
.client_write(DcacheRequest::RemoveCaptcha(MasterMessages::RemoveCaptcha(
remove_captcha_req.id,
)))
.await;
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::Other(res.into())),
}
}
PipelineReq::CachePow(cache_pow_req) => {
let res = self
.app
.raft
.client_write(DcacheRequest::CachePoW(cache_pow_req.into()))
.await;
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::Other(res.into())),
}
}
PipelineReq::CacheResult(cache_result_req) => {
let res = self
.app
.raft
.client_write(DcacheRequest::CacheResult(cache_result_req.into()))
.await;
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::Other(res.into())),
}
}
PipelineReq::CaptchaExists(captcha_exists_req) => {
let sm = self.app.store.state_machine.read().await;
let exists = sm.counter.get_captcha(&captcha_exists_req.id).is_some();
let res = dcache::CaptchaExistsResponse { exists };
drop(sm);
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::CaptchaExists(res)),
}
}
PipelineReq::GetVisitorCount(get_visitor_count_req) => {
let sm = self.app.store.state_machine.read().await;
if let Some(captcha) = sm.counter.get_captcha(&get_visitor_count_req.id) {
let res = captcha.get_visitors();
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::GetVisitorCount(
dcache::OptionGetVisitorCountResponse {
result: Some(dcache::GetVisitorCountResponse { visitors: res }),
},
)),
}
} else {
OuterPipelineRes {
dcache_response: Some(InnerPipelineRes::GetVisitorCount(
dcache::OptionGetVisitorCountResponse { result: None },
)),
}
}
}
};
responses.push(res);
}
Ok(Response::new(dcache::DcacheBatchResponse { responses }))
}
async fn write(
&self,
request: tonic::Request<RaftRequest>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
let req = request.into_inner();
let req = serde_json::from_str(&req.data).unwrap();
let res = self.app.raft.client_write(req).await;
Ok(Response::new(res.into()))
}
/// Forward a request to another node.
async fn forward(
&self,
_request: tonic::Request<RaftRequest>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
unimplemented!();
}
async fn append_entries(
&self,
request: tonic::Request<RaftRequest>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
let req = request.into_inner();
let req = serde_json::from_str(&req.data).unwrap();
let res = self.app.raft.append_entries(req).await;
Ok(Response::new(res.into()))
}
async fn install_snapshot(
&self,
request: tonic::Request<RaftRequest>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
let req = request.into_inner();
let req = serde_json::from_str(&req.data).unwrap();
let res = self.app.raft.install_snapshot(req).await;
Ok(Response::new(res.into()))
}
async fn vote(
&self,
request: tonic::Request<RaftRequest>,
) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
let req = request.into_inner();
let req = serde_json::from_str(&req.data).unwrap();
let res = self.app.raft.vote(req).await;
Ok(Response::new(res.into()))
}
}
impl<T, E> From<RaftReply> for Result<T, E>
where
T: DeserializeOwned,
E: DeserializeOwned,
{
fn from(msg: RaftReply) -> Self {
if !msg.data.is_empty() {
let resp: T = serde_json::from_str(&msg.data).expect("fail to deserialize");
Ok(resp)
} else {
let err: E = serde_json::from_str(&msg.error).expect("fail to deserialize");
Err(err)
}
}
}
impl<T, E> From<Result<T, E>> for RaftReply
where
T: Serialize,
E: Serialize,
{
fn from(r: Result<T, E>) -> Self {
match r {
Ok(x) => {
let data = serde_json::to_string(&x).expect("fail to serialize");
RaftReply {
data,
error: Default::default(),
}
}
Err(e) => {
let error = serde_json::to_string(&e).expect("fail to serialize");
RaftReply {
data: Default::default(),
error,
}
}
}
}
}
impl From<dcache::AddCaptchaRequest> for MasterMessages::AddSite {
fn from(value: dcache::AddCaptchaRequest) -> Self {
let req_mcaptcha = value.mcaptcha.unwrap();
let mut defense = req_mcaptcha.defense.unwrap();
let mut new_defense = defense::DefenseBuilder::default();
for level in defense.levels.drain(0..) {
new_defense
.add_level(
defense::LevelBuilder::default()
.difficulty_factor(level.difficulty_factor)
.unwrap()
.visitor_threshold(level.visitor_threshold)
.build()
.unwrap(),
)
.unwrap();
}
let defense = new_defense.build().unwrap();
let mcaptcha = mcaptcha::MCaptchaBuilder::default()
.defense(defense)
.duration(req_mcaptcha.duration)
.build()
.unwrap();
Self {
id: value.id,
mcaptcha,
}
}
}
impl From<libmcaptcha::master::AddVisitorResult> for dcache::AddVisitorResult {
fn from(value: libmcaptcha::master::AddVisitorResult) -> Self {
Self {
duration: value.duration,
difficulty_factor: value.difficulty_factor,
}
}
}
impl From<dcache::RenameCaptchaRequest> for MasterMessages::Rename {
fn from(value: dcache::RenameCaptchaRequest) -> Self {
Self {
name: value.name,
rename_to: value.rename_to,
}
}
}
impl From<dcache::CachePowRequest> for CacheMessages::CachePoW {
fn from(value: dcache::CachePowRequest) -> Self {
Self {
string: value.string,
difficulty_factor: value.difficulty_factor,
duration: value.duration,
key: value.key,
}
}
}
impl From<CacheMessages::CachePoW> for dcache::CachePowRequest {
fn from(value: CacheMessages::CachePoW) -> Self {
Self {
string: value.string,
difficulty_factor: value.difficulty_factor,
duration: value.duration,
key: value.key,
}
}
}
impl From<CacheMessages::CachedPoWConfig> for dcache::RetrievePowResponse {
fn from(value: CacheMessages::CachedPoWConfig) -> Self {
Self {
difficulty_factor: value.difficulty_factor,
duration: value.duration,
key: value.key,
}
}
}
impl From<dcache::CacheResultRequest> for CacheMessages::CacheResult {
fn from(value: dcache::CacheResultRequest) -> Self {
Self {
token: value.token,
key: value.key,
duration: value.duration,
}
}
}
impl From<dcache::RetrievePowRequest> for CacheMessages::VerifyCaptchaResult {
fn from(value: dcache::RetrievePowRequest) -> Self {
Self {
token: value.token,
key: value.key,
}
}
}

View file

@ -23,6 +23,8 @@ use std::ops::RangeBounds;
use std::sync::Arc;
use std::sync::Mutex;
use actix::prelude::*;
use libmcaptcha::cache::messages::CachedPoWConfig;
use libmcaptcha::AddVisitorResult;
use libmcaptcha::MCaptcha;
use openraft::async_trait::async_trait;
@ -47,11 +49,15 @@ use openraft::Vote;
use serde::Deserialize;
use serde::Serialize;
use tokio::sync::RwLock;
use url::quirks::set_pathname;
use crate::DcacheNodeId;
use crate::DcacheTypeConfig;
use libmcaptcha::cache::messages::{CachePoW, CacheResult, DeleteCaptchaResult, DeletePoW};
use actix::prelude::*;
use libmcaptcha::cache::messages::{
CachePoW, CacheResult, DeleteCaptchaResult, DeletePoW, RetrivePoW, VerifyCaptchaResult,
};
use libmcaptcha::master::messages::{
AddSite as AddCaptcha, AddVisitor, GetInternalData, RemoveCaptcha, Rename as RenameCaptcha,
SetInternalData,
@ -60,7 +66,8 @@ use libmcaptcha::{master::embedded::master::Master as EmbeddedMaster, system::Sy
pub mod system;
#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, Message)]
#[rtype(result = "DcacheResponse")]
pub enum DcacheRequest {
// master
AddVisitor(AddVisitor),
@ -74,7 +81,8 @@ pub enum DcacheRequest {
DeleteCaptchaResult(DeleteCaptchaResult),
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Message, Debug, Clone)]
#[rtype(result = "()")]
pub enum DcacheResponse {
AddVisitorResult(Option<AddVisitorResult>),
Empty, // AddCaptcha, RenameCaptcha, RemoveCaptcha, Cachepow, CacheResult,
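With the Message and rtype derives, the whole request enum can be mailed to an actor and answered with a DcacheResponse; the receiving actor itself is presumably wired up elsewhere in this commit. A minimal sketch of what such a handler could look like, with the actor type assumed:
// Hypothetical actor: only the Message/rtype derives above come from this file.
struct DcacheHandler;
impl actix::Actor for DcacheHandler {
    type Context = actix::Context<Self>;
}
impl actix::Handler<DcacheRequest> for DcacheHandler {
    type Result = actix::MessageResult<DcacheRequest>;
    fn handle(&mut self, msg: DcacheRequest, _ctx: &mut Self::Context) -> Self::Result {
        // Dispatch on the variant; every arm ultimately answers with a DcacheResponse.
        let resp = match msg {
            DcacheRequest::AddVisitor(_) => DcacheResponse::AddVisitorResult(None),
            _ => DcacheResponse::Empty,
        };
        actix::MessageResult(resp)
    }
}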
@ -94,9 +102,7 @@ pub struct DcacheStateMachine {
pub last_membership: StoredMembership<DcacheNodeId, BasicNode>,
/// Application data.
// pub data: Arc<System<HashCache, EmbeddedMaster>>,
pub counter: crate::mcaptcha::mcaptcha::Manager,
pub results: crate::mcaptcha::cache::HashCache,
pub data: Arc<System<HashCache, EmbeddedMaster>>,
}
#[derive(Serialize, Deserialize, Clone)]
@ -105,34 +111,42 @@ struct PersistableStateMachine {
last_membership: StoredMembership<DcacheNodeId, BasicNode>,
counter: crate::mcaptcha::mcaptcha::Manager,
results: crate::mcaptcha::cache::HashCache,
/// Application data.
data: HashMap<String, MCaptcha>,
}
impl PersistableStateMachine {
async fn from_statemachine(m: &DcacheStateMachine) -> Self {
let counter = m.counter.clone();
let results = m.results.clone();
let internal_data = m
.data
.master
.send(GetInternalData)
.await
.unwrap()
.await
.unwrap()
.unwrap();
Self {
last_applied_log: m.last_applied_log,
last_applied_log: m.last_applied_log.clone(),
last_membership: m.last_membership.clone(),
counter,
results,
data: internal_data,
}
}
async fn to_statemachine(
self,
counter: crate::mcaptcha::mcaptcha::Manager,
results: crate::mcaptcha::cache::HashCache,
data: Arc<System<HashCache, EmbeddedMaster>>,
) -> DcacheStateMachine {
self.counter.clean_all_after_cold_start(counter).await;
self.results.clean_all_after_cold_start(results).await;
data.master
.send(SetInternalData {
mcaptcha: self.data,
})
.await
.unwrap();
DcacheStateMachine {
last_applied_log: self.last_applied_log,
last_membership: self.last_membership,
results: self.results,
counter: self.counter,
data,
}
}
}
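Because the state machine now owns a live System<HashCache, EmbeddedMaster> rather than plain maps, snapshots can no longer serialize the struct directly; PersistableStateMachine exports the captcha state through GetInternalData and re-injects it with SetInternalData on restore. A small sketch of the export side, assuming snapshots are serialized with serde_json like the rest of the store (the helper name is illustrative, not a function in this file):
// Illustrative helper showing how a snapshot blob could be produced from the live state machine.
async fn build_snapshot_blob(sm: &DcacheStateMachine) -> Vec<u8> {
    // Drain the actor-held captcha map into a plain, serializable HashMap.
    let persistable = PersistableStateMachine::from_statemachine(sm).await;
    serde_json::to_vec(&persistable).expect("fail to serialize state machine")
}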
@ -159,8 +173,7 @@ impl DcacheStore {
let state_machine = RwLock::new(DcacheStateMachine {
last_applied_log: Default::default(),
last_membership: Default::default(),
counter: crate::mcaptcha::mcaptcha::Manager::new(30),
results: crate::mcaptcha::cache::HashCache::default(),
data: system::init_system(salt),
});
Self {
@ -386,42 +399,83 @@ impl RaftStorage<DcacheTypeConfig> for Arc<DcacheStore> {
EntryPayload::Blank => res.push(DcacheResponse::Empty),
EntryPayload::Normal(ref req) => match req {
DcacheRequest::AddVisitor(msg) => {
let r = sm.counter.add_visitor(msg);
let r = sm
.data
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::AddVisitorResult(r));
}
DcacheRequest::AddCaptcha(msg) => {
sm.counter
.add_captcha(Arc::new((&msg.mcaptcha).into()), msg.id.clone());
sm.data
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty);
}
DcacheRequest::RenameCaptcha(msg) => {
sm.counter.rename(&msg.name, msg.rename_to.clone());
sm.data
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty);
}
DcacheRequest::RemoveCaptcha(msg) => {
sm.counter.rm_captcha(&msg.0);
sm.data
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty);
}
// cache
DcacheRequest::CachePoW(msg) => {
sm.results.cache_pow(msg.clone());
sm.data
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty);
}
DcacheRequest::DeletePoW(msg) => {
sm.results.remove_pow_config(&msg.0);
// sm.data.cache.send(msg.clone()).await.unwrap().unwrap();
sm.data.cache.send(msg.clone()).await.unwrap().unwrap();
res.push(DcacheResponse::Empty);
}
DcacheRequest::CacheResult(msg) => {
sm.results.cache_result(msg.clone());
sm.data
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty);
}
DcacheRequest::DeleteCaptchaResult(msg) => {
sm.results.remove_cache_result(&msg.token);
sm.data.cache.send(msg.clone()).await.unwrap().unwrap();
res.push(DcacheResponse::Empty);
}
},
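Each applied log entry is now forwarded to the embedded libmcaptcha actors instead of the local counter and result caches. The chained awaits follow from actix's mailbox semantics: send() resolves once the message is delivered, and these handlers reply with another future that yields the final Result. An unrolled sketch of the AddVisitor arm; the intermediate names and the exact error layers are assumptions based on libmcaptcha's message definitions:
// Same behaviour as the chained form above, spelled out step by step (names illustrative).
let delivered = sm.data.master.send(msg.clone()).await; // Err: actix MailboxError
let reply_rx = delivered.unwrap();   // the handler answered with a one-shot future
let reply = reply_rx.await.unwrap(); // Err: the reply channel was dropped
let r = reply.unwrap();              // Err: a libmcaptcha error; Ok: Option<AddVisitorResult>
res.push(DcacheResponse::AddVisitorResult(r));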
@ -468,7 +522,7 @@ impl RaftStorage<DcacheTypeConfig> for Arc<DcacheStore> {
})?;
let mut state_machine = self.state_machine.write().await;
let updated_state_machine = updated_state_machine
.to_statemachine(state_machine.counter.clone(), state_machine.results.clone())
.to_statemachine(state_machine.data.clone())
.await;
*state_machine = updated_state_machine;
}
@ -503,19 +557,3 @@ impl RaftStorage<DcacheTypeConfig> for Arc<DcacheStore> {
self.clone()
}
}
#[cfg(test)]
mod tests {
use super::*;
async fn provision_dcache_store() -> Arc<DcacheStore> {
Arc::new(DcacheStore::new(
"adsfasdfasdfadsfadfadfadfadsfasdfasdfasdfasdf".into(),
))
}
#[test]
fn test_dcache_store() {
openraft::testing::Suite::test_all(provision_dcache_store).unwrap();
}
}

104
test.py
View file

@ -17,14 +17,6 @@
from pprint import pprint
import requests
import grpc
import json
from dcache_py import dcache_pb2 as dcache
from dcache_py.dcache_pb2 import RaftRequest
from dcache_py.dcache_pb2_grpc import DcacheServiceStub
# import dcache_py.dcache_resources
def init(host: str):
@ -36,6 +28,7 @@ def add_host(host: str, id: int, peer: str):
params = [id, peer]
resp = requests.post(f"http://{host}/add-learner", json=params)
print(f"Adding host {peer}. Status: {resp.status_code}")
pprint(resp.json())
def switch_to_cluster(host: str, nodes: [int]):
@ -54,6 +47,7 @@ def write(host, data):
print(f"RPC Status: {resp.status_code}")
resp = resp.json()
if "Err" in resp:
pprint(resp)
leader = resp["Err"]["APIError"]["ForwardToLeader"]["leader_node"]["addr"]
print(f"Forwarding write to leader {leader}")
return write(leader, data)
@ -62,6 +56,7 @@ def write(host, data):
def add_vote(host: str, captcha_id: str):
resp = write(host, data={"AddVisitor": captcha_id})
#pprint(resp.json())
pprint(resp)
@ -87,92 +82,33 @@ def add_captcha(host: str, captcha_id: str):
host = "localhost:9001"
peers = [(2, "localhost:9002"), (3, "localhost:9003"), (4, "localhost:9004")]
peers = [(2, "localhost:9002"), (3, "localhost:9003")]#, (4, "localhost:9004")]
captcha_id = "test_1"
def initialize_cluster():
init(host)
for peer_id, peer in peers:
add_host(host=host, id=peer_id, peer=peer)
switch_to_cluster(host, nodes=[1, 2, 3, 4])
#switch_to_cluster(host, nodes=[1, 2,3, 4])
switch_to_cluster(host, nodes=[1, 2,3])#, 4])
def add_data():
add_captcha(host, captcha_id)
add_vote(host, captcha_id)
for _ in range(0, 600):
add_vote(host, captcha_id)
def grpc_add_vote(stub: DcacheServiceStub, captcha_id: str):
msg = dcache.CaptchaID(id=captcha_id)
# msg = RaftRequest(data=json.dumps({"AddVisitor": captcha_id}))
# resp = stub.Write(msg)
resp = stub.AddVisitor(msg)
pprint(resp)
def grpc_add_captcha(stub: DcacheServiceStub, captcha_id: str):
msg = dcache.AddCaptchaRequest(
id=captcha_id,
mcaptcha=dcache.MCaptcha(
duration=30,
defense=dcache.Defense(
levels=[
dcache.Level(visitor_threshold=50, difficulty_factor=500),
dcache.Level(visitor_threshold=5000, difficulty_factor=50000),
]
),
),
)
# params = {
# "AddCaptcha": {
# "id": captcha_id,
# "mcaptcha": {
# "defense": {
# "levels": [
# {"visitor_threshold": 50, "difficulty_factor": 500},
# {"visitor_threshold": 5000, "difficulty_factor": 50000},
# ],
# "current_visitor_threshold": 0,
# },
# "duration": 30,
# },
# }
# }
# msg = RaftRequest(data = json.dumps(params))
resp = stub.AddCaptcha(msg)
pprint(f"Captcha added {captcha_id}: {resp}")
# for _ in range(0, 60):
# add_vote(host, captcha_id)
msgs = []
for _ in range(0,1000):
msgs.append(
dcache.DcacheRequest(addVisitor=dcache.CaptchaID(id=captcha_id)),
)
msgs = dcache.DcacheBatchRequest(requests=msgs)
def grpc_pipeline_add_vote(stub):
responses = stub.PipelineDcacheOps(msgs)
for r in responses.responses:
print(f"received respo: {r}")
def grpc_run():
with grpc.insecure_channel(host) as channel:
stub = DcacheServiceStub(channel)
grpc_add_captcha(stub, captcha_id)
grpc_pipeline_add_vote(stub)
#grpc_add_vote(stub, captcha_id)
if __name__ == "__main__":
grpc_run()
# add_vote("localhost:9002", captcha_id)
#if __name__ == "__main__":
# init(host)
# # initialize_cluster()
# add_data()
## switch_to_cluster(host, nodes=[1, 2,3])
# add_vote("localhost:9001", captcha_id)
# add_vote("localhost:9001", captcha_id)
## initialize_cluster()
## add_vote("localhost:9002")
# for _ in range(0, 60):
# add_vote("localhost:9002", captcha_id)

138
tests/.gitignore vendored
View file

@ -1,138 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/

Some files were not shown because too many files have changed in this diff.