Compare commits

..

267 commits

Author SHA1 Message Date
953d1bc3b2 Merge pull request 'chore(deps): update dependency bulma to v1.0.3' (#129) from renovate/bulma-1.x into master
Reviewed-on: #129
2025-03-20 14:06:20 +05:30
Renovate Bot
ed152fae82 chore(deps): update dependency bulma to v1.0.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2025-02-09 23:39:19 +00:00
c3189b9aa7 Merge pull request 'fix(deps): update rust crate clap to v4.5.18' (#125) from renovate/clap-4.x-lockfile into master
Reviewed-on: #125
2024-09-21 20:05:31 +05:30
f1118ffa40 Merge pull request 'chore(deps): update dependency greenlet to v3.1.1' (#124) from renovate/greenlet-3.x into master
Reviewed-on: #124
2024-09-21 20:05:24 +05:30
Renovate Bot
4f217484c4 fix(deps): update rust crate clap to v4.5.18
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-20 20:31:14 +00:00
Renovate Bot
6d18931a34 chore(deps): update dependency greenlet to v3.1.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-20 17:31:04 +00:00
a2eb1f4f9b Merge pull request 'chore(deps): update dependency protobuf to v4.25.5' (#123) from renovate/protobuf-4.x into master
Reviewed-on: #123
2024-09-19 11:40:27 +05:30
Renovate Bot
8b19c1bf03 chore(deps): update dependency protobuf to v4.25.5
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-18 22:30:55 +00:00
cb873388ad Merge pull request 'chore(deps): update dependency idna to v3.10' (#120) from renovate/idna-3.x into master
Reviewed-on: #120
2024-09-16 14:09:27 +05:30
6ab2c9c269 Merge pull request 'chore(deps): update dependency locust to v2.31.6' (#122) from renovate/locust-2.x into master
Reviewed-on: #122
2024-09-16 14:09:18 +05:30
861fec12da Merge pull request 'chore(deps): update rust crate anyhow to v1.0.89' (#121) from renovate/anyhow-1.x-lockfile into master
Reviewed-on: #121
2024-09-16 14:09:12 +05:30
Renovate Bot
2db08a3091 chore(deps): update dependency idna to v3.10
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-15 18:31:39 +00:00
Renovate Bot
34b31b83de chore(deps): update dependency locust to v2.31.6
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-15 11:01:05 +00:00
Renovate Bot
e237312401 chore(deps): update rust crate anyhow to v1.0.89
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-15 03:31:26 +00:00
86ed023a80 Merge pull request 'fix(deps): update rust crate tokio to v1.40.0' (#119) from renovate/tokio-1.x-lockfile into master
Reviewed-on: #119
2024-09-13 21:05:06 +05:30
267e5e9907 Merge pull request 'fix(deps): update rust crate serde to v1.0.210' (#116) from renovate/serde-monorepo into master
Reviewed-on: #116
2024-09-13 21:04:48 +05:30
Renovate Bot
01e027cb92 fix(deps): update rust crate tokio to v1.40.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 12:35:00 +00:00
1db67cd02e Merge pull request 'fix(deps): update rust crate tokio-stream to v0.1.16' (#117) from renovate/tokio-stream-0.x-lockfile into master
Reviewed-on: #117
2024-09-13 17:55:57 +05:30
09bc80967f Merge pull request 'fix(deps): update rust crate derive_builder to v0.20.1' (#115) from renovate/derive_builder-0.x-lockfile into master
Reviewed-on: #115
2024-09-13 17:55:51 +05:30
Renovate Bot
82c539a316 fix(deps): update rust crate tokio-stream to v0.1.16
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-09-13 09:31:20 +00:00
Renovate Bot
e8919d0e2a fix(deps): update rust crate serde to v1.0.210
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-09-13 09:31:15 +00:00
Renovate Bot
768f4fccb1 fix(deps): update rust crate derive_builder to v0.20.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 09:31:08 +00:00
47cd7b8ca3 Merge pull request 'fix(deps): update rust crate clap to v4.5.17' (#114) from renovate/clap-4.x-lockfile into master
Reviewed-on: #114
2024-09-13 14:42:49 +05:30
602d7ef08d Merge pull request 'fix(deps): update rust crate async-trait to v0.1.82' (#113) from renovate/async-trait-0.x-lockfile into master
Reviewed-on: #113
2024-09-13 14:42:46 +05:30
755a84d1ca Merge pull request 'chore(deps): update rust crate serde_json to v1.0.128' (#112) from renovate/serde_json-1.x-lockfile into master
Reviewed-on: #112
2024-09-13 14:42:43 +05:30
5a7ae2bd2b Merge pull request 'chore(deps): update dependency urllib3 to v2.2.3' (#111) from renovate/urllib3-2.x into master
Reviewed-on: #111
2024-09-13 14:42:40 +05:30
Renovate Bot
ae61cb406c fix(deps): update rust crate clap to v4.5.17
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-12 17:03:11 +00:00
Renovate Bot
692d1e858c fix(deps): update rust crate async-trait to v0.1.82
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-09-12 17:03:01 +00:00
Renovate Bot
568245fe82 chore(deps): update rust crate serde_json to v1.0.128
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-12 17:02:55 +00:00
Renovate Bot
bf1c1403c3 chore(deps): update dependency urllib3 to v2.2.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-12 17:02:40 +00:00
60462cae5e Merge pull request 'chore(deps): update rust crate anyhow to v1.0.88' (#110) from renovate/anyhow-1.x-lockfile into master
Reviewed-on: #110
2024-09-12 22:11:28 +05:30
bb32d39ad3 Merge pull request 'chore(deps): update dependency greenlet to v3.1.0' (#109) from renovate/greenlet-3.x into master
Reviewed-on: #109
2024-09-12 22:06:04 +05:30
23197ec442 Merge pull request 'chore(deps): update dependency msgpack to v1.1.0' (#108) from renovate/msgpack-1.x into master
Reviewed-on: #108
2024-09-12 22:05:54 +05:30
b6d3ed6fb8 Merge pull request 'chore(deps): update dependency flask-cors to v5' (#106) from renovate/flask-cors-5.x into master
Reviewed-on: #106
2024-09-12 22:03:21 +05:30
Renovate Bot
f2e892507f chore(deps): update rust crate anyhow to v1.0.88
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 18:32:24 +00:00
Renovate Bot
7fbb44a57c chore(deps): update dependency greenlet to v3.1.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-10 17:01:40 +00:00
Renovate Bot
cb0af940d3 chore(deps): update dependency msgpack to v1.1.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-10 04:31:16 +00:00
Renovate Bot
6d7ae8016f chore(deps): update dependency flask-cors to v5
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-02 07:31:33 +00:00
c196bae7bc Merge pull request 'chore(deps): update dependency locust to v2.31.5' (#107) from renovate/locust-2.x into master
Reviewed-on: #107
2024-09-02 12:57:17 +05:30
ad598c500d Merge pull request 'chore(deps): update dependency flask-cors to v4.0.2' (#105) from renovate/flask-cors-4.x into master
Reviewed-on: #105
2024-09-02 12:57:00 +05:30
Renovate Bot
fe84700531 chore(deps): update dependency locust to v2.31.5
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-31 18:32:09 +00:00
Renovate Bot
8fe48cd226 chore(deps): update dependency flask-cors to v4.0.2
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-30 17:01:03 +00:00
bc337132b0 Merge pull request 'chore(deps): update dependency certifi to v2024.8.30' (#104) from renovate/certifi-2024.x into master
Reviewed-on: #104
2024-08-30 12:07:53 +05:30
Renovate Bot
caae3d7163 chore(deps): update dependency certifi to v2024.8.30
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-30 02:01:13 +00:00
b7579cb0b1 Merge pull request 'chore(deps): update dependency grpcio to v1.66.1' (#103) from renovate/grpcio-1.x into master
Reviewed-on: #103
2024-08-29 10:46:32 +05:30
Renovate Bot
ea5e44aabe chore(deps): update dependency grpcio to v1.66.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-28 21:01:00 +00:00
a9d8e35b73 Merge pull request 'chore(deps): update dependency idna to v3.8' (#102) from renovate/idna-3.x into master
Reviewed-on: #102
2024-08-28 21:41:43 +05:30
24893456d1 Merge pull request 'chore(deps): update dependency zope.interface to v7.0.3' (#101) from renovate/zope.interface-7.x into master
Reviewed-on: #101
2024-08-28 21:41:39 +05:30
8107aac12e Merge pull request 'chore(deps): update dependency werkzeug to v3.0.4' (#100) from renovate/werkzeug-3.x into master
Reviewed-on: #100
2024-08-28 21:41:36 +05:30
205d6d873d Merge pull request 'chore(deps): update dependency locust to v2.31.4' (#99) from renovate/locust-2.x into master
Reviewed-on: #99
2024-08-28 21:41:32 +05:30
Renovate Bot
9e0764eb56 chore(deps): update dependency idna to v3.8
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-28 08:31:23 +00:00
Renovate Bot
1115878113 chore(deps): update dependency zope.interface to v7.0.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-28 08:31:10 +00:00
Renovate Bot
2b121d7ba5 chore(deps): update dependency werkzeug to v3.0.4
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-28 08:31:08 +00:00
Renovate Bot
aa5b9c1144 chore(deps): update dependency locust to v2.31.4
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-28 08:31:04 +00:00
94817fcc3d Merge pull request 'chore(deps): update dependency grpcio to v1.66.0' (#95) from renovate/grpcio-1.x into master
Reviewed-on: #95
2024-08-28 13:42:14 +05:30
ea1f70e8b3 Merge pull request 'chore(deps): update dependency pyzmq to v26.2.0' (#96) from renovate/pyzmq-26.x into master
Reviewed-on: #96
2024-08-28 13:41:54 +05:30
4269dd5a9b Merge pull request 'fix(deps): update rust crate tokio to v1.39.3' (#98) from renovate/tokio-1.x-lockfile into master
Reviewed-on: #98
2024-08-28 13:40:52 +05:30
249153c45b Merge pull request 'fix(deps): update rust crate clap to v4.5.16' (#97) from renovate/clap-4.x-lockfile into master
Reviewed-on: #97
2024-08-28 13:40:48 +05:30
Renovate Bot
4b9b6f7a98 chore(deps): update dependency grpcio to v1.66.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-22 19:30:56 +00:00
Renovate Bot
817cc08d23 chore(deps): update dependency pyzmq to v26.2.0
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-22 09:01:00 +00:00
Renovate Bot
92f270d8af fix(deps): update rust crate tokio to v1.39.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-19 08:31:05 +00:00
Renovate Bot
2d2c3390a8 fix(deps): update rust crate clap to v4.5.16
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-19 08:31:02 +00:00
958d083513 Merge pull request 'chore(deps): update dependency locust to v2.31.3' (#87) from renovate/locust-2.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/87
2024-08-19 13:54:24 +05:30
3483cf0cd8 Merge pull request 'fix(deps): update rust crate serde to v1.0.208' (#94) from renovate/serde-monorepo into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/94
2024-08-19 13:54:09 +05:30
35c0a3e1f0 Merge pull request 'chore(deps): update rust crate serde_json to v1.0.125' (#93) from renovate/serde_json-1.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/93
2024-08-19 13:52:55 +05:30
cd86506988 Merge pull request 'fix(deps): update rust crate tower-service to v0.3.3' (#92) from renovate/tower-service-0.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/92
2024-08-19 13:51:58 +05:30
Renovate Bot
495c960243 chore(deps): update dependency locust to v2.31.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-15 19:00:54 +00:00
Renovate Bot
a7531d9b07 fix(deps): update rust crate serde to v1.0.208
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-15 16:31:58 +00:00
Renovate Bot
b48d57f824 chore(deps): update rust crate serde_json to v1.0.125
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-15 06:31:15 +00:00
Renovate Bot
e31a56d1cd fix(deps): update rust crate tower-service to v0.3.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-13 16:31:03 +00:00
04ab73fdca Merge pull request 'fix(deps): update rust crate serde to v1.0.207' (#89) from renovate/serde-monorepo into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/89
2024-08-13 15:35:11 +05:30
192da9f84c Merge pull request 'chore(deps): update rust crate serde_json to v1.0.124' (#91) from renovate/serde_json-1.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/91
2024-08-13 15:32:12 +05:30
4e1d54bb1e Merge pull request 'fix(deps): update rust crate clap to v4.5.15' (#90) from renovate/clap-4.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/90
2024-08-13 15:29:25 +05:30
Renovate Bot
85c59089f9 fix(deps): update rust crate serde to v1.0.207
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-12 21:02:14 +00:00
Renovate Bot
dfb2ba3186 chore(deps): update rust crate serde_json to v1.0.124
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-11 22:02:46 +00:00
Renovate Bot
af808795f3 fix(deps): update rust crate clap to v4.5.15
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-08-10 00:31:25 +00:00
325fca3327 Merge pull request 'chore(deps): update dependency zope.interface to v7.0.1' (#86) from renovate/zope.interface-7.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/86
2024-08-06 23:28:05 +05:30
Renovate Bot
d56a83424b chore(deps): update dependency zope.interface to v7.0.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-06 16:30:45 +00:00
62654de143 Merge pull request 'chore(deps): update dependency zope.interface to v7' (#85) from renovate/zope.interface-7.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/85
2024-08-06 16:37:14 +05:30
5ae93fa64d Merge pull request 'chore(deps): update dependency locust to v2.31.1' (#79) from renovate/locust-2.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/79
2024-08-06 16:36:55 +05:30
Renovate Bot
8cfd7d6958 chore(deps): update dependency zope.interface to v7
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-06 07:00:49 +00:00
Renovate Bot
654eac2cfa chore(deps): update dependency locust to v2.31.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-05 18:00:41 +00:00
387ab34350 Merge pull request 'chore(deps): update dependency pyzmq to v26.1.0' (#84) from renovate/pyzmq-26.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/84
2024-08-04 21:51:34 +05:30
Renovate Bot
e85aeee9fc chore(deps): update dependency pyzmq to v26.1.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-04 07:00:41 +00:00
f5e57c2f59 Merge pull request 'chore(deps): update dependency grpcio to v1.65.4' (#83) from renovate/grpcio-1.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/83
2024-08-03 10:45:41 +05:30
Renovate Bot
5e049ff360 chore(deps): update dependency grpcio to v1.65.4
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-02 18:00:41 +00:00
6951d5beb9 Merge pull request 'chore(deps): update rust crate serde_json to v1.0.122' (#82) from renovate/serde_json-1.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/82
2024-08-02 11:48:23 +05:30
Renovate Bot
81f6be7c84 chore(deps): update rust crate serde_json to v1.0.122
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-01 21:31:18 +00:00
8d2160a8f0 Merge pull request 'fix(deps): update rust crate clap to v4.5.13' (#81) from renovate/clap-4.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/81
2024-08-01 09:20:30 +05:30
Renovate Bot
4c21b156de fix(deps): update rust crate clap to v4.5.13
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-31 22:01:14 +00:00
168e8ca5b4 Merge pull request 'fix(deps): update rust crate clap to v4.5.12' (#80) from renovate/clap-4.x-lockfile into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/80
2024-08-01 03:27:08 +05:30
Renovate Bot
fbba2c4494 fix(deps): update rust crate clap to v4.5.12
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-07-31 17:30:57 +00:00
37e4c659ae Merge pull request 'chore(deps): update dependency grpcio to v1.65.2' (#78) from renovate/grpcio-1.x into master
Reviewed-on: https://git.batsense.net/mCaptcha/dcache/pulls/78
2024-07-31 21:29:48 +05:30
Renovate Bot
1b2ae0fc7c chore(deps): update dependency grpcio to v1.65.2
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-31 07:00:38 +00:00
f1961bd6de Merge pull request 'chore(deps): update rust crate serde_json to v1.0.121' (#77) from renovate/serde_json-1.x-lockfile into master
Reviewed-on: #77
2024-07-29 16:10:18 +05:30
Renovate Bot
0a38f929ac chore(deps): update rust crate serde_json to v1.0.121
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-07-28 22:01:24 +00:00
509ca76f93 Merge pull request 'fix(deps): update rust crate tokio to v1.39.2' (#74) from renovate/tokio-1.x-lockfile into master
Reviewed-on: #74
2024-07-27 20:28:30 +05:30
Renovate Bot
e653ee3dee fix(deps): update rust crate tokio to v1.39.2
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-07-27 11:31:00 +00:00
d5e3e9ec45 Merge pull request 'chore(deps): update dependency bulma to v1.0.2' (#76) from renovate/bulma-1.x into master
Reviewed-on: #76
2024-07-26 13:44:07 +05:30
2ca4162784 Merge pull request 'fix(deps): update rust crate clap to v4.5.11' (#73) from renovate/clap-4.x-lockfile into master
Reviewed-on: #73
2024-07-26 13:44:01 +05:30
Renovate Bot
5f232d8957 chore(deps): update dependency bulma to v1.0.2
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-07-25 16:00:43 +00:00
Renovate Bot
37514b774e fix(deps): update rust crate clap to v4.5.11
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-07-25 14:01:02 +00:00
86c295ff1e Merge pull request 'chore(deps): update dependency protobuf to v4.25.4' (#75) from renovate/protobuf-4.x into master
Reviewed-on: #75
2024-07-25 17:30:57 +05:30
Renovate Bot
bb2a80bfb2 chore(deps): update dependency protobuf to v4.25.4
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-24 20:30:34 +00:00
7f17651b40 Merge pull request 'fix(deps): update rust crate tokio to v1.39.0' (#72) from renovate/tokio-1.x-lockfile into master
Reviewed-on: #72
2024-07-23 21:27:02 +05:30
Renovate Bot
09539279f5 fix(deps): update rust crate tokio to v1.39.0
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-07-23 14:00:47 +00:00
ac13be93a2 Merge pull request 'fix(deps): update rust crate uuid to v1.10.0' (#71) from renovate/uuid-1.x-lockfile into master
Reviewed-on: #71
2024-07-23 14:28:25 +05:30
Renovate Bot
074508d797 fix(deps): update rust crate uuid to v1.10.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 15:01:05 +00:00
b3271d9458 Merge pull request 'chore(deps): update dependency grpcio to v1.65.1' (#69) from renovate/grpcio-1.x into master
Reviewed-on: #69
2024-07-22 20:13:33 +05:30
4e3d2021dd Merge pull request 'chore(deps): update dependency certifi to v2024.7.4' (#68) from renovate/certifi-2024.x into master
Reviewed-on: #68
2024-07-22 20:13:28 +05:30
b0942760a8 Merge pull request 'fix(deps): update rust crate tokio to v1.38.1' (#67) from renovate/tokio-1.x-lockfile into master
Reviewed-on: #67
2024-07-22 20:13:24 +05:30
9f4ba5ff22 Merge pull request 'fix(deps): update rust crate serde to v1.0.204' (#66) from renovate/serde-monorepo into master
Reviewed-on: #66
2024-07-22 20:13:20 +05:30
8b028e2012 Merge pull request 'fix(deps): update rust crate async-trait to v0.1.81' (#65) from renovate/async-trait-0.x-lockfile into master
Reviewed-on: #65
2024-07-22 20:13:17 +05:30
b6f2752d45 Merge pull request 'chore(deps): update rust crate serde_json to v1.0.120' (#64) from renovate/serde_json-1.x-lockfile into master
Reviewed-on: #64
2024-07-22 20:13:14 +05:30
Renovate Bot
754d5299ea chore(deps): update dependency grpcio to v1.65.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:31:15 +00:00
Renovate Bot
d8a9739593 chore(deps): update dependency certifi to v2024.7.4
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:31:11 +00:00
Renovate Bot
c315b99715 fix(deps): update rust crate tokio to v1.38.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:31:07 +00:00
Renovate Bot
94bbdcd8c3 fix(deps): update rust crate serde to v1.0.204
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:31:01 +00:00
Renovate Bot
6da09e5da4 fix(deps): update rust crate async-trait to v0.1.81
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:30:55 +00:00
Renovate Bot
6f6ce4f0a4 chore(deps): update rust crate serde_json to v1.0.120
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:30:47 +00:00
be00f2984b Merge pull request 'fix(deps): update rust crate lazy_static to v1.5.0' (#57) from renovate/lazy_static-1.x-lockfile into master
Reviewed-on: #57
2024-07-22 19:45:25 +05:30
cb51e988a0 Merge pull request 'fix(deps): update rust crate actix-rt to v2.10.0' (#50) from renovate/actix-rt-2.x-lockfile into master
Reviewed-on: #50
2024-07-22 19:45:22 +05:30
0dc42fa0ab Merge pull request 'fix(deps): update rust crate actix to v0.13.5' (#49) from renovate/actix-0.x-lockfile into master
Reviewed-on: #49
2024-07-22 19:45:03 +05:30
74e0aed3c4 Merge pull request 'fix(deps): update rust crate derive_more to v0.99.18' (#52) from renovate/derive_more-0.x-lockfile into master
Reviewed-on: #52
2024-07-22 19:44:51 +05:30
aad69666b7 Merge pull request 'fix(deps): update rust crate clap to v4.5.9' (#62) from renovate/clap-4.x-lockfile into master
Reviewed-on: #62
2024-07-22 19:44:48 +05:30
e6d1eb9d4c Merge pull request 'chore(deps): update dependency locust to v2.29.1' (#61) from renovate/locust-2.x into master
Reviewed-on: #61
2024-07-22 19:44:40 +05:30
Renovate Bot
18ad29790d fix(deps): update rust crate lazy_static to v1.5.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:01:16 +00:00
Renovate Bot
aded3f8281 fix(deps): update rust crate actix-rt to v2.10.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:01:11 +00:00
Renovate Bot
6637536551 fix(deps): update rust crate derive_more to v0.99.18
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:01:06 +00:00
Renovate Bot
5ccc380128 fix(deps): update rust crate clap to v4.5.9
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:01:02 +00:00
Renovate Bot
858bf4e936 fix(deps): update rust crate actix to v0.13.5
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:00:58 +00:00
Renovate Bot
19ad291b2b chore(deps): update dependency locust to v2.29.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-22 14:00:45 +00:00
abc7a33b86 Merge pull request 'fix(deps): update rust crate dashmap to v6' (#60) from renovate/dashmap-6.x into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #60
2024-07-13 12:58:39 +05:30
e3cd0ed9f0 Merge pull request 'fix(deps): update rust crate uuid to v1.9.1' (#59) from renovate/uuid-1.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #59
2024-06-26 00:33:59 +05:30
51738f46f0 Merge pull request 'chore(deps): update rust crate serde_json to v1.0.118' (#58) from renovate/serde_json-1.x-lockfile into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #58
2024-06-26 00:33:54 +05:30
Renovate Bot
a0634a1f37 fix(deps): update rust crate dashmap to v6
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-25 06:31:28 +00:00
Renovate Bot
c12629410d fix(deps): update rust crate uuid to v1.9.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-25 06:31:23 +00:00
Renovate Bot
23482a937d chore(deps): update rust crate serde_json to v1.0.118
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-25 06:31:09 +00:00
9ce7f67471 Merge pull request 'chore(deps): update dependency psutil to v6' (#56) from renovate/psutil-6.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #56
2024-06-25 11:53:23 +05:30
d0a740c12d Merge pull request 'fix(deps): update rust crate url to v2.5.2' (#55) from renovate/url-2.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #55
2024-06-25 11:53:20 +05:30
38e74c118b Merge pull request 'chore(deps): update dependency urllib3 to v2.2.2' (#53) from renovate/urllib3-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #53
2024-06-25 11:53:17 +05:30
Renovate Bot
c292ba42d8 chore(deps): update dependency psutil to v6
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-18 22:00:43 +00:00
Renovate Bot
7706e3ae45 fix(deps): update rust crate url to v2.5.2
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-18 21:01:22 +00:00
Renovate Bot
ff1bdfd04b chore(deps): update dependency urllib3 to v2.2.2
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-18 13:31:45 +00:00
41f36eb67f Merge pull request 'fix(deps): update rust crate clap to v4.5.7' (#51) from renovate/clap-4.x-lockfile into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #51
2024-06-10 23:54:32 +05:30
Renovate Bot
5bcd0a76ad fix(deps): update rust crate clap to v4.5.7
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-10 15:01:04 +00:00
8dccd63ef2 Merge pull request 'chore(deps): update dependency pyzmq to v26' (#48) from renovate/pyzmq-26.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #48
2024-06-08 13:27:35 +05:30
Renovate Bot
dfa21dcc69 chore(deps): update dependency pyzmq to v26
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline failed
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 21:30:25 +00:00
73f0193ec5 Merge pull request 'chore(deps): update dependency gevent to v24' (#46) from renovate/gevent-24.x into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #46
2024-06-08 02:46:28 +05:30
e51cfc748a Merge pull request 'fix(deps): update tonic monorepo to 0.11.0' (#43) from renovate/tonic-monorepo into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #43
2024-06-08 02:46:12 +05:30
dd7176d47c Merge branch 'master' into renovate/tonic-monorepo
All checks were successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-06-08 02:45:56 +05:30
0e0db2772c Merge pull request 'fix(deps): update rust crate derive_builder to 0.20.0' (#38) from renovate/derive_builder-0.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #38
2024-06-08 02:45:49 +05:30
cdc23c4013 Merge pull request 'chore(deps): update dependency bulma to v1' (#44) from renovate/bulma-1.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #44
2024-06-08 02:45:23 +05:30
8373e417c1 Merge pull request 'fix(deps): update rust crate uuid to v1.8.0' (#42) from renovate/uuid-1.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #42
2024-06-08 02:45:15 +05:30
df3e557319 Merge pull request 'chore(deps): update dependency locust to v2.29.0' (#31) from renovate/locust-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #31
2024-06-08 02:45:10 +05:30
8745761634 Merge pull request 'chore(deps): update dependency certifi to v2024' (#45) from renovate/certifi-2024.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #45
2024-06-08 02:45:04 +05:30
Renovate Bot
c04e14f2a0 chore(deps): update dependency gevent to v24
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 19:31:03 +00:00
Renovate Bot
e4add5ed97 chore(deps): update dependency certifi to v2024
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 19:31:02 +00:00
Renovate Bot
74ab8abe8b chore(deps): update dependency bulma to v1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 19:30:58 +00:00
Renovate Bot
57694b469a fix(deps): update tonic monorepo to 0.11.0
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline failed
ci/woodpecker/push/woodpecker Pipeline was successful
2024-06-07 19:30:54 +00:00
Renovate Bot
ee126cff49 fix(deps): update rust crate uuid to v1.8.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 19:30:49 +00:00
Renovate Bot
3ff434ea0c fix(deps): update rust crate derive_builder to 0.20.0
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 19:30:42 +00:00
Renovate Bot
37a9a0342d chore(deps): update dependency locust to v2.29.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 19:30:30 +00:00
7cecb8e985 Merge pull request 'fix(deps): update rust crate config to 0.14' (#37) from renovate/config-0.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #37
2024-06-08 00:37:32 +05:30
de15326c71 Merge pull request 'fix(deps): update rust crate tokio to v1.38.0' (#41) from renovate/tokio-1.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #41
2024-06-08 00:37:16 +05:30
9cd6591631 Merge pull request 'fix(deps): update rust crate pretty_env_logger to 0.5.0' (#40) from renovate/pretty_env_logger-0.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #40
2024-06-08 00:37:11 +05:30
f9e40fd0f8 Merge pull request 'fix(deps): update rust crate clap to v4.5.6' (#36) from renovate/clap-4.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #36
2024-06-08 00:37:03 +05:30
03cefb4f45 Merge pull request 'chore(deps): update rust crate base64 to 0.22.0' (#35) from renovate/base64-0.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #35
2024-06-08 00:36:58 +05:30
4bf46f6957 Merge pull request 'chore(deps): update dependency itsdangerous to v2.2.0' (#30) from renovate/itsdangerous-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #30
2024-06-08 00:36:55 +05:30
Renovate Bot
e1190647a3 fix(deps): update rust crate tokio to v1.38.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 17:01:46 +00:00
Renovate Bot
753888a3ab fix(deps): update rust crate pretty_env_logger to 0.5.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 17:01:41 +00:00
Renovate Bot
642d81c151 fix(deps): update rust crate config to 0.14
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 17:01:22 +00:00
Renovate Bot
62c81eff13 fix(deps): update rust crate clap to v4.5.6
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 17:01:10 +00:00
Renovate Bot
f48cb7d4b0 chore(deps): update rust crate base64 to 0.22.0
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 17:01:05 +00:00
Renovate Bot
87185f964e chore(deps): update dependency itsdangerous to v2.2.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 17:00:57 +00:00
7dc383c183 Merge pull request 'chore(deps): update dependency zope.interface to v6.4.post2' (#34) from renovate/zope.interface-6.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #34
2024-06-07 22:22:17 +05:30
650f54c47d Merge pull request 'chore(deps): update dependency urllib3 to v2.2.1' (#33) from renovate/urllib3-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #33
2024-06-07 22:22:12 +05:30
77d388bf37 Merge pull request 'chore(deps): update dependency requests to v2.32.3' (#32) from renovate/requests-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #32
2024-06-07 22:22:08 +05:30
d7aafcc0a0 Merge pull request 'chore(deps): update dependency idna to v3.7' (#29) from renovate/idna-3.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #29
2024-06-07 22:21:58 +05:30
1f164763ba Merge pull request 'chore(deps): update dependency grpcio to v1.64.1' (#27) from renovate/grpcio-1.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #27
2024-06-07 22:21:53 +05:30
2917167471 Merge pull request 'chore(deps): update dependency geventhttpclient to v2.3.1' (#26) from renovate/geventhttpclient-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #26
2024-06-07 22:21:49 +05:30
3ef8964551 Merge pull request 'chore(deps): update dependency msgpack to v1.0.8' (#12) from renovate/msgpack-1.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #12
2024-06-07 22:21:45 +05:30
Renovate Bot
a8bdf8b101 chore(deps): update dependency zope.interface to v6.4.post2
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:58 +00:00
Renovate Bot
fe64fefcd3 chore(deps): update dependency urllib3 to v2.2.1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:57 +00:00
Renovate Bot
5ae8f1d8e9 chore(deps): update dependency requests to v2.32.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:56 +00:00
Renovate Bot
f0933b145a chore(deps): update dependency idna to v3.7
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:49 +00:00
Renovate Bot
14ca663da7 chore(deps): update dependency grpcio to v1.64.1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:42 +00:00
Renovate Bot
67ad8ed916 chore(deps): update dependency geventhttpclient to v2.3.1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:39 +00:00
Renovate Bot
45f4c86759 chore(deps): update dependency msgpack to v1.0.8
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 14:30:38 +00:00
ac86a68911 Merge pull request 'chore(deps): update rust crate serde_json to v1.0.117' (#18) from renovate/serde_json-1.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #18
2024-06-07 19:55:57 +05:30
5db9a4fdaf Merge pull request 'chore(deps): update dependency protobuf to v4.25.3' (#13) from renovate/protobuf-4.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #13
2024-06-07 19:55:37 +05:30
fde65b9967 Merge pull request 'fix(deps): update rust crate actix to v0.13.3' (#19) from renovate/actix-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #19
2024-06-07 19:55:33 +05:30
27b8b233ae Merge pull request 'fix(deps): update rust crate async-trait to v0.1.80' (#20) from renovate/async-trait-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #20
2024-06-07 19:55:29 +05:30
927d37d35d Merge pull request 'fix(deps): update rust crate prost to v0.12.6' (#21) from renovate/tokio-prost-monorepo into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #21
2024-06-07 19:55:25 +05:30
a57c008bb6 Merge pull request 'fix(deps): update rust crate serde to v1.0.203' (#22) from renovate/serde-monorepo into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #22
2024-06-07 19:55:21 +05:30
52e1544b8a Merge pull request 'fix(deps): update rust crate tokio-stream to v0.1.15' (#23) from renovate/tokio-stream-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #23
2024-06-07 19:55:16 +05:30
dc271018e9 Merge pull request 'chore(deps): update dependency blinker to v1.8.2' (#24) from renovate/blinker-1.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #24
2024-06-07 19:55:12 +05:30
92a48574a9 Merge pull request 'chore(deps): update dependency bulma to v0.9.4' (#25) from renovate/bulma-0.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #25
2024-06-07 19:55:07 +05:30
Renovate Bot
31ac9ea859 chore(deps): update dependency bulma to v0.9.4
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:59 +00:00
Renovate Bot
e34fc6b52f chore(deps): update dependency blinker to v1.8.2
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:58 +00:00
Renovate Bot
04f74b8a5d fix(deps): update rust crate tokio-stream to v0.1.15
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:55 +00:00
Renovate Bot
d4b11163b6 fix(deps): update rust crate serde to v1.0.203
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:52 +00:00
Renovate Bot
6c3d57743c fix(deps): update rust crate prost to v0.12.6
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:48 +00:00
Renovate Bot
8447a9fabf fix(deps): update rust crate async-trait to v0.1.80
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:45 +00:00
Renovate Bot
db1f226360 fix(deps): update rust crate actix to v0.13.3
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:41 +00:00
Renovate Bot
62f7503910 chore(deps): update dependency protobuf to v4.25.3
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:02:32 +00:00
70b97b1d4f Merge pull request 'chore(deps): update dependency flask-cors to v4.0.1' (#8) from renovate/flask-cors-4.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #8
2024-06-07 15:31:44 +05:30
62cc4c43e5 Merge pull request 'chore(deps): update dependency greenlet to v3.0.3' (#9) from renovate/greenlet-3.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #9
2024-06-07 15:31:38 +05:30
579310f7a9 Merge pull request 'chore(deps): update dependency jinja2 to v3.1.4' (#10) from renovate/jinja2-3.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #10
2024-06-07 15:31:31 +05:30
619c39d102 Merge pull request 'chore(deps): update dependency markupsafe to v2.1.5' (#11) from renovate/markupsafe-2.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #11
2024-06-07 15:31:24 +05:30
394b42a414 Merge pull request 'chore(deps): update dependency flask to v3.0.3' (#7) from renovate/flask-3.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #7
2024-06-07 15:31:11 +05:30
Renovate Bot
1a6986203f chore(deps): update rust crate serde_json to v1.0.117
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-07 10:01:06 +00:00
7d5ca30ae8 Merge pull request 'chore(deps): update dependency psutil to v5.9.8' (#14) from renovate/psutil-5.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #14
2024-06-07 15:30:59 +05:30
eb95c05651 Merge pull request 'chore(deps): update dependency werkzeug to v3.0.3' (#15) from renovate/werkzeug-3.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #15
2024-06-07 15:30:52 +05:30
2b0ce36477 Merge pull request 'chore(deps): update rust crate anyhow to v1.0.86' (#16) from renovate/anyhow-1.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #16
2024-06-07 15:30:46 +05:30
Renovate Bot
8387a6338e chore(deps): update rust crate anyhow to v1.0.86
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:01:19 +00:00
Renovate Bot
8cc021d1d1 chore(deps): update dependency werkzeug to v3.0.3
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:01:05 +00:00
Renovate Bot
29aa823dc6 chore(deps): update dependency psutil to v5.9.8
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:01:03 +00:00
Renovate Bot
5f66f088ac chore(deps): update dependency markupsafe to v2.1.5
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:00:59 +00:00
Renovate Bot
92c2437cf9 chore(deps): update dependency jinja2 to v3.1.4
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:00:58 +00:00
Renovate Bot
172035626b chore(deps): update dependency greenlet to v3.0.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:00:56 +00:00
Renovate Bot
c32b1b717e chore(deps): update dependency flask-cors to v4.0.1
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:00:55 +00:00
Renovate Bot
d11ebbc50a chore(deps): update dependency flask to v3.0.3
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 08:00:53 +00:00
a6a48eec47 Merge pull request 'chore: Configure Renovate' (#6) from renovate/configure into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #6
2024-06-04 13:04:52 +05:30
Renovate Bot
0ff5669e96 Add renovate.json
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-06-04 07:34:00 +00:00
2ae3d9625e Merge pull request 'Integration and unit tests' (#4) from feat-integration-tests into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #4
2023-12-31 03:25:29 +05:30
ff71e35da3
fix: create virtualenv
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2023-12-31 03:05:06 +05:30
d7fe9332d6
debug: mv dcache_py into tests
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline is pending
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-31 03:04:34 +05:30
a10fb878f5
fix: rm stub
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2023-12-31 03:03:00 +05:30
f47c0867d3
fix: use python virtualenv 2023-12-31 03:00:36 +05:30
ae6651e624
fix: rm stray command 2023-12-31 03:00:36 +05:30
73aa755035
fix: CI: use different stage name for integration tests 2023-12-31 03:00:36 +05:30
240b5ec13a
chore: linting 2023-12-31 03:00:36 +05:30
e548a532a0
feat: add integration testing 2023-12-31 03:00:36 +05:30
c72688656f
feat: add integration tests 2023-12-31 03:00:36 +05:30
853ed44ba7 Merge pull request 'feat: benchmark report' (#5) from benchmark-report into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #5
2023-12-31 02:54:50 +05:30
a82b9044d5
feat: benchmark report
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2023-12-31 02:54:11 +05:30
f20d044537
feat: add readme
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2023-12-31 01:57:03 +05:30
9b281151e7 Merge pull request 'Use atomic types to speedup variable difficulty algorithm' (#3) from optimize-libmcaptha into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #3
2023-12-31 01:31:22 +05:30
fd9ac9d312
feat: implement RetrievePow, DeletePow, CacheResult, VerifyCaptchaResult, DeleteCaptchaResult, CaptchaExists& GetVisitorCount RPCs
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2023-12-31 01:21:32 +05:30
b8a2a026d6
feat: test variable difficulty driver
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
2023-12-30 20:28:05 +05:30
1f4d3574ab
feat: use DashMap to speedup hashcash implementation
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
2023-12-30 18:00:09 +05:30
c11f89094a
feat: post bottleneck single req and pipeline reqs results
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
2023-12-30 16:19:25 +05:30
20296d5a70
feat: use faster counter impl 2023-12-30 16:19:24 +05:30
45a49288b7
feat: custom counter implementation to solve bottleneck 2023-12-30 16:19:24 +05:30
59180fd86f
feat: connection pool for log replication 2023-12-30 16:19:24 +05:30
41e438828c
feat: bottleneck diagnosis and solution flamegraphs 2023-12-30 16:19:24 +05:30
0f6d9e387f
feat: identify libmcaptcha bottleneck 2023-12-30 16:19:23 +05:30
b4469e03d0 Merge pull request 'feat-protobuf' (#1) from feat-protobuf into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #1
2023-12-30 14:24:35 +05:30
01d4c2fce3
feat: install protoc
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2023-12-30 14:18:15 +05:30
aed464fe38 Merge pull request 'benches' (#2) from benches into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #2
2023-12-30 14:07:18 +05:30
59b847b740
feat: bench pipeline implementation
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2023-12-29 16:07:45 +05:30
a288450721
feat: batch RPC
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-29 15:58:36 +05:30
0f5762536b
debug: pipeline dump 2023-12-28 19:15:07 +05:30
565ffec3c6
feat: port locust to use GRPC payloads
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2023-12-28 14:23:45 +05:30
69172db518
feat: add nojson results 2023-12-28 14:16:58 +05:30
67051bc187
feat: call individual RPC methods without the JSON hack in test script 2023-12-28 13:45:43 +05:30
3ad3d280d9
feat: use protobuf without JSON hack 2023-12-28 13:45:01 +05:30
337f89f25a
feat: use grpc within locust
Some checks failed
ci/woodpecker/pr/woodpecker Pipeline is pending
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-28 13:44:21 +05:30
e50b7a5751
chore: log learner joins 2023-12-28 13:43:53 +05:30
4446cd83bd
feat: python grpc test client
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2023-12-27 19:48:51 +05:30
6d90790e58
fix: actix actors will only start on actix_rt 2023-12-27 19:44:22 +05:30
ad77db65f3
feat: add nopooling(pipeline+nopipeline) and pooled pipeline benches
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2023-12-26 18:46:18 +05:30
bebab4c8a5
feat: conn pool no pipeline
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
note: lost previous benchmarks, so redoing
2023-12-26 18:38:06 +05:30
ba9694b31f
chore: cleanup deadcode from REST impl
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2023-12-26 15:13:41 +05:30
a2dd2c31f6
feat: add protobuf bench
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-26 14:59:43 +05:30
70ef43b720
feat: use protobuf for RPC
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
2023-12-26 14:58:55 +05:30
109 changed files with 10205 additions and 2311 deletions

1
.gitignore vendored
View file

@ -156,3 +156,4 @@ keys
htmlcov/ htmlcov/
tmp/ tmp/
static/ static/
nohup.out

View file

@ -2,11 +2,26 @@ steps:
backend: backend:
image: rust image: rust
commands: commands:
- apt update
- apt-get install -y --no-install-recommends protobuf-compiler
- cargo build - cargo build
- cargo test --lib
# - make migrate # - make migrate
# - make # - make
# - make release # - make release
# - make test // requires Docker-in-Docker # - make test // requires Docker-in-Docker
integration_tests:
image: python
commands:
- pip install virtualenv && virtualenv venv
- . venv/bin/activate && pip install -r requirements.txt
- nohup ./target/debug/main --id 1 --http-addr 127.0.0.1:9001 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
- sleep 1
- nohup ./target/debug/main --id 2 --http-addr 127.0.0.1:9002 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
- sleep 1
- nohup ./target/debug/main --id 3 --http-addr 127.0.0.1:9003 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
- mv dcache_py/ tests/
- . venv/bin/activate && python tests/test.py
build_docker_img: build_docker_img:
image: plugins/docker image: plugins/docker

2316
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -2,6 +2,7 @@
name = "dcache" name = "dcache"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
build = "build.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -12,38 +13,40 @@ openraft = { version = "0.8.8", features = ["serde", "single-term-leader"]}
#libmcaptcha = { path="/src/atm/code/mcaptcha/libmcaptcha", features=["full"] } #libmcaptcha = { path="/src/atm/code/mcaptcha/libmcaptcha", features=["full"] }
libmcaptcha = { git = "https://github.com/mcaptcha/libmcaptcha", branch = "feat-dcache", features = ["full"]} libmcaptcha = { git = "https://github.com/mcaptcha/libmcaptcha", branch = "feat-dcache", features = ["full"]}
tracing = { version = "0.1.37", features = ["log"] } tracing = { version = "0.1.37", features = ["log"] }
serde_json = "1.0.96" serde_json = "1"
serde = { version = "1.0.163", features = ["derive"] } serde = { version = "1", features = ["derive"] }
byteorder = "1.4.3" byteorder = "1.4.3"
actix-web = "4"
actix-web-httpauth = "0.8.0"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] } futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
lazy_static = "1.4.0" lazy_static = "1.4.0"
pretty_env_logger = "0.4.0" pretty_env_logger = "0.5.0"
uuid = { version = "1", features = ["v4"] } uuid = { version = "1", features = ["v4"] }
actix-web-codegen-const-routes = { version = "0.1.0", tag = "0.1.0", git = "https://github.com/realaravinth/actix-web-codegen-const-routes" } derive_builder = "0.20.0"
derive_builder = "0.11.2" config = { version = "0.14", features = ["toml"] }
config = { version = "0.11", features = ["toml"] }
derive_more = "0.99.17" derive_more = "0.99.17"
url = { version = "2.2.2", features = ["serde"]} url = { version = "2.2.2", features = ["serde"]}
async-trait = "0.1.36" async-trait = "0.1.36"
clap = { version = "4.1.11", features = ["derive", "env"] } clap = { version = "4.1.11", features = ["derive", "env"] }
reqwest = { version = "0.11.9", features = ["json"] } tokio = { version = "1.0", default-features = false, features = ["sync", "macros", "rt-multi-thread", "time"] }
tokio = { version = "1.0", default-features = false, features = ["sync"] }
tracing-subscriber = { version = "0.3.0", features = ["env-filter"] } tracing-subscriber = { version = "0.3.0", features = ["env-filter"] }
actix = "0.13.0" actix = "0.13.0"
actix-web-actors = "4.2.0" tonic = { version = "0.11.0", features = ["transport", "channel"] }
awc = "3.2.0" prost = "0.12.3"
futures = "0.3.29" tokio-stream = "0.1.14"
actix-codec = "0.5.1" async-stream = "0.3.5"
tokio-tungstenite = "0.21.0" actix-rt = "2.9.0"
futures = "0.3.30"
tower-service = "0.3.2"
dashmap = { version = "6.0.0", features = ["serde"] }
[build-dependencies] [build-dependencies]
serde_json = "1" serde_json = "1"
tonic-build = "0.11.0"
[dev-dependencies] [dev-dependencies]
actix-rt = "2.7.0" base64 = "0.22.0"
base64 = "0.13.0"
anyhow = "1.0.63" anyhow = "1.0.63"
maplit = "1.0.2" maplit = "1.0.2"
#[profile.release]
#debug = true

View file

@ -4,6 +4,7 @@
FROM rust:latest as rust FROM rust:latest as rust
WORKDIR /src WORKDIR /src
RUN apt update && apt-get install -y --no-install-recommends protobuf-compiler
COPY . . COPY . .
RUN cargo build --release RUN cargo build --release

6
Makefile Normal file
View file

@ -0,0 +1,6 @@
# Generate Python gRPC/protobuf bindings for the dcache proto.
# NOTE: make executes every recipe line in its own shell, so the venv
# activation and the cd must be chained (&&) into the same logical line
# as the protoc invocation or they silently have no effect.
python.gen:
	. venv/bin/activate && cd dcache_py && \
	python -m grpc_tools.protoc \
		-I=./proto/dcache/ --python_out=dcache_py/ \
		--pyi_out=dcache_py/ \
		--grpc_python_out=dcache_py/ ./proto/dcache/dcache.proto

44
README.md Normal file
View file

@ -0,0 +1,44 @@
[![status-badge](https://ci.batsense.net/api/badges/105/status.svg)](https://ci.batsense.net/repos/105)
---
# dcache: Distributed, Highly Available cache implementation for mCaptcha
## Overview
- Uses Raft consensus algorithm via [openraft](https://crates.io/crates/openraft)
- GRPC via [tonic](https://crates.io/crates/tonic)
## Tips
We recommend running at least three instances of dcache in your
deployment.
**NOTE: Catastrophic failure will occur when n/2 + 1 instances are
down.**
## Usage
## Firewall configuration
dcache uses a single, configurable port for both server-to-server and client-to-server
communications. Please open that port on your server.
## Launch
```bash
dcache --id 1 \
--http-addr 127.0.0.1:9001 \
--introducer-addr 127.0.0.1:9001 \
--introducer-id 1 \
--cluster-size 3
```
### Options
| Name | Purpose |
| ----------------- | ----------------------------------------------------------- |
| --id | Unique integer to identify node in network |
| --http-addr | Socket address to bind and listen for connections |
| --introducer-addr | Socket address of introducer node; required to join network |
| --introducer-id | ID of the introducer node; required to join network |
| --cluster-size | Total size of the cluster |

64
bench/adaptor.py Normal file
View file

@ -0,0 +1,64 @@
import time
from typing import Any, Callable
import grpc
import grpc.experimental.gevent as grpc_gevent
from grpc_interceptor import ClientInterceptor
from locust import User
from locust.exception import LocustError
# patch grpc so that it uses gevent instead of asyncio
grpc_gevent.init_gevent()
class LocustInterceptor(ClientInterceptor):
    """gRPC client interceptor that reports every RPC to Locust's
    request event, so gRPC calls appear in Locust statistics.

    Timing covers the full call; response size is read from the
    resolved result's serialized ByteSize().
    """

    def __init__(self, environment, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Locust Environment whose event bus receives the stats.
        self.env = environment

    def intercept(
        self,
        method: Callable,
        request_or_iterator: Any,
        call_details: grpc.ClientCallDetails,
    ):
        reply = None
        failure = None
        reply_size = 0
        started = time.perf_counter()
        try:
            reply = method(request_or_iterator, call_details)
            # result() blocks until the future resolves; size is the
            # protobuf-encoded payload length.
            reply_size = reply.result().ByteSize()
        except grpc.RpcError as e:
            failure = e
        # Report the call whether it succeeded or raised.
        self.env.events.request.fire(
            request_type="grpc",
            name=call_details.method,
            response_time=(time.perf_counter() - started) * 1000,
            response_length=reply_size,
            response=reply,
            context=None,
            exception=failure,
        )
        return reply
class GrpcUser(User):
    """Abstract Locust user speaking gRPC over an insecure channel.

    Subclasses must set ``host`` and ``stub_class``; the channel is
    wrapped with LocustInterceptor so every RPC is recorded by Locust.
    """

    abstract = True
    # gRPC stub class to instantiate against the channel; set by subclass.
    stub_class = None

    def __init__(self, environment):
        super().__init__(environment)
        # Fail fast when a subclass forgot required configuration.
        for attr_name in ("host", "stub_class"):
            if getattr(self, attr_name) is None:
                raise LocustError(f"You must specify the {attr_name}.")
        channel = grpc.insecure_channel(self.host)
        interceptor = LocustInterceptor(environment=environment)
        self._channel = grpc.intercept_channel(channel, interceptor)
        self.stub = self.stub_class(self._channel)

View file

@ -1,95 +1,69 @@
import json
import time
import grpc
import gevent
from pprint import pprint from pprint import pprint
from locust import FastHttpUser, between, task from locust import FastHttpUser, between, task, events
password = "fooobarasdfasdf" from dcache_py import dcache_pb2 as dcache
username = "realaravinth" from dcache_py.dcache_pb2 import RaftRequest
from dcache_py.dcache_pb2_grpc import DcacheServiceStub
import adaptor
class Unprotected(FastHttpUser): host = "localhost:9001"
# wait_time = between(5, 15)
peers = [
"http://localhost:9001",
"http://localhost:9002",
"http://localhost:9003",
]
leader = "http://localhost:9001"
host = leader
captcha_id = "locust" captcha_id = "locust"
# pipeline_vote = []
# for _ in range(0,100):
# pipeline_vote.append({"AddVisitor": captcha_id})
# def on_start(self): def add_captcha(stub: DcacheServiceStub, captcha_id: str):
# resp = self.client.get(f"{self.leader}/metrics") msg = dcache.AddCaptchaRequest(
# data = resp.json() id=captcha_id,
# leader = data["Ok"]["membership_config"]["log_id"]["leader_id"]["node_id"] mcaptcha=dcache.MCaptcha(
# self.leader = self.peers[leader - 1] duration=30,
# self.host = self.leader defense=dcache.Defense(
# print(f"Leader: {self.host}") levels=[
# self.add_captcha(captcha_id="locust") dcache.Level(visitor_threshold=50, difficulty_factor=500),
dcache.Level(visitor_threshold=5000, difficulty_factor=50000),
]
),
def write(self, data): ),
resp = self.client.post(f"{self.host}/write", json=data) )
# print(f"RPC Status: {resp.status_code}") resp = stub.AddCaptcha(msg)
resp = resp.json()
if "Err" in resp:
leader = resp["Err"]["APIError"]["ForwardToLeader"]["leader_node"]["addr"]
print(f"Forwarding write to leader {leader}")
return write(leader, data)
return resp["Ok"]["data"]
def pipeline_write(self, data):
resp = self.client.post(f"{self.host}/pipeline/write", json=data)
# print(f"RPC Status: {resp.status_code}")
resp = resp.json()
if "Err" in resp:
leader = resp["Err"]["APIError"]["ForwardToLeader"]["leader_node"]["addr"]
print(f"Forwarding write to leader {leader}")
return write(leader, data)
return resp
def add_vote(self, captcha_id: str):
resp = self.write(data={"AddVisitor": captcha_id})
# pprint(resp)
def add_vote_pipeline(self, captcha_id: str):
resp = self.pipeline_write(data=self.pipeline_vote)
# pprint(resp)
def add_captcha(self, captcha_id: str):
params = {
"AddCaptcha": {
"id": captcha_id,
"mcaptcha": {
"visitor_threshold": 0,
"defense": {
"levels": [
{"visitor_threshold": 50, "difficulty_factor": 500},
{"visitor_threshold": 5000, "difficulty_factor": 50000},
],
"current_visitor_threshold": 0,
},
"duration": 30,
},
}
}
resp = self.write(data=params)
pprint(f"Captcha added {captcha_id}: {resp}") pprint(f"Captcha added {captcha_id}: {resp}")
with grpc.insecure_channel(host) as channel:
stub = DcacheServiceStub(channel)
add_captcha(stub=stub, captcha_id=captcha_id)
pipeline_msgs = []
for _ in range(0,10):
pipeline_msgs.append(dcache.DcacheRequest(addVisitor=dcache.CaptchaID(id=captcha_id)))
pipeline_msgs = dcache.DcacheBatchRequest(requests=pipeline_msgs)
#def pipeline_generate_messages():
# for msg in pipeline_msgs:
# yield msg
class HelloGrpcUser(adaptor.GrpcUser):
stub_class = DcacheServiceStub
host = host
captcha_id = captcha_id
msg = dcache.CaptchaID(id=captcha_id)
def add_vote(self, captcha_id: str):
resp = self.stub.AddVisitor(self.msg)
def add_vote_pipeline(self):
res = self.stub.PipelineDcacheOps(pipeline_msgs)
# @task
# def addVote(self):
# self.add_vote(self.captcha_id)
@task @task
def unprotected(self): def addVotePipeline(self):
#self.add_vote_pipeline(captcha_id=self.captcha_id) self.add_vote_pipeline()
self.add_vote(captcha_id="locust")
# data = {
# "username": username,
# "password": username,
# "confirm_password": username,
# }
# self.client.post("/unprotected", data=data)

View file

@ -1,27 +1,27 @@
blinker==1.7.0 blinker==1.8.2
Brotli==1.1.0 Brotli==1.1.0
certifi==2023.11.17 certifi==2024.8.30
charset-normalizer==3.3.2 charset-normalizer==3.3.2
click==8.1.7 click==8.1.7
ConfigArgParse==1.7 ConfigArgParse==1.7
Flask==3.0.0 Flask==3.0.3
Flask-BasicAuth==0.2.0 Flask-BasicAuth==0.2.0
Flask-Cors==4.0.0 Flask-Cors==5.0.0
gevent==23.9.1 gevent==24.2.1
geventhttpclient==2.0.11 geventhttpclient==2.3.1
greenlet==3.0.2 greenlet==3.1.1
idna==3.6 idna==3.10
itsdangerous==2.1.2 itsdangerous==2.2.0
Jinja2==3.1.2 Jinja2==3.1.4
locust==2.20.0 locust==2.31.6
MarkupSafe==2.1.3 MarkupSafe==2.1.5
msgpack==1.0.7 msgpack==1.1.0
psutil==5.9.7 psutil==6.0.0
pyzmq==25.1.2 pyzmq==26.2.0
requests==2.31.0 requests==2.32.3
roundrobin==0.0.4 roundrobin==0.0.4
six==1.16.0 six==1.16.0
urllib3==2.1.0 urllib3==2.2.3
Werkzeug==3.0.1 Werkzeug==3.0.4
zope.event==5.0 zope.event==5.0
zope.interface==6.1 zope.interface==7.0.3

212
bench/results/README.md Normal file
View file

@ -0,0 +1,212 @@
# Benchmark Report
Benchmarks were run at various stages of development to keep track of
performance. Tech stacks were changed and the implementation optimized
to increase throughput. This report summarizes the findings of the
benchmarks
Ultimately, we were able to identify a bottleneck that was previously
hidden in mCaptcha (hidden because a different bottleneck like DB access
eclipsed it :p) [and were able to increase performance of the critical
path by ~147 times](https://git.batsense.net/mCaptcha/dcache/pulls/3)
through a trivial optimization.
## Environment
These benchmarks were run on a noisy development laptop and should be
used for guidance only.
- CPU: AMD Ryzen 5 5600U with Radeon Graphics (12) @ 4.289GHz
- Memory: 22849MiB
- OS: Arch Linux x86_64
- Kernel: 6.6.7-arch1-1
- rustc: 1.73.0 (cc66ad468 2023-10-03)
## Baseline: Tech stack version 1
Actix Web based networking with JSON for message format. Was chosen for
prototyping, and was later used to set a baseline.
## Without connection pooling in server-to-server communications
### Single requests (no batching)
<details>
<summary>Peak throughput observed was 1117 request/second (please click
to see charts)</summary>
#### Total number of requests vs time
![number of requests](./v1/nopooling/nopipelining/total_requests_per_second_1703969194.png)
#### Response times(ms) vs time
![repsonse times(ms)](<./v1/nopooling/nopipelining/response_times_(ms)_1703969194.png>)
#### Number of concurrent users vs time
![number of concurrent
users](./v1/nopooling/nopipelining/number_of_users_1703969194.png)
</details>
### Batched requests
<details>
<summary>
Each network request contained 1,000 application requests, so peak throughput observed was 1,800 request/second.
Please click to see charts</summary>
#### Total number of requests vs time
![number of requests](./v1/pooling/pipelining/total_requests_per_second_1703968582.png)
#### Response times(ms) vs time
![repsonse times(ms)](<./v1/pooling/pipelining/response_times_(ms)_1703968582.png>))
#### Number of concurrent users vs time
![number of concurrent
users](./v1/pooling/pipelining/number_of_users_1703968582.png)
</details>
## With connection pooling in server-to-server communications
### Single requests (no batching)
<details>
<summary>
Peak throughput observed was 3904 request/second. Please click to see
charts</summary>
#### Total number of requests vs time
![number of requests](./v1/pooling/nopipelining/total_requests_per_second_1703968214.png)
#### Response times(ms) vs time
![repsonse times(ms)](<./v1/pooling/nopipelining/response_times_(ms)_1703968215.png>)
#### Number of concurrent users vs time
![number of concurrent
users](./v1/pooling/nopipelining/number_of_users_1703968215.png)
</details>
### Batched requests
<details>
<summary>
Each network request contained 1,000 application requests, so peak throughput observed was 15,800 request/second.
Please click to see charts.
</summary>
#### Total number of requests vs time
![number of requests](./v1/pooling/pipelining/total_requests_per_second_1703968582.png)
#### Response times(ms) vs time
![repsonse times(ms)](<./v1/pooling/pipelining/response_times_(ms)_1703968582.png>))
#### Number of concurrent users vs time
![number of concurrent
users](./v1/pooling/pipelining/number_of_users_1703968582.png)
</details>
## Tech stack version 2
Tonic for the network stack and GRPC for wire format. We ran over a
dozen benchmarks with this tech stack. The trend was similar to the ones
observed above: throughput was higher when connection pool was used and
even higher when requests were batched. _But_ the throughput of all of these benchmarks were lower than the
baseline benchmarks!
The CPU was busier. We put it through
[flamegraph](https://github.com/flamegraph-rs/flamegraph) and hit it with
the same test suite to identify compute-heavy areas. The result was
unexpected:
![flamegraph indicating libmcaptcha being
slow](./v2/libmcaptcha-bottleneck/problem/flamegraph.svg)
libmCaptcha's [AddVisitor
handler](https://github.com/mCaptcha/libmcaptcha/blob/e3f456f35b2c9e55e0475b01b3e05d48b21fd51f/src/master/embedded/counter.rs#L124)
was taking up 59% of CPU time of the entire test run. This is a very
critical part of the variable difficulty factor PoW algorithm that
mCaptcha uses. We never ran into this bottleneck before because in other
cache implementations, it was always preceded with a database request.
It surfaced here as we are using in-memory data sources in dcache.
libmCaptcha uses an actor-based approach with message passing for clean
concurrent state management. Message passing is generally faster in most
cases, but in our case, sharing memory using CPU's concurrent primitives
turned out to be significantly faster:
![flamegraph indicating libmcaptcha being
slow](./v2/libmcaptcha-bottleneck/solution/flamegraph.svg)
CPU time was reduced from 59% to 0.4% — roughly a 147-fold improvement!
With this fix in place:
### Connection pooled server-to-server communications, single requests (no batching)
Peak throughput observed was 4816 request/second, ~1000 requests/second
more than baseline.
#### Total number of requests vs time
![number of requests](./v2/grpc-conn-pool-post-bottleneck/single/total_requests_per_second_1703970940.png)
#### Response times(ms) vs time
![repsonse times(ms)](./v2/grpc-conn-pool-post-bottleneck/single/response_times_(ms)_1703970940.png)
#### Number of concurrent users vs time
![number of concurrent
users](./v2/grpc-conn-pool-post-bottleneck/single/number_of_users_1703970940.png)
### Connection pooled server-to-server communications, batched requests
Each network request contained 1,000 application requests, so peak throughput observed was 95,700 request/second. This is six times higher than baseline.
Please click to see charts.
#### Total number of requests vs time
![number of requests](./v2/grpc-conn-pool-post-bottleneck/pipeline/total_requests_per_second_1703971082.png)
#### Response times(ms) vs time
![repsonse times(ms)](./v2/grpc-conn-pool-post-bottleneck/pipeline/response_times_(ms)_1703971082.png)
#### Number of concurrent users vs time
![number of concurrent
users](./v2/grpc-conn-pool-post-bottleneck/pipeline/number_of_users_1703971082.png)
</details>

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,348265,0,130,133.02968716351054,0,544,98.68140640029863,2517.2083812580477,0.0,130,160,180,190,310,350,370,390,420,530,540
,Aggregated,348265,0,130,133.02968716351054,0,544,98.68140640029863,2517.2083812580477,0.0,130,160,180,190,310,350,370,390,420,530,540
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/write 348265 0 130 133.02968716351054 0 544 98.68140640029863 2517.2083812580477 0.0 130 160 180 190 310 350 370 390 420 530 540
3 Aggregated 348265 0 130 133.02968716351054 0 544 98.68140640029863 2517.2083812580477 0.0 130 160 180 190 310 350 370 390 420 530 540

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000
,Aggregated,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/write 47348 0 350 344.40586719608007 5 1034 97.76577680155445 993.0375320581431 0.0 350 390 410 430 480 520 590 660 850 1000 1000
3 Aggregated 47348 0 350 344.40586719608007 5 1034 97.76577680155445 993.0375320581431 0.0 350 390 410 430 480 520 590 660 850 1000 1000

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000
,Aggregated,47348,0,350,344.40586719608007,5,1034,97.76577680155445,993.0375320581431,0.0,350,390,410,430,480,520,590,660,850,1000,1000
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/write 47348 0 350 344.40586719608007 5 1034 97.76577680155445 993.0375320581431 0.0 350 390 410 430 480 520 590 660 850 1000 1000
3 Aggregated 47348 0 350 344.40586719608007 5 1034 97.76577680155445 993.0375320581431 0.0 350 390 410 430 480 520 590 660 850 1000 1000

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info"> <div class="info">
<p>During: <span class="l10n datetime">2023-12-18 20:08:43</span> - <span class="l10n datetime">2023-12-18 20:09:30</span></p> <p>During: <span class="l10n datetime">2023-12-26 13:14:12</span> - <span class="l10n datetime">2023-12-26 13:15:15</span></p>
<p>Target Host: <span>http://localhost:9001</span></p> <p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p> <p>Script: <span>locustfile.py</span></p>
</div> </div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/write</td> <td>http://localhost:9001/write</td>
<td>47348</td> <td>70718</td>
<td>0</td> <td>0</td>
<td>344</td> <td>303</td>
<td>5</td> <td>5</td>
<td>1034</td> <td>939</td>
<td>97</td> <td>97</td>
<td>993.0</td> <td>1117.2</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>47348</td> <td>70718</td>
<td>0</td> <td>0</td>
<td>344</td> <td>303</td>
<td>5</td> <td>5</td>
<td>1034</td> <td>939</td>
<td>97</td> <td>97</td>
<td>993.0</td> <td>1117.2</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/write</td> <td>http://localhost:9001/write</td>
<td>300</td>
<td>320</td>
<td>350</td> <td>350</td>
<td>380</td> <td>390</td>
<td>400</td>
<td>430</td> <td>430</td>
<td>480</td> <td>470</td>
<td>520</td> <td>550</td>
<td>660</td> <td>940</td>
<td>1000</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>300</td>
<td>320</td>
<td>350</td> <td>350</td>
<td>380</td> <td>390</td>
<td>400</td>
<td>430</td> <td>430</td>
<td>480</td> <td>470</td>
<td>520</td> <td>550</td>
<td>660</td> <td>940</td>
<td>1000</td>
</tr> </tr>
</tbody> </tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script> <script>
var stats_history = { var stats_history = {
"time": ["20:08:45", "20:08:50", "20:08:55", "20:09:00", "20:09:05", "20:09:10", "20:09:15", "20:09:20", "20:09:25", "20:09:30"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()), "time": ["13:14:16", "13:14:21", "13:14:26", "13:14:31", "13:14:36", "13:14:41", "13:14:46", "13:14:51", "13:14:56", "13:15:01", "13:15:06", "13:15:11"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 100}, {"value": 350}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}], "user_count": [{"value": 250}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}],
"current_rps": [{"users": 100, "value": 0}, {"users": 350, "value": 758.25}, {"users": 400, "value": 883.4}, {"users": 400, "value": 962.3}, {"users": 400, "value": 1018.2}, {"users": 400, "value": 1096.2}, {"users": 400, "value": 1069.3}, {"users": 400, "value": 1076.7}, {"users": 400, "value": 956.9}, {"users": 400, "value": 921.9}], "current_rps": [{"users": 250, "value": 707.5}, {"users": 400, "value": 832.8571428571429}, {"users": 400, "value": 1047.2}, {"users": 400, "value": 1291.9}, {"users": 400, "value": 1333.4}, {"users": 400, "value": 1302.2}, {"users": 400, "value": 1211.4}, {"users": 400, "value": 1040.7}, {"users": 400, "value": 1094.1}, {"users": 400, "value": 1131.6}, {"users": 400, "value": 1027.1}, {"users": 400, "value": 1027.8}],
"current_fail_per_sec": [{"users": 100, "value": 0}, {"users": 350, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}], "current_fail_per_sec": [{"users": 250, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}],
"response_time_percentile_1": [{"users": 100, "value": 130}, {"users": 350, "value": 200}, {"users": 400, "value": 320}, {"users": 400, "value": 360}, {"users": 400, "value": 360}, {"users": 400, "value": 320}, {"users": 400, "value": 320}, {"users": 400, "value": 400}, {"users": 400, "value": 400}, {"users": 400, "value": 360}], "response_time_percentile_1": [{"users": 250, "value": 140}, {"users": 400, "value": 250}, {"users": 400, "value": 280}, {"users": 400, "value": 260}, {"users": 400, "value": 280}, {"users": 400, "value": 280}, {"users": 400, "value": 320}, {"users": 400, "value": 350}, {"users": 400, "value": 320}, {"users": 400, "value": 330}, {"users": 400, "value": 360}, {"users": 400, "value": 380}],
"response_time_percentile_2": [{"users": 100, "value": 170}, {"users": 350, "value": 380}, {"users": 400, "value": 510}, {"users": 400, "value": 520}, {"users": 400, "value": 500}, {"users": 400, "value": 480}, {"users": 400, "value": 550}, {"users": 400, "value": 560}, {"users": 400, "value": 490}, {"users": 400, "value": 480}], "response_time_percentile_2": [{"users": 250, "value": 250}, {"users": 400, "value": 450}, {"users": 400, "value": 430}, {"users": 400, "value": 340}, {"users": 400, "value": 360}, {"users": 400, "value": 380}, {"users": 400, "value": 440}, {"users": 400, "value": 450}, {"users": 400, "value": 470}, {"users": 400, "value": 480}, {"users": 400, "value": 490}, {"users": 400, "value": 500}],
"markers": [], "markers": [],
}; };
var percentile1 = 0.5 var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,70718,0,300,303.8036002149382,5,939,97.84323651686982,1117.209290974752,0.0,300,340,370,390,430,470,510,550,770,930,940
,Aggregated,70718,0,300,303.8036002149382,5,939,97.84323651686982,1117.209290974752,0.0,300,340,370,390,430,470,510,550,770,930,940
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/write 70718 0 300 303.8036002149382 5 939 97.84323651686982 1117.209290974752 0.0 300 340 370 390 430 470 510 550 770 930 940
3 Aggregated 70718 0 300 303.8036002149382 5 939 97.84323651686982 1117.209290974752 0.0 300 340 370 390 430 470 510 550 770 930 940

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info"> <div class="info">
<p>During: <span class="l10n datetime">2023-12-17 14:25:53</span> - <span class="l10n datetime">2023-12-17 14:27:01</span></p> <p>During: <span class="l10n datetime">2023-12-26 13:12:13</span> - <span class="l10n datetime">2023-12-26 13:13:24</span></p>
<p>Target Host: <span>http://localhost:9001</span></p> <p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p> <p>Script: <span>locustfile.py</span></p>
</div> </div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/pipeline/write</td> <td>http://localhost:9001/pipeline/write</td>
<td>10968</td> <td>70</td>
<td>0</td> <td>0</td>
<td>761</td> <td>9854</td>
<td>7</td> <td>7898</td>
<td>1293</td> <td>12282</td>
<td>9999</td> <td>98842</td>
<td>161.4</td> <td>1.0</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>10968</td> <td>70</td>
<td>0</td> <td>0</td>
<td>761</td> <td>9854</td>
<td>7</td> <td>7898</td>
<td>1293</td> <td>12282</td>
<td>9999</td> <td>98842</td>
<td>161.4</td> <td>1.0</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/pipeline/write</td> <td>http://localhost:9001/pipeline/write</td>
<td>620</td> <td>9700</td>
<td>860</td> <td>10000</td>
<td>1100</td> <td>10000</td>
<td>1100</td> <td>11000</td>
<td>1200</td> <td>12000</td>
<td>1200</td> <td>12000</td>
<td>1200</td> <td>12000</td>
<td>1300</td> <td>12000</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>620</td> <td>9700</td>
<td>860</td> <td>10000</td>
<td>1100</td> <td>10000</td>
<td>1100</td> <td>11000</td>
<td>1200</td> <td>12000</td>
<td>1200</td> <td>12000</td>
<td>1200</td> <td>12000</td>
<td>1300</td> <td>12000</td>
</tr> </tr>
</tbody> </tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script> <script>
var stats_history = { var stats_history = {
"time": ["14:25:56", "14:26:01", "14:26:06", "14:26:11", "14:26:16", "14:26:21", "14:26:26", "14:26:31", "14:26:36", "14:26:41", "14:26:46", "14:26:51", "14:26:56", "14:27:01"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()), "time": ["13:12:14", "13:12:19", "13:12:24", "13:12:29", "13:12:34", "13:12:39", "13:12:44", "13:12:49", "13:12:54", "13:12:59", "13:13:04", "13:13:09", "13:13:14", "13:13:19"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 3}, {"value": 36}, {"value": 86}, {"value": 100}, {"value": 100}, {"value": 100}, {"value": 100}, {"value": 110}, {"value": 160}, {"value": 200}, {"value": 200}, {"value": 200}, {"value": 200}, {"value": 200}], "user_count": [{"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}],
"current_rps": [{"users": 3, "value": 8.0}, {"users": 36, "value": 48.0}, {"users": 86, "value": 109.9}, {"users": 100, "value": 164.0}, {"users": 100, "value": 172.1}, {"users": 100, "value": 172.8}, {"users": 100, "value": 170.5}, {"users": 110, "value": 169.9}, {"users": 160, "value": 168.3}, {"users": 200, "value": 172.3}, {"users": 200, "value": 169.9}, {"users": 200, "value": 172.4}, {"users": 200, "value": 171.7}, {"users": 200, "value": 170.5}], "current_rps": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 1.0}, {"users": 10, "value": 0.2}, {"users": 10, "value": 0.2}, {"users": 10, "value": 1.1}, {"users": 10, "value": 0.8}, {"users": 10, "value": 1.1}, {"users": 10, "value": 1.0}],
"current_fail_per_sec": [{"users": 3, "value": 0}, {"users": 36, "value": 0}, {"users": 86, "value": 0}, {"users": 100, "value": 0}, {"users": 100, "value": 0}, {"users": 100, "value": 0}, {"users": 100, "value": 0}, {"users": 110, "value": 0}, {"users": 160, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}], "current_fail_per_sec": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}],
"response_time_percentile_1": [{"users": 3, "value": 57}, {"users": 36, "value": 120}, {"users": 86, "value": 250}, {"users": 100, "value": 520}, {"users": 100, "value": 580}, {"users": 100, "value": 580}, {"users": 100, "value": 580}, {"users": 110, "value": 590}, {"users": 160, "value": 620}, {"users": 200, "value": 870}, {"users": 200, "value": 1100}, {"users": 200, "value": 1100}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}], "response_time_percentile_1": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 8800}, {"users": 10, "value": 8900}, {"users": 10, "value": 9700}, {"users": 10, "value": 9700}, {"users": 10, "value": 10000}, {"users": 10, "value": 10000}, {"users": 10, "value": 12000}, {"users": 10, "value": 12000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 9900}, {"users": 10, "value": 9400}],
"response_time_percentile_2": [{"users": 3, "value": 62}, {"users": 36, "value": 220}, {"users": 86, "value": 480}, {"users": 100, "value": 610}, {"users": 100, "value": 620}, {"users": 100, "value": 620}, {"users": 100, "value": 670}, {"users": 110, "value": 670}, {"users": 160, "value": 860}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}, {"users": 200, "value": 1200}], "response_time_percentile_2": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 9800}, {"users": 10, "value": 9800}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 12000}, {"users": 10, "value": 12000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 11000}, {"users": 10, "value": 10000}],
"markers": [], "markers": [],
}; };
var percentile1 = 0.5 var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,70,0,9700,9854.628571428571,7898,12282,98842.68571428572,0.9866591262400619,0.0,9700,10000,11000,11000,12000,12000,12000,12000,12000,12000,12000
,Aggregated,70,0,9700,9854.628571428571,7898,12282,98842.68571428572,0.9866591262400619,0.0,9700,10000,11000,11000,12000,12000,12000,12000,12000,12000,12000
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/pipeline/write 70 0 9700 9854.628571428571 7898 12282 98842.68571428572 0.9866591262400619 0.0 9700 10000 11000 11000 12000 12000 12000 12000 12000 12000 12000
3 Aggregated 70 0 9700 9854.628571428571 7898 12282 98842.68571428572 0.9866591262400619 0.0 9700 10000 11000 11000 12000 12000 12000 12000 12000 12000 12000

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,10968,0,620,761.5634573304158,7,1293,9999.697757111597,161.3685696386145,0.0,620,1100,1100,1100,1200,1200,1200,1200,1300,1300,1300
,Aggregated,10968,0,620,761.5634573304158,7,1293,9999.697757111597,161.3685696386145,0.0,620,1100,1100,1100,1200,1200,1200,1200,1300,1300,1300
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/pipeline/write 10968 0 620 761.5634573304158 7 1293 9999.697757111597 161.3685696386145 0.0 620 1100 1100 1100 1200 1200 1200 1200 1300 1300 1300
3 Aggregated 10968 0 620 761.5634573304158 7 1293 9999.697757111597 161.3685696386145 0.0 620 1100 1100 1100 1200 1200 1200 1200 1300 1300 1300

View file

@ -1,3 +0,0 @@
Design:
Actix Web and reqwest with request pipelining

View file

@ -1,3 +0,0 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,1141,0,720,681.7598597721297,69,858,100027.22261174409,16.07784382812381,0.0,720,740,750,760,780,790,810,820,840,860,860
,Aggregated,1141,0,720,681.7598597721297,69,858,100027.22261174409,16.07784382812381,0.0,720,740,750,760,780,790,810,820,840,860,860
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/pipeline/write 1141 0 720 681.7598597721297 69 858 100027.22261174409 16.07784382812381 0.0 720 740 750 760 780 790 810 820 840 860 860
3 Aggregated 1141 0 720 681.7598597721297 69 858 100027.22261174409 16.07784382812381 0.0 720 740 750 760 780 790 810 820 840 860 860

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info"> <div class="info">
<p>During: <span class="l10n datetime">2023-12-17 14:30:25</span> - <span class="l10n datetime">2023-12-17 14:32:43</span></p> <p>During: <span class="l10n datetime">2023-12-26 13:03:21</span> - <span class="l10n datetime">2023-12-26 13:03:59</span></p>
<p>Target Host: <span>http://localhost:9001</span></p> <p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p> <p>Script: <span>locustfile.py</span></p>
</div> </div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/write</td> <td>http://localhost:9001/write</td>
<td>348265</td> <td>148740</td>
<td>0</td> <td>0</td>
<td>133</td> <td>7</td>
<td>0</td> <td>0</td>
<td>544</td> <td>485</td>
<td>98</td> <td>98</td>
<td>2517.2</td> <td>3904.8</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>348265</td> <td>148740</td>
<td>0</td> <td>0</td>
<td>133</td> <td>7</td>
<td>0</td> <td>0</td>
<td>544</td> <td>485</td>
<td>98</td> <td>98</td>
<td>2517.2</td> <td>3904.8</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/write</td> <td>http://localhost:9001/write</td>
<td>130</td> <td>6</td>
<td>150</td> <td>7</td>
<td>160</td> <td>9</td>
<td>190</td> <td>10</td>
<td>310</td> <td>12</td>
<td>350</td> <td>15</td>
<td>390</td> <td>25</td>
<td>540</td> <td>480</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>130</td> <td>6</td>
<td>150</td> <td>7</td>
<td>160</td> <td>9</td>
<td>190</td> <td>10</td>
<td>310</td> <td>12</td>
<td>350</td> <td>15</td>
<td>390</td> <td>25</td>
<td>540</td> <td>480</td>
</tr> </tr>
</tbody> </tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script> <script>
var stats_history = { var stats_history = {
"time": ["14:30:27", "14:30:32", "14:30:37", "14:30:42", "14:30:47", "14:30:52", "14:30:57", "14:31:02", "14:31:07", "14:31:12", "14:31:17", "14:31:22", "14:31:27", "14:31:32", "14:31:37", "14:31:42", "14:31:47", "14:31:52", "14:31:57", "14:32:02", "14:32:07", "14:32:12", "14:32:17", "14:32:22", "14:32:27", "14:32:32", "14:32:37", "14:32:42"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()), "time": ["13:03:24", "13:03:29", "13:03:34", "13:03:39", "13:03:44", "13:03:49", "13:03:54"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 20}, {"value": 70}, {"value": 120}, {"value": 170}, {"value": 220}, {"value": 270}, {"value": 320}, {"value": 200}, {"value": 200}, {"value": 450}, {"value": 500}, {"value": 500}, {"value": 500}, {"value": 500}, {"value": 500}, {"value": 450}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 550}, {"value": 800}, {"value": 1000}, {"value": 1000}, {"value": 1000}, {"value": 1000}], "user_count": [{"value": 200}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}, {"value": 400}],
"current_rps": [{"users": 20, "value": 0}, {"users": 70, "value": 2461.4}, {"users": 120, "value": 2658.2}, {"users": 170, "value": 2756.2}, {"users": 220, "value": 2621.4}, {"users": 270, "value": 2564.6}, {"users": 320, "value": 2504.7}, {"users": 200, "value": 2470.7}, {"users": 200, "value": 2534.8}, {"users": 450, "value": 2588.5}, {"users": 500, "value": 2507.5}, {"users": 500, "value": 2472.9}, {"users": 500, "value": 2518.8}, {"users": 500, "value": 2491.6}, {"users": 500, "value": 2455.1}, {"users": 450, "value": 2479.9}, {"users": 400, "value": 2495.8}, {"users": 400, "value": 2513.2}, {"users": 400, "value": 2533.0}, {"users": 400, "value": 2482.6}, {"users": 400, "value": 2492.1}, {"users": 400, "value": 2496.6}, {"users": 550, "value": 2471.3}, {"users": 800, "value": 2440.5}, {"users": 1000, "value": 2363.0}, {"users": 1000, "value": 2438.9}, {"users": 1000, "value": 2390.1}, {"users": 1000, "value": 2401.2}], "current_rps": [{"users": 200, "value": 1874.0}, {"users": 400, "value": 3968.5}, {"users": 400, "value": 4127.4}, {"users": 400, "value": 3895.7}, {"users": 400, "value": 3895.4}, {"users": 400, "value": 3811.2}, {"users": 400, "value": 3801.2}],
"current_fail_per_sec": [{"users": 20, "value": 0}, {"users": 70, "value": 0}, {"users": 120, "value": 0}, {"users": 170, "value": 0}, {"users": 220, "value": 0}, {"users": 270, "value": 0}, {"users": 320, "value": 0}, {"users": 200, "value": 0}, {"users": 200, "value": 0}, {"users": 450, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 500, "value": 0}, {"users": 450, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 550, "value": 0}, {"users": 800, "value": 0}, {"users": 1000, "value": 0}, {"users": 1000, "value": 0}, {"users": 1000, "value": 0}, {"users": 1000, "value": 0}], "current_fail_per_sec": [{"users": 200, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}, {"users": 400, "value": 0}],
"response_time_percentile_1": [{"users": 20, "value": 4}, {"users": 70, "value": 11}, {"users": 120, "value": 20}, {"users": 170, "value": 40}, {"users": 220, "value": 58}, {"users": 270, "value": 75}, {"users": 320, "value": 93}, {"users": 200, "value": 94}, {"users": 200, "value": 70}, {"users": 450, "value": 74}, {"users": 500, "value": 140}, {"users": 500, "value": 170}, {"users": 500, "value": 170}, {"users": 500, "value": 170}, {"users": 500, "value": 170}, {"users": 450, "value": 170}, {"users": 400, "value": 150}, {"users": 400, "value": 140}, {"users": 400, "value": 140}, {"users": 400, "value": 140}, {"users": 400, "value": 130}, {"users": 400, "value": 140}, {"users": 550, "value": 140}, {"users": 800, "value": 170}, {"users": 1000, "value": 260}, {"users": 1000, "value": 330}, {"users": 1000, "value": 340}, {"users": 1000, "value": 340}], "response_time_percentile_1": [{"users": 200, "value": 5}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}, {"users": 400, "value": 6}],
"response_time_percentile_2": [{"users": 20, "value": 9}, {"users": 70, "value": 23}, {"users": 120, "value": 42}, {"users": 170, "value": 62}, {"users": 220, "value": 82}, {"users": 270, "value": 100}, {"users": 320, "value": 120}, {"users": 200, "value": 130}, {"users": 200, "value": 110}, {"users": 450, "value": 140}, {"users": 500, "value": 200}, {"users": 500, "value": 210}, {"users": 500, "value": 200}, {"users": 500, "value": 210}, {"users": 500, "value": 210}, {"users": 450, "value": 200}, {"users": 400, "value": 200}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 400, "value": 170}, {"users": 550, "value": 180}, {"users": 800, "value": 280}, {"users": 1000, "value": 360}, {"users": 1000, "value": 390}, {"users": 1000, "value": 400}, {"users": 1000, "value": 390}], "response_time_percentile_2": [{"users": 200, "value": 11}, {"users": 400, "value": 14}, {"users": 400, "value": 15}, {"users": 400, "value": 15}, {"users": 400, "value": 16}, {"users": 400, "value": 16}, {"users": 400, "value": 15}],
"markers": [], "markers": [],
}; };
var percentile1 = 0.5 var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/write,148740,0,6,7.030960064542154,0,485,98.25319349199947,3904.7975720558998,0.0,6,8,9,10,12,15,20,25,170,380,480
,Aggregated,148740,0,6,7.030960064542154,0,485,98.25319349199947,3904.7975720558998,0.0,6,8,9,10,12,15,20,25,170,380,480
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/write 148740 0 6 7.030960064542154 0 485 98.25319349199947 3904.7975720558998 0.0 6 8 9 10 12 15 20 25 170 380 480
3 Aggregated 148740 0 6 7.030960064542154 0 485 98.25319349199947 3904.7975720558998 0.0 6 8 9 10 12 15 20 25 170 380 480

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

View file

@ -120,7 +120,7 @@ table.stats tr.total td {
<div class="info"> <div class="info">
<p>During: <span class="l10n datetime">2023-12-17 14:19:00</span> - <span class="l10n datetime">2023-12-17 14:20:11</span></p> <p>During: <span class="l10n datetime">2023-12-26 13:09:24</span> - <span class="l10n datetime">2023-12-26 13:10:06</span></p>
<p>Target Host: <span>http://localhost:9001</span></p> <p>Target Host: <span>http://localhost:9001</span></p>
<p>Script: <span>locustfile.py</span></p> <p>Script: <span>locustfile.py</span></p>
</div> </div>
@ -147,26 +147,26 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/pipeline/write</td> <td>http://localhost:9001/pipeline/write</td>
<td>1141</td> <td>673</td>
<td>0</td> <td>0</td>
<td>681</td> <td>625</td>
<td>69</td> <td>448</td>
<td>858</td> <td>734</td>
<td>100027</td> <td>99835</td>
<td>16.1</td> <td>15.8</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>1141</td> <td>673</td>
<td>0</td> <td>0</td>
<td>681</td> <td>625</td>
<td>69</td> <td>448</td>
<td>858</td> <td>734</td>
<td>100027</td> <td>99835</td>
<td>16.1</td> <td>15.8</td>
<td>0.0</td> <td>0.0</td>
</tr> </tr>
@ -196,27 +196,27 @@ table.stats tr.total td {
<tr> <tr>
<td>POST</td> <td>POST</td>
<td>http://localhost:9001/pipeline/write</td> <td>http://localhost:9001/pipeline/write</td>
<td>620</td>
<td>630</td>
<td>640</td>
<td>650</td>
<td>680</td>
<td>700</td>
<td>720</td> <td>720</td>
<td>730</td> <td>730</td>
<td>740</td>
<td>760</td>
<td>780</td>
<td>790</td>
<td>820</td>
<td>860</td>
</tr> </tr>
<tr class="total"> <tr class="total">
<td></td> <td></td>
<td>Aggregated</td> <td>Aggregated</td>
<td>620</td>
<td>630</td>
<td>640</td>
<td>650</td>
<td>680</td>
<td>700</td>
<td>720</td> <td>720</td>
<td>730</td> <td>730</td>
<td>740</td>
<td>760</td>
<td>780</td>
<td>790</td>
<td>820</td>
<td>860</td>
</tr> </tr>
</tbody> </tbody>
@ -517,12 +517,12 @@ function fillTasksFromRequest() {
<script> <script>
var stats_history = { var stats_history = {
"time": ["14:19:01", "14:19:06", "14:19:11", "14:19:16", "14:19:21", "14:19:26", "14:19:31", "14:19:36", "14:19:41", "14:19:46", "14:19:51", "14:19:56", "14:20:01", "14:20:06"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()), "time": ["13:09:26", "13:09:31", "13:09:36", "13:09:41", "13:09:46", "13:09:51", "13:09:56", "13:10:01", "13:10:06"].map(server_time => new Date(new Date().setUTCHours(...(server_time.split(":")))).toLocaleTimeString()),
"user_count": [{"value": 2}, {"value": 7}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}, {"value": 12}], "user_count": [{"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}, {"value": 10}],
"current_rps": [{"users": 2, "value": 0}, {"users": 7, "value": 9.25}, {"users": 12, "value": 12.88888888888889}, {"users": 12, "value": 16.1}, {"users": 12, "value": 16.2}, {"users": 12, "value": 16.6}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.1}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.5}, {"users": 12, "value": 16.5}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.4}, {"users": 12, "value": 16.3}], "current_rps": [{"users": 10, "value": 0}, {"users": 10, "value": 14.2}, {"users": 10, "value": 15.6}, {"users": 10, "value": 16.1}, {"users": 10, "value": 15.7}, {"users": 10, "value": 16.2}, {"users": 10, "value": 15.8}, {"users": 10, "value": 15.3}, {"users": 10, "value": 15.6}],
"current_fail_per_sec": [{"users": 2, "value": 0}, {"users": 7, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}, {"users": 12, "value": 0}], "current_fail_per_sec": [{"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}, {"users": 10, "value": 0}],
"response_time_percentile_1": [{"users": 2, "value": 0}, {"users": 7, "value": 260}, {"users": 12, "value": 400}, {"users": 12, "value": 670}, {"users": 12, "value": 710}, {"users": 12, "value": 700}, {"users": 12, "value": 740}, {"users": 12, "value": 740}, {"users": 12, "value": 730}, {"users": 12, "value": 720}, {"users": 12, "value": 730}, {"users": 12, "value": 730}, {"users": 12, "value": 720}, {"users": 12, "value": 730}], "response_time_percentile_1": [{"users": 10, "value": 630}, {"users": 10, "value": 600}, {"users": 10, "value": 600}, {"users": 10, "value": 620}, {"users": 10, "value": 640}, {"users": 10, "value": 640}, {"users": 10, "value": 640}, {"users": 10, "value": 640}, {"users": 10, "value": 630}],
"response_time_percentile_2": [{"users": 2, "value": 0}, {"users": 7, "value": 380}, {"users": 12, "value": 640}, {"users": 12, "value": 770}, {"users": 12, "value": 800}, {"users": 12, "value": 800}, {"users": 12, "value": 800}, {"users": 12, "value": 790}, {"users": 12, "value": 780}, {"users": 12, "value": 770}, {"users": 12, "value": 820}, {"users": 12, "value": 830}, {"users": 12, "value": 790}, {"users": 12, "value": 790}], "response_time_percentile_2": [{"users": 10, "value": 650}, {"users": 10, "value": 640}, {"users": 10, "value": 660}, {"users": 10, "value": 700}, {"users": 10, "value": 700}, {"users": 10, "value": 680}, {"users": 10, "value": 720}, {"users": 10, "value": 720}, {"users": 10, "value": 700}],
"markers": [], "markers": [],
}; };
var percentile1 = 0.5 var percentile1 = 0.5

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
POST,http://localhost:9001/pipeline/write,673,0,620,625.5542347696879,448,734,99835.94056463595,15.80574909851346,0.0,620,640,650,650,680,700,720,720,730,730,730
,Aggregated,673,0,620,625.5542347696879,448,734,99835.94056463595,15.80574909851346,0.0,620,640,650,650,680,700,720,720,730,730,730
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 POST http://localhost:9001/pipeline/write 673 0 620 625.5542347696879 448 734 99835.94056463595 15.80574909851346 0.0 620 640 650 650 680 700 720 720 730 730 730
3 Aggregated 673 0 620 625.5542347696879 448 734 99835.94056463595 15.80574909851346 0.0 620 640 650 650 680 700 720 720 730 730 730

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

View file

@ -0,0 +1 @@
Count,Message,Traceback,Nodes
1 Count Message Traceback Nodes

View file

@ -0,0 +1 @@
Method,Name,Error,Occurrences
1 Method Name Error Occurrences

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/PipelineDcacheOps,3480,0,98,104.35343347919283,85.40578499378171,842.1087349997833,14999.985632183909,95.67244900465325,0.0,98,99,100,100,100,110,120,360,840,840,840
,Aggregated,3480,0,98,104.35343347919283,85.40578499378171,842.1087349997833,14999.985632183909,95.67244900465325,0.0,98,99,100,100,100,110,120,360,840,840,840
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 grpc /dcache.DcacheService/PipelineDcacheOps 3480 0 98 104.35343347919283 85.40578499378171 842.1087349997833 14999.985632183909 95.67244900465325 0.0 98 99 100 100 100 110 120 360 840 840 840
3 Aggregated 3480 0 98 104.35343347919283 85.40578499378171 842.1087349997833 14999.985632183909 95.67244900465325 0.0 98 99 100 100 100 110 120 360 840 840 840

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

View file

@ -0,0 +1 @@
Count,Message,Traceback,Nodes
1 Count Message Traceback Nodes

View file

@ -0,0 +1 @@
Method,Name,Error,Occurrences
1 Method Name Error Occurrences

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/AddVisitor,186109,0,79,74.60541254397303,3.7561320059467107,119.94536400015932,10.999731340236098,4816.33283284295,0.0,79,83,86,89,93,97,100,110,120,120,120
,Aggregated,186109,0,79,74.60541254397303,3.7561320059467107,119.94536400015932,10.999731340236098,4816.33283284295,0.0,79,83,86,89,93,97,100,110,120,120,120
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 grpc /dcache.DcacheService/AddVisitor 186109 0 79 74.60541254397303 3.7561320059467107 119.94536400015932 10.999731340236098 4816.33283284295 0.0 79 83 86 89 93 97 100 110 120 120 120
3 Aggregated 186109 0 79 74.60541254397303 3.7561320059467107 119.94536400015932 10.999731340236098 4816.33283284295 0.0 79 83 86 89 93 97 100 110 120 120 120

Binary file not shown.

After

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

View file

@ -0,0 +1,7 @@
#!/bin/bash
# Load-test the dcache gRPC Write endpoint with ghz:
# plaintext connection (--insecure), 300 concurrent workers (-c), 30000 total
# requests (-n), capped at 4000 requests/second (--rps); HTML report in out.html.
ghz --insecure --proto ./proto/dcache/dcache.proto --call dcache.DcacheService.Write \
-c 300 -n 30000 --rps 4000 -O html -o out.html \
-d '{"data":"{\"AddVisitor\": \"test_1\"}"}' \
localhost:9001
# Alternative payload (AddCaptcha) kept for reference — swap for the -d line above:
# -d '{"data":"{\"AddCaptcha\":{\"id\":\"test_1\",\"mcaptcha\":{\"defense\":{\"current_visitor_threshold\":0,\"levels\":[{\"difficulty_factor\":500,\"visitor_threshold\":50},{\"difficulty_factor\":50000,\"visitor_threshold\":5000}]},\"duration\":30,\"visitor_threshold\":0}}}"}' \

View file

@ -0,0 +1 @@
10 messages per batch request

View file

@ -0,0 +1 @@
Count,Message,Traceback,Nodes
1 Count Message Traceback Nodes

View file

@ -0,0 +1 @@
Method,Name,Error,Occurrences
1 Method Name Error Occurrences

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/PipelineDcacheOps,41383,0,140.0,99.16818701259079,5.581609002547339,182.89305199868977,40.0,650.896214047811,0.0,140,150,150,150,160,160,160,170,180,180,180
,Aggregated,41383,0,140.0,99.16818701259079,5.581609002547339,182.89305199868977,40.0,650.896214047811,0.0,140,150,150,150,160,160,160,170,180,180,180
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 grpc /dcache.DcacheService/PipelineDcacheOps 41383 0 140.0 99.16818701259079 5.581609002547339 182.89305199868977 40.0 650.896214047811 0.0 140 150 150 150 160 160 160 170 180 180 180
3 Aggregated 41383 0 140.0 99.16818701259079 5.581609002547339 182.89305199868977 40.0 650.896214047811 0.0 140 150 150 150 160 160 160 170 180 180 180

View file

@ -0,0 +1 @@
Count,Message,Traceback,Nodes
1 Count Message Traceback Nodes

View file

@ -0,0 +1 @@
Method,Name,Error,Occurrences
1 Method Name Error Occurrences

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/Write,96465,0,600.0,530.5241670541676,3.931416000114041,2860.153126999876,130.11822940963043,732.274667601832,0.0,600,720,830,880,1100,1200,1300,1500,2300,2900,2900
,Aggregated,96465,0,600.0,530.5241670541676,3.931416000114041,2860.153126999876,130.11822940963043,732.274667601832,0.0,600,720,830,880,1100,1200,1300,1500,2300,2900,2900
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 grpc /dcache.DcacheService/Write 96465 0 600.0 530.5241670541676 3.931416000114041 2860.153126999876 130.11822940963043 732.274667601832 0.0 600 720 830 880 1100 1200 1300 1500 2300 2900 2900
3 Aggregated 96465 0 600.0 530.5241670541676 3.931416000114041 2860.153126999876 130.11822940963043 732.274667601832 0.0 600 720 830 880 1100 1200 1300 1500 2300 2900 2900

View file

@ -0,0 +1 @@
Count,Message,Traceback,Nodes
1 Count Message Traceback Nodes

View file

@ -0,0 +1 @@
Method,Name,Error,Occurrences
1 Method Name Error Occurrences

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,3 @@
Type,Name,Request Count,Failure Count,Median Response Time,Average Response Time,Min Response Time,Max Response Time,Average Content Size,Requests/s,Failures/s,50%,66%,75%,80%,90%,95%,98%,99%,99.9%,99.99%,100%
grpc,/dcache.DcacheService/AddVisitor,358924,0,79,77.86313645947614,3.354386999944836,123.28810700000759,0.0,4842.970815301002,0.0,79,84,86,88,92,96,100,100,110,120,120
,Aggregated,358924,0,79,77.86313645947614,3.354386999944836,123.28810700000759,0.0,4842.970815301002,0.0,79,84,86,88,92,96,100,100,110,120,120
1 Type Name Request Count Failure Count Median Response Time Average Response Time Min Response Time Max Response Time Average Content Size Requests/s Failures/s 50% 66% 75% 80% 90% 95% 98% 99% 99.9% 99.99% 100%
2 grpc /dcache.DcacheService/AddVisitor 358924 0 79 77.86313645947614 3.354386999944836 123.28810700000759 0.0 4842.970815301002 0.0 79 84 86 88 92 96 100 100 110 120 120
3 Aggregated 358924 0 79 77.86313645947614 3.354386999944836 123.28810700000759 0.0 4842.970815301002 0.0 79 84 86 88 92 96 100 100 110 120 120

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 674 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 1.2 MiB

10
build.rs Normal file
View file

@ -0,0 +1,10 @@
/// Build script: compiles the dcache protobuf definition with `tonic_build`
/// so the generated gRPC client/server code can be pulled in at compile time
/// (e.g. via `tonic::include_proto!`).
///
/// # Errors
/// Returns any error raised by protobuf compilation (missing `protoc`,
/// unreadable or invalid `.proto` file).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Generated code goes to cargo's OUT_DIR by default; use
    // `tonic_build::configure().out_dir("protoout").compile(...)` instead
    // if a fixed on-disk output path is ever needed for inspection.
    tonic_build::compile_protos("proto/dcache/dcache.proto")?;
    Ok(())
}

9
check.sh Executable file
View file

@ -0,0 +1,9 @@
#!/bin/bash
# Generate bindings from the dcache protobuf definition.
# Include paths: the project's proto/ dir plus vendored bufbuild/ protos.
# Outputs: Go messages (--go_out), Go gRPC stubs (--go-grpc_out), and
# validation code (--validate_out).
# NOTE(review): "lang=rust" for --validate_out alongside Go outputs looks
# intentional (Rust server, Go tooling) — confirm against the build setup.
# ${PWD} is quoted so the script survives a working directory with spaces.
protoc \
--proto_path="${PWD}/proto" \
--proto_path="${PWD}/bufbuild" \
--go_out="${PWD}" \
--go-grpc_out="${PWD}" \
--validate_out="lang=rust:${PWD}" \
"${PWD}/proto/dcache/dcache.proto"

80
dcache_py/dcache_pb2.py Normal file

File diff suppressed because one or more lines are too long

218
dcache_py/dcache_pb2.pyi Normal file
View file

@ -0,0 +1,218 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
# Module-level protobuf file descriptor for the generated dcache module.
DESCRIPTOR: _descriptor.FileDescriptor
class Level(_message.Message):
    # Generated stub for the `Level` message; fields: visitor_threshold (int),
    # difficulty_factor (int).
    __slots__ = ("visitor_threshold", "difficulty_factor")
    VISITOR_THRESHOLD_FIELD_NUMBER: _ClassVar[int]
    DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
    visitor_threshold: int
    difficulty_factor: int
    def __init__(self, visitor_threshold: _Optional[int] = ..., difficulty_factor: _Optional[int] = ...) -> None: ...
class Defense(_message.Message):
    # Generated stub for the `Defense` message; single repeated field of
    # `Level` sub-messages.
    __slots__ = ("levels",)
    LEVELS_FIELD_NUMBER: _ClassVar[int]
    levels: _containers.RepeatedCompositeFieldContainer[Level]
    def __init__(self, levels: _Optional[_Iterable[_Union[Level, _Mapping]]] = ...) -> None: ...
class MCaptcha(_message.Message):
    # Generated stub for the `MCaptcha` message; fields: duration (int) and a
    # nested `Defense` sub-message.
    __slots__ = ("duration", "defense")
    DURATION_FIELD_NUMBER: _ClassVar[int]
    DEFENSE_FIELD_NUMBER: _ClassVar[int]
    duration: int
    defense: Defense
    def __init__(self, duration: _Optional[int] = ..., defense: _Optional[_Union[Defense, _Mapping]] = ...) -> None: ...
class AddCaptchaRequest(_message.Message):
    # Generated stub for the `AddCaptchaRequest` message; fields: id (str) and
    # a nested `MCaptcha` configuration.
    __slots__ = ("id", "mcaptcha")
    ID_FIELD_NUMBER: _ClassVar[int]
    MCAPTCHA_FIELD_NUMBER: _ClassVar[int]
    id: str
    mcaptcha: MCaptcha
    def __init__(self, id: _Optional[str] = ..., mcaptcha: _Optional[_Union[MCaptcha, _Mapping]] = ...) -> None: ...
class RenameCaptchaRequest(_message.Message):
    # Generated stub for the `RenameCaptchaRequest` message; fields:
    # name (str) and rename_to (str).
    __slots__ = ("name", "rename_to")
    NAME_FIELD_NUMBER: _ClassVar[int]
    RENAME_TO_FIELD_NUMBER: _ClassVar[int]
    name: str
    rename_to: str
    def __init__(self, name: _Optional[str] = ..., rename_to: _Optional[str] = ...) -> None: ...
class CachePowRequest(_message.Message):
    # Generated stub for the `CachePowRequest` message; fields: string (str),
    # difficulty_factor (int), duration (int), key (str).
    __slots__ = ("string", "difficulty_factor", "duration", "key")
    STRING_FIELD_NUMBER: _ClassVar[int]
    DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
    DURATION_FIELD_NUMBER: _ClassVar[int]
    KEY_FIELD_NUMBER: _ClassVar[int]
    string: str
    difficulty_factor: int
    duration: int
    key: str
    def __init__(self, string: _Optional[str] = ..., difficulty_factor: _Optional[int] = ..., duration: _Optional[int] = ..., key: _Optional[str] = ...) -> None: ...
class CacheResultRequest(_message.Message):
    # Generated stub for the `CacheResultRequest` message; fields: token (str),
    # key (str), duration (int).
    __slots__ = ("token", "key", "duration")
    TOKEN_FIELD_NUMBER: _ClassVar[int]
    KEY_FIELD_NUMBER: _ClassVar[int]
    DURATION_FIELD_NUMBER: _ClassVar[int]
    token: str
    key: str
    duration: int
    def __init__(self, token: _Optional[str] = ..., key: _Optional[str] = ..., duration: _Optional[int] = ...) -> None: ...
class DeleteCaptchaResultRequest(_message.Message):
    # Generated stub for the `DeleteCaptchaResultRequest` message; single
    # field: token (str).
    __slots__ = ("token",)
    TOKEN_FIELD_NUMBER: _ClassVar[int]
    token: str
    def __init__(self, token: _Optional[str] = ...) -> None: ...
class CaptchaID(_message.Message):
    # Generated stub for the `CaptchaID` message; single field: id (str).
    __slots__ = ("id",)
    ID_FIELD_NUMBER: _ClassVar[int]
    id: str
    def __init__(self, id: _Optional[str] = ...) -> None: ...
class PoID(_message.Message):
    # Generated stub for the `PoID` message; single field: id (str).
    # Structurally identical to CaptchaID but a distinct wire type.
    __slots__ = ("id",)
    ID_FIELD_NUMBER: _ClassVar[int]
    id: str
    def __init__(self, id: _Optional[str] = ...) -> None: ...
class AddVisitorResult(_message.Message):
    # Generated stub for the `AddVisitorResult` message; fields:
    # duration (int), difficulty_factor (int).
    __slots__ = ("duration", "difficulty_factor")
    DURATION_FIELD_NUMBER: _ClassVar[int]
    DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
    duration: int
    difficulty_factor: int
    def __init__(self, duration: _Optional[int] = ..., difficulty_factor: _Optional[int] = ...) -> None: ...
class OptionAddVisitorResult(_message.Message):
    # Generated stub wrapping an optional `AddVisitorResult` (absent field
    # models a None/absent result).
    __slots__ = ("result",)
    RESULT_FIELD_NUMBER: _ClassVar[int]
    result: AddVisitorResult
    def __init__(self, result: _Optional[_Union[AddVisitorResult, _Mapping]] = ...) -> None: ...
class RaftRequest(_message.Message):
    """Opaque serialized Raft request payload carried as a string."""
    __slots__ = ("data",)
    DATA_FIELD_NUMBER: _ClassVar[int]
    data: str
    def __init__(self, data: _Optional[str] = ...) -> None: ...
class RaftReply(_message.Message):
    """Generic Raft RPC reply: serialized payload in `data`, error text in `error`."""
    __slots__ = ("data", "error")
    DATA_FIELD_NUMBER: _ClassVar[int]
    ERROR_FIELD_NUMBER: _ClassVar[int]
    data: str
    error: str
    def __init__(self, data: _Optional[str] = ..., error: _Optional[str] = ...) -> None: ...
class Learner(_message.Message):
    """A Raft learner node: numeric node id plus its network address."""
    __slots__ = ("id", "addr")
    ID_FIELD_NUMBER: _ClassVar[int]
    ADDR_FIELD_NUMBER: _ClassVar[int]
    id: int
    addr: str
    def __init__(self, id: _Optional[int] = ..., addr: _Optional[str] = ...) -> None: ...
class CaptchaExistsResponse(_message.Message):
    """Whether a captcha with the queried id exists."""
    __slots__ = ("exists",)
    EXISTS_FIELD_NUMBER: _ClassVar[int]
    exists: bool
    def __init__(self, exists: bool = ...) -> None: ...
class GetVisitorCountResponse(_message.Message):
    """Current visitor count for a captcha."""
    __slots__ = ("visitors",)
    VISITORS_FIELD_NUMBER: _ClassVar[int]
    visitors: int
    def __init__(self, visitors: _Optional[int] = ...) -> None: ...
class OptionGetVisitorCountResponse(_message.Message):
    """Optional wrapper around GetVisitorCountResponse (absent when unknown)."""
    __slots__ = ("result",)
    RESULT_FIELD_NUMBER: _ClassVar[int]
    result: GetVisitorCountResponse
    def __init__(self, result: _Optional[_Union[GetVisitorCountResponse, _Mapping]] = ...) -> None: ...
class DcacheRequest(_message.Message):
    """One dcache operation; exactly one of the fields is set (proto `oneof`)."""
    __slots__ = ("addCaptcha", "addVisitor", "renameCaptcha", "removeCaptcha", "cachePow", "cacheResult", "captchaExists", "getVisitorCount")
    ADDCAPTCHA_FIELD_NUMBER: _ClassVar[int]
    ADDVISITOR_FIELD_NUMBER: _ClassVar[int]
    RENAMECAPTCHA_FIELD_NUMBER: _ClassVar[int]
    REMOVECAPTCHA_FIELD_NUMBER: _ClassVar[int]
    CACHEPOW_FIELD_NUMBER: _ClassVar[int]
    CACHERESULT_FIELD_NUMBER: _ClassVar[int]
    CAPTCHAEXISTS_FIELD_NUMBER: _ClassVar[int]
    GETVISITORCOUNT_FIELD_NUMBER: _ClassVar[int]
    addCaptcha: AddCaptchaRequest
    addVisitor: CaptchaID
    renameCaptcha: RenameCaptchaRequest
    removeCaptcha: CaptchaID
    cachePow: CachePowRequest
    cacheResult: CacheResultRequest
    captchaExists: CaptchaID
    getVisitorCount: CaptchaID
    def __init__(self, addCaptcha: _Optional[_Union[AddCaptchaRequest, _Mapping]] = ..., addVisitor: _Optional[_Union[CaptchaID, _Mapping]] = ..., renameCaptcha: _Optional[_Union[RenameCaptchaRequest, _Mapping]] = ..., removeCaptcha: _Optional[_Union[CaptchaID, _Mapping]] = ..., cachePow: _Optional[_Union[CachePowRequest, _Mapping]] = ..., cacheResult: _Optional[_Union[CacheResultRequest, _Mapping]] = ..., captchaExists: _Optional[_Union[CaptchaID, _Mapping]] = ..., getVisitorCount: _Optional[_Union[CaptchaID, _Mapping]] = ...) -> None: ...
class DcacheResponse(_message.Message):
    """Response to one DcacheRequest; exactly one field is set (proto `oneof`)."""
    __slots__ = ("option_add_visitor_result", "other", "captcha_exists", "get_visitor_count")
    OPTION_ADD_VISITOR_RESULT_FIELD_NUMBER: _ClassVar[int]
    OTHER_FIELD_NUMBER: _ClassVar[int]
    CAPTCHA_EXISTS_FIELD_NUMBER: _ClassVar[int]
    GET_VISITOR_COUNT_FIELD_NUMBER: _ClassVar[int]
    option_add_visitor_result: OptionAddVisitorResult
    other: RaftReply
    captcha_exists: CaptchaExistsResponse
    get_visitor_count: OptionGetVisitorCountResponse
    def __init__(self, option_add_visitor_result: _Optional[_Union[OptionAddVisitorResult, _Mapping]] = ..., other: _Optional[_Union[RaftReply, _Mapping]] = ..., captcha_exists: _Optional[_Union[CaptchaExistsResponse, _Mapping]] = ..., get_visitor_count: _Optional[_Union[OptionGetVisitorCountResponse, _Mapping]] = ...) -> None: ...
class DcacheBatchRequest(_message.Message):
    """A batch of DcacheRequest operations, pipelined in one RPC."""
    __slots__ = ("requests",)
    REQUESTS_FIELD_NUMBER: _ClassVar[int]
    requests: _containers.RepeatedCompositeFieldContainer[DcacheRequest]
    def __init__(self, requests: _Optional[_Iterable[_Union[DcacheRequest, _Mapping]]] = ...) -> None: ...
class DcacheBatchResponse(_message.Message):
    """Per-request responses for a DcacheBatchRequest."""
    __slots__ = ("responses",)
    RESPONSES_FIELD_NUMBER: _ClassVar[int]
    responses: _containers.RepeatedCompositeFieldContainer[DcacheResponse]
    def __init__(self, responses: _Optional[_Iterable[_Union[DcacheResponse, _Mapping]]] = ...) -> None: ...
class RetrievePowRequest(_message.Message):
    """Looks up a cached PoW (or verifies a result) by token and captcha key."""
    __slots__ = ("token", "key")
    TOKEN_FIELD_NUMBER: _ClassVar[int]
    KEY_FIELD_NUMBER: _ClassVar[int]
    token: str
    key: str
    def __init__(self, token: _Optional[str] = ..., key: _Optional[str] = ...) -> None: ...
class RetrievePowResponse(_message.Message):
    """Cached PoW configuration: difficulty, TTL and captcha key."""
    __slots__ = ("difficulty_factor", "duration", "key")
    DIFFICULTY_FACTOR_FIELD_NUMBER: _ClassVar[int]
    DURATION_FIELD_NUMBER: _ClassVar[int]
    KEY_FIELD_NUMBER: _ClassVar[int]
    difficulty_factor: int
    duration: int
    key: str
    def __init__(self, difficulty_factor: _Optional[int] = ..., duration: _Optional[int] = ..., key: _Optional[str] = ...) -> None: ...
class CaptchaResultVerified(_message.Message):
    """Whether a captcha result token verified successfully."""
    __slots__ = ("verified",)
    VERIFIED_FIELD_NUMBER: _ClassVar[int]
    verified: bool
    def __init__(self, verified: bool = ...) -> None: ...
class DeletePowRequest(_message.Message):
    """Deletes a cached PoW challenge identified by its challenge string."""
    __slots__ = ("string",)
    STRING_FIELD_NUMBER: _ClassVar[int]
    string: str
    def __init__(self, string: _Optional[str] = ...) -> None: ...
class OptionalRetrievePoWResponse(_message.Message):
    """Optional wrapper around RetrievePowResponse (absent on cache miss)."""
    __slots__ = ("result",)
    RESULT_FIELD_NUMBER: _ClassVar[int]
    result: RetrievePowResponse
    def __init__(self, result: _Optional[_Union[RetrievePowResponse, _Mapping]] = ...) -> None: ...

View file

@ -0,0 +1,663 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import dcache_py.dcache_pb2 as dcache__pb2
class DcacheServiceStub(object):
    """Client-side stub for the dcache.DcacheService gRPC service.

    Each attribute is a unary-unary callable bound to one RPC of the
    service, wired with the matching request serializer and response
    deserializer from dcache_pb2.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Captcha configuration and PoW-cache RPCs.
        self.AddCaptcha = channel.unary_unary(
                '/dcache.DcacheService/AddCaptcha',
                request_serializer=dcache__pb2.AddCaptchaRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.AddVisitor = channel.unary_unary(
                '/dcache.DcacheService/AddVisitor',
                request_serializer=dcache__pb2.CaptchaID.SerializeToString,
                response_deserializer=dcache__pb2.OptionAddVisitorResult.FromString,
                )
        self.RenameCaptcha = channel.unary_unary(
                '/dcache.DcacheService/RenameCaptcha',
                request_serializer=dcache__pb2.RenameCaptchaRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.RemoveCaptcha = channel.unary_unary(
                '/dcache.DcacheService/RemoveCaptcha',
                request_serializer=dcache__pb2.CaptchaID.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.CachePow = channel.unary_unary(
                '/dcache.DcacheService/CachePow',
                request_serializer=dcache__pb2.CachePowRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.RetrievePow = channel.unary_unary(
                '/dcache.DcacheService/RetrievePow',
                request_serializer=dcache__pb2.RetrievePowRequest.SerializeToString,
                response_deserializer=dcache__pb2.OptionalRetrievePoWResponse.FromString,
                )
        self.DeletePow = channel.unary_unary(
                '/dcache.DcacheService/DeletePow',
                request_serializer=dcache__pb2.DeletePowRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.CacheResult = channel.unary_unary(
                '/dcache.DcacheService/CacheResult',
                request_serializer=dcache__pb2.CacheResultRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.VerifyCaptchaResult = channel.unary_unary(
                '/dcache.DcacheService/VerifyCaptchaResult',
                request_serializer=dcache__pb2.RetrievePowRequest.SerializeToString,
                response_deserializer=dcache__pb2.CaptchaResultVerified.FromString,
                )
        self.DeleteCaptchaResult = channel.unary_unary(
                '/dcache.DcacheService/DeleteCaptchaResult',
                request_serializer=dcache__pb2.DeleteCaptchaResultRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.CaptchaExists = channel.unary_unary(
                '/dcache.DcacheService/CaptchaExists',
                request_serializer=dcache__pb2.CaptchaID.SerializeToString,
                response_deserializer=dcache__pb2.CaptchaExistsResponse.FromString,
                )
        self.GetVisitorCount = channel.unary_unary(
                '/dcache.DcacheService/GetVisitorCount',
                request_serializer=dcache__pb2.CaptchaID.SerializeToString,
                response_deserializer=dcache__pb2.OptionGetVisitorCountResponse.FromString,
                )
        self.PipelineDcacheOps = channel.unary_unary(
                '/dcache.DcacheService/PipelineDcacheOps',
                request_serializer=dcache__pb2.DcacheBatchRequest.SerializeToString,
                response_deserializer=dcache__pb2.DcacheBatchResponse.FromString,
                )
        # Raft / cluster-management RPCs.
        self.AddLearner = channel.unary_unary(
                '/dcache.DcacheService/AddLearner',
                request_serializer=dcache__pb2.Learner.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.Write = channel.unary_unary(
                '/dcache.DcacheService/Write',
                request_serializer=dcache__pb2.RaftRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.Forward = channel.unary_unary(
                '/dcache.DcacheService/Forward',
                request_serializer=dcache__pb2.RaftRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.AppendEntries = channel.unary_unary(
                '/dcache.DcacheService/AppendEntries',
                request_serializer=dcache__pb2.RaftRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        self.InstallSnapshot = channel.unary_unary(
                '/dcache.DcacheService/InstallSnapshot',
                request_serializer=dcache__pb2.RaftRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
        # NOTE: lower-case name mirrors the `vote` rpc in dcache.proto.
        self.vote = channel.unary_unary(
                '/dcache.DcacheService/vote',
                request_serializer=dcache__pb2.RaftRequest.SerializeToString,
                response_deserializer=dcache__pb2.RaftReply.FromString,
                )
class DcacheServiceServicer(object):
    """Server-side handler base class for dcache.DcacheService.

    Subclass and override the methods below; every default implementation
    answers UNIMPLEMENTED and raises NotImplementedError.
    """

    def AddCaptcha(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AddVisitor(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RenameCaptcha(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RemoveCaptcha(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CachePow(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RetrievePow(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeletePow(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CacheResult(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def VerifyCaptchaResult(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteCaptchaResult(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CaptchaExists(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetVisitorCount(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def PipelineDcacheOps(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AddLearner(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Write(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Forward(self, request, context):
        """/ Forward a request to other
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AppendEntries(self, request, context):
        """raft RPC
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def InstallSnapshot(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def vote(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_DcacheServiceServicer_to_server(servicer, server):
    """Registers `servicer`'s handlers for every dcache.DcacheService RPC on `server`.

    Each handler pairs the servicer method with the request deserializer and
    response serializer matching the RPC's types in dcache.proto.
    """
    rpc_method_handlers = {
            'AddCaptcha': grpc.unary_unary_rpc_method_handler(
                    servicer.AddCaptcha,
                    request_deserializer=dcache__pb2.AddCaptchaRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'AddVisitor': grpc.unary_unary_rpc_method_handler(
                    servicer.AddVisitor,
                    request_deserializer=dcache__pb2.CaptchaID.FromString,
                    response_serializer=dcache__pb2.OptionAddVisitorResult.SerializeToString,
            ),
            'RenameCaptcha': grpc.unary_unary_rpc_method_handler(
                    servicer.RenameCaptcha,
                    request_deserializer=dcache__pb2.RenameCaptchaRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'RemoveCaptcha': grpc.unary_unary_rpc_method_handler(
                    servicer.RemoveCaptcha,
                    request_deserializer=dcache__pb2.CaptchaID.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'CachePow': grpc.unary_unary_rpc_method_handler(
                    servicer.CachePow,
                    request_deserializer=dcache__pb2.CachePowRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'RetrievePow': grpc.unary_unary_rpc_method_handler(
                    servicer.RetrievePow,
                    request_deserializer=dcache__pb2.RetrievePowRequest.FromString,
                    response_serializer=dcache__pb2.OptionalRetrievePoWResponse.SerializeToString,
            ),
            'DeletePow': grpc.unary_unary_rpc_method_handler(
                    servicer.DeletePow,
                    request_deserializer=dcache__pb2.DeletePowRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'CacheResult': grpc.unary_unary_rpc_method_handler(
                    servicer.CacheResult,
                    request_deserializer=dcache__pb2.CacheResultRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'VerifyCaptchaResult': grpc.unary_unary_rpc_method_handler(
                    servicer.VerifyCaptchaResult,
                    request_deserializer=dcache__pb2.RetrievePowRequest.FromString,
                    response_serializer=dcache__pb2.CaptchaResultVerified.SerializeToString,
            ),
            'DeleteCaptchaResult': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteCaptchaResult,
                    request_deserializer=dcache__pb2.DeleteCaptchaResultRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'CaptchaExists': grpc.unary_unary_rpc_method_handler(
                    servicer.CaptchaExists,
                    request_deserializer=dcache__pb2.CaptchaID.FromString,
                    response_serializer=dcache__pb2.CaptchaExistsResponse.SerializeToString,
            ),
            'GetVisitorCount': grpc.unary_unary_rpc_method_handler(
                    servicer.GetVisitorCount,
                    request_deserializer=dcache__pb2.CaptchaID.FromString,
                    response_serializer=dcache__pb2.OptionGetVisitorCountResponse.SerializeToString,
            ),
            'PipelineDcacheOps': grpc.unary_unary_rpc_method_handler(
                    servicer.PipelineDcacheOps,
                    request_deserializer=dcache__pb2.DcacheBatchRequest.FromString,
                    response_serializer=dcache__pb2.DcacheBatchResponse.SerializeToString,
            ),
            'AddLearner': grpc.unary_unary_rpc_method_handler(
                    servicer.AddLearner,
                    request_deserializer=dcache__pb2.Learner.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'Write': grpc.unary_unary_rpc_method_handler(
                    servicer.Write,
                    request_deserializer=dcache__pb2.RaftRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'Forward': grpc.unary_unary_rpc_method_handler(
                    servicer.Forward,
                    request_deserializer=dcache__pb2.RaftRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'AppendEntries': grpc.unary_unary_rpc_method_handler(
                    servicer.AppendEntries,
                    request_deserializer=dcache__pb2.RaftRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'InstallSnapshot': grpc.unary_unary_rpc_method_handler(
                    servicer.InstallSnapshot,
                    request_deserializer=dcache__pb2.RaftRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
            'vote': grpc.unary_unary_rpc_method_handler(
                    servicer.vote,
                    request_deserializer=dcache__pb2.RaftRequest.FromString,
                    response_serializer=dcache__pb2.RaftReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'dcache.DcacheService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class DcacheService(object):
    """Static, connectionless wrappers for dcache.DcacheService RPCs.

    Each method builds a one-shot unary-unary call via the
    grpc.experimental API (no long-lived stub/channel required).
    """

    @staticmethod
    def AddCaptcha(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AddCaptcha',
            dcache__pb2.AddCaptchaRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def AddVisitor(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AddVisitor',
            dcache__pb2.CaptchaID.SerializeToString,
            dcache__pb2.OptionAddVisitorResult.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def RenameCaptcha(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/RenameCaptcha',
            dcache__pb2.RenameCaptchaRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def RemoveCaptcha(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/RemoveCaptcha',
            dcache__pb2.CaptchaID.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CachePow(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/CachePow',
            dcache__pb2.CachePowRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def RetrievePow(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/RetrievePow',
            dcache__pb2.RetrievePowRequest.SerializeToString,
            dcache__pb2.OptionalRetrievePoWResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeletePow(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/DeletePow',
            dcache__pb2.DeletePowRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CacheResult(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/CacheResult',
            dcache__pb2.CacheResultRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def VerifyCaptchaResult(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/VerifyCaptchaResult',
            dcache__pb2.RetrievePowRequest.SerializeToString,
            dcache__pb2.CaptchaResultVerified.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteCaptchaResult(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/DeleteCaptchaResult',
            dcache__pb2.DeleteCaptchaResultRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CaptchaExists(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/CaptchaExists',
            dcache__pb2.CaptchaID.SerializeToString,
            dcache__pb2.CaptchaExistsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetVisitorCount(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/GetVisitorCount',
            dcache__pb2.CaptchaID.SerializeToString,
            dcache__pb2.OptionGetVisitorCountResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def PipelineDcacheOps(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/PipelineDcacheOps',
            dcache__pb2.DcacheBatchRequest.SerializeToString,
            dcache__pb2.DcacheBatchResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def AddLearner(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AddLearner',
            dcache__pb2.Learner.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Write(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/Write',
            dcache__pb2.RaftRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Forward(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/Forward',
            dcache__pb2.RaftRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def AppendEntries(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/AppendEntries',
            dcache__pb2.RaftRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def InstallSnapshot(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/InstallSnapshot',
            dcache__pb2.RaftRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def vote(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/dcache.DcacheService/vote',
            dcache__pb2.RaftRequest.SerializeToString,
            dcache__pb2.RaftReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

8
launch.sh Executable file
View file

@ -0,0 +1,8 @@
#!/bin/bash
# launch.sh — spin up a local 3-node dcache cluster for testing.
#
# Node 1 is the introducer; nodes 2 and 3 join through it. Each node's
# PID is recorded so teardown kills exactly the processes started here.
# (The previous `killall main` would kill ANY process named "main"
# belonging to the user, not just these three nodes.)

BIN=./target/release/main
PIDS=()

nohup "$BIN" --id 1 --http-addr 127.0.0.1:9001 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
PIDS+=("$!")
sleep 1
nohup "$BIN" --id 2 --http-addr 127.0.0.1:9002 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
PIDS+=("$!")
sleep 1
nohup "$BIN" --id 3 --http-addr 127.0.0.1:9003 --introducer-addr 127.0.0.1:9001 --introducer-id 1 --cluster-size 3 &
PIDS+=("$!")

# Block until the operator is done, then tear down only our nodes.
read -p "Continue? (Y/N): " confirm && kill "${PIDS[@]}"

177
proto/dcache/dcache.proto Normal file
View file

@ -0,0 +1,177 @@
syntax = "proto3";
package dcache;

// dcache wire format: captcha configuration, PoW-cache messages and
// Raft cluster-management messages for the DcacheService RPC service.

// One defense level: the difficulty factor to serve once the visitor
// threshold is reached.
message Level {
  uint32 visitor_threshold = 301;
  uint32 difficulty_factor= 302;
}
// Ordered set of defense levels for a captcha.
message Defense {
  repeated Level levels = 401;
}
// Captcha configuration: visitor-window duration plus its defense levels.
message MCaptcha {
  uint64 duration = 502;
  Defense defense = 503;
}
// Registers a new captcha `id` with its configuration.
message AddCaptchaRequest {
  string id = 601;
  MCaptcha mcaptcha = 602;
}
// Renames captcha `name` to `rename_to`.
message RenameCaptchaRequest {
  string name = 701;
  string rename_to = 702;
}
// Caches a PoW challenge string with its difficulty, TTL and captcha key.
message CachePowRequest {
  string string= 801;
  uint32 difficulty_factor = 802;
  uint64 duration = 803;
  string key = 804;
}
// Caches a verified captcha result token under `key` for `duration`.
message CacheResultRequest {
  string token = 817;
  string key = 818;
  uint64 duration= 819;
}
// Deletes a cached captcha result by token.
message DeleteCaptchaResultRequest {
  string token = 821;
}
// Captcha identifier.
message CaptchaID{
  string id = 1;
}
// Proof-of-work identifier.
message PoID{
  string id = 1;
}
// Result of registering a visitor: PoW TTL and difficulty to serve.
message AddVisitorResult {
  uint64 duration = 901;
  uint32 difficulty_factor = 902;
}
// Optional AddVisitorResult (absent when there is no result).
message OptionAddVisitorResult {
  optional AddVisitorResult result = 911;
}
// Opaque serialized Raft request.
message RaftRequest {
  string data = 1;
}
// Generic Raft reply: payload in `data`, error text in `error`.
message RaftReply {
  string data = 1;
  string error = 2;
}
// A Raft learner node (numeric id + network address).
message Learner {
  uint64 id = 1;
  string addr = 2;
}
message CaptchaExistsResponse {
  bool exists = 1;
}
message GetVisitorCountResponse {
  uint32 visitors = 1;
}
message OptionGetVisitorCountResponse {
  optional GetVisitorCountResponse result = 1;
}
// One dcache operation; exactly one variant is set.
message DcacheRequest {
  oneof DcacheRequest {
    AddCaptchaRequest addCaptcha = 1;
    CaptchaID addVisitor = 2;
    RenameCaptchaRequest renameCaptcha = 3;
    CaptchaID removeCaptcha = 4;
    CachePowRequest cachePow = 5;
    CacheResultRequest cacheResult = 6;
    CaptchaID captchaExists = 7;
    CaptchaID getVisitorCount = 8;
  }
}
// Response to one DcacheRequest; exactly one variant is set.
message DcacheResponse {
  oneof DcacheResponse {
    OptionAddVisitorResult option_add_visitor_result = 1;
    RaftReply other = 2;
    CaptchaExistsResponse captcha_exists = 3;
    OptionGetVisitorCountResponse get_visitor_count = 4;
  }
}
// Batch of operations pipelined in a single RPC.
message DcacheBatchRequest {
  repeated DcacheRequest requests = 1;
}
message DcacheBatchResponse {
  repeated DcacheResponse responses = 1;
}
// Looks up a cached PoW (or verifies a result) by token and captcha key.
message RetrievePowRequest {
  string token = 1;
  string key = 2;
}
message RetrievePowResponse {
  uint32 difficulty_factor = 1;
  uint64 duration = 2;
  string key = 3;
}
message CaptchaResultVerified {
  bool verified = 1;
}
// Deletes a cached PoW challenge by its challenge string.
message DeletePowRequest {
  string string = 1;
}
// Optional RetrievePowResponse (absent on cache miss).
message OptionalRetrievePoWResponse {
  optional RetrievePowResponse result = 1;
}
service DcacheService {
  // Captcha configuration and PoW-cache RPCs.
  rpc AddCaptcha(AddCaptchaRequest) returns (RaftReply) {}
  rpc AddVisitor(CaptchaID) returns (OptionAddVisitorResult) {}
  rpc RenameCaptcha(RenameCaptchaRequest) returns (RaftReply) {}
  rpc RemoveCaptcha(CaptchaID) returns (RaftReply) {}
  rpc CachePow(CachePowRequest) returns (RaftReply) {}
  rpc RetrievePow(RetrievePowRequest) returns (OptionalRetrievePoWResponse) {}
  rpc DeletePow(DeletePowRequest) returns (RaftReply) {}
  rpc CacheResult(CacheResultRequest) returns (RaftReply) {}
  rpc VerifyCaptchaResult(RetrievePowRequest) returns (CaptchaResultVerified) {}
  rpc DeleteCaptchaResult(DeleteCaptchaResultRequest) returns (RaftReply) {}
  rpc CaptchaExists(CaptchaID) returns (CaptchaExistsResponse) {}
  rpc GetVisitorCount(CaptchaID) returns (OptionGetVisitorCountResponse) {}
  // Pipelines many operations in one round trip.
  rpc PipelineDcacheOps(DcacheBatchRequest) returns (DcacheBatchResponse) {}
  // Cluster-management RPCs.
  rpc AddLearner(Learner) returns (RaftReply) {}
  rpc Write(RaftRequest) returns (RaftReply) {}
  /// Forward a request to other
  rpc Forward(RaftRequest) returns (RaftReply) {}
  // raft RPC
  rpc AppendEntries(RaftRequest) returns (RaftReply);
  rpc InstallSnapshot(RaftRequest) returns (RaftReply);
  rpc vote(RaftRequest) returns (RaftReply);
}

21
renovate.json Normal file
View file

@ -0,0 +1,21 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
":dependencyDashboard"
],
"labels": [
"renovate-bot"
],
"prHourlyLimit": 0,
"timezone": "Asia/Kolkata",
"prCreation": "immediate",
"vulnerabilityAlerts": {
"enabled": true,
"labels": [
"renovate-bot",
"renovate-security",
"security"
]
}
}

32
requirements.txt Normal file
View file

@ -0,0 +1,32 @@
asyncio==3.4.3
blinker==1.8.2
Brotli==1.1.0
certifi==2024.8.30
charset-normalizer==3.3.2
click==8.1.7
ConfigArgParse==1.7
Flask==3.0.3
Flask-BasicAuth==0.2.0
Flask-Cors==5.0.0
gevent==24.2.1
geventhttpclient==2.3.1
greenlet==3.1.1
grpc-interceptor==0.15.4
grpcio==1.66.1
grpcio-tools==1.60.0
idna==3.10
itsdangerous==2.2.0
Jinja2==3.1.4
locust==2.31.6
MarkupSafe==2.1.5
msgpack==1.1.0
protobuf==4.25.5
psutil==6.0.0
pyzmq==26.2.0
requests==2.32.3
roundrobin==0.0.4
six==1.16.0
urllib3==2.2.3
Werkzeug==3.0.4
zope.event==5.0
zope.interface==7.0.3

View file

@ -17,7 +17,6 @@
*/ */
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use openraft::error::RaftError; use openraft::error::RaftError;
use openraft::BasicNode; use openraft::BasicNode;

View file

@ -16,11 +16,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
use clap::Parser; use clap::Parser;
use dcache::network::raft_network_impl::DcacheNetwork;
use dcache::start_example_raft_node; use dcache::start_example_raft_node;
use dcache::store::DcacheStore;
use dcache::DcacheTypeConfig;
use openraft::Raft;
use tracing_subscriber::EnvFilter; use tracing_subscriber::EnvFilter;
//pub type DcacheRaft = Raft<DcacheTypeConfig, DcacheNetwork, DcacheStore>; //pub type DcacheRaft = Raft<DcacheTypeConfig, DcacheNetwork, DcacheStore>;
@ -44,7 +40,7 @@ pub struct Opt {
pub cluster_size: usize, pub cluster_size: usize,
} }
#[actix_web::main] #[actix_rt::main]
async fn main() -> std::io::Result<()> { async fn main() -> std::io::Result<()> {
// Setup the logger // Setup the logger
tracing_subscriber::fmt() tracing_subscriber::fmt()

View file

@ -20,27 +20,26 @@
use std::io::Cursor; use std::io::Cursor;
use std::sync::Arc; use std::sync::Arc;
use actix_web::middleware;
use actix_web::middleware::Logger;
use actix_web::web::Data;
use actix_web::App;
use actix_web::HttpServer;
use openraft::storage::Adaptor; use openraft::storage::Adaptor;
use openraft::BasicNode; use openraft::BasicNode;
use openraft::Config; use openraft::Config;
use openraft::Raft; use openraft::Raft;
use tonic::transport::Server;
use crate::app::DcacheApp; use crate::app::DcacheApp;
use crate::network::api;
use crate::network::management;
use crate::network::raft;
use crate::network::raft_network_impl::DcacheNetwork; use crate::network::raft_network_impl::DcacheNetwork;
use crate::protobuf::dcache::dcache_service_client::DcacheServiceClient;
use crate::protobuf::dcache::dcache_service_server::DcacheServiceServer;
use crate::protobuf::dcache::Learner;
use crate::store::DcacheRequest; use crate::store::DcacheRequest;
use crate::store::DcacheResponse; use crate::store::DcacheResponse;
use crate::store::DcacheStore; use crate::store::DcacheStore;
pub mod app; pub mod app;
mod mcaptcha;
pub mod network; pub mod network;
mod pool;
mod protobuf;
pub mod store; pub mod store;
pub type DcacheNodeId = u64; pub type DcacheNodeId = u64;
@ -98,7 +97,6 @@ pub async fn start_example_raft_node(
let store = Arc::new(DcacheStore::new(salt)); let store = Arc::new(DcacheStore::new(salt));
let (log_store, state_machine) = Adaptor::new(store.clone()); let (log_store, state_machine) = Adaptor::new(store.clone());
let client = reqwest::Client::new();
// Create the network layer that will connect and communicate the raft instances and // Create the network layer that will connect and communicate the raft instances and
// will be used in conjunction with the store created above. // will be used in conjunction with the store created above.
@ -106,7 +104,7 @@ pub async fn start_example_raft_node(
let (manager_tx, manager_rx) = tokio::sync::mpsc::channel(1000); let (manager_tx, manager_rx) = tokio::sync::mpsc::channel(1000);
// let health = Arc::new(crate::network::raft_network_impl::HealthLedger::new(manager_tx)); // let health = Arc::new(crate::network::raft_network_impl::HealthLedger::new(manager_tx));
// let network = Arc::new(DcacheNetwork::new(health)); // let network = Arc::new(DcacheNetwork::new(health));
let network = Arc::new(DcacheNetwork::new(manager_tx, client.clone())); let network = Arc::new(DcacheNetwork::new(manager_tx));
// Create a local raft instance. // Create a local raft instance.
let raft = Raft::new( let raft = Raft::new(
@ -120,68 +118,50 @@ pub async fn start_example_raft_node(
.unwrap(); .unwrap();
raft.enable_heartbeat(true); raft.enable_heartbeat(true);
raft.enable_elect(true); raft.enable_elect(true);
// raft.enable_tick(true); // raft.enable_tick(true);
// Create an application that will store all the instances created above, this will // Create an application that will store all the instances created above, this will
// be later used on the actix-web services. // be later used on the actix-web services.
let app = Data::new(DcacheApp { let app = DcacheApp {
id: node_id, id: node_id,
addr: http_addr.clone(), addr: http_addr.clone(),
raft, raft,
store, store,
config, config,
network, network,
}); };
let app = Arc::new(app);
let dcache_service = protobuf::MyDcacheImpl::new(app.clone());
if introducer_addr == http_addr { if introducer_addr == http_addr {
app.init().await.unwrap(); app.init().await.unwrap();
} }
let app_copy = app.clone(); let app_copy = app.clone();
// Start the actix-web server.
let server = HttpServer::new(move || {
App::new()
.wrap(Logger::default())
.wrap(Logger::new("%a %{User-Agent}i"))
.wrap(middleware::Compress::default())
.app_data(app.clone())
// raft internal RPC
.service(raft::append)
.service(raft::snapshot)
.service(raft::vote)
// admin API
.service(management::init)
.service(management::add_learner)
.service(management::change_membership)
.service(management::metrics)
// application API
.service(api::write)
.service(api::state)
.service(api::read)
.service(api::pipeline_read)
.service(api::pipeline_write)
.service(api::ws_write)
// .service(api::consistent_read)
});
let x = server.bind(&http_addr)?; let svc = DcacheServiceServer::new(dcache_service);
let x = Server::builder()
.add_service(svc)
.serve(http_addr.clone().parse().unwrap());
let server_fut = tokio::spawn(x);
let server_fut = tokio::spawn(x.run());
tokio::time::sleep(std::time::Duration::new(3, 0)).await; tokio::time::sleep(std::time::Duration::new(3, 0)).await;
let req: (DcacheNodeId, String) = (node_id, http_addr); let url = format!("http://{}", introducer_addr);
let c = reqwest::Client::new(); let mut client = DcacheServiceClient::connect(url).await.unwrap();
c.post(format!("http://{}/add-learner", introducer_addr)) client
.json(&req) .add_learner(Learner {
.send() id: node_id,
addr: http_addr,
})
.await .await
.unwrap(); .unwrap();
// let health_job = tokio::spawn(DcacheApp::health_job(app_copy));
let health_metrics_handle = let health_metrics_handle =
crate::network::management::HealthMetrics::spawn(app_copy, 5, manager_rx).await; crate::network::management::HealthMetrics::spawn(app_copy, 5, manager_rx).await;
server_fut.await??; server_fut.await?.unwrap();
health_metrics_handle.abort(); health_metrics_handle.abort();
// health_job.abort();
Ok(()) Ok(())
} }

330
src/mcaptcha/cache.rs Normal file
View file

@ -0,0 +1,330 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2021 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
//! In-memory cache implementation that uses [HashMap]
use std::sync::Arc;
use std::time::Duration;
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use libmcaptcha::cache::messages::*;
use libmcaptcha::errors::*;
#[derive(Clone, Default, Serialize, Deserialize)]
/// cache datastructure implementing [Save]
///
/// Clones share the underlying maps (both are behind `Arc`), so any clone can
/// be handed to a spawned task and still mutate the same cache.
pub struct HashCache {
    // challenge string -> cached PoW config; entries are removed by timers
    // spawned in cache_pow / clean_all_after_cold_start
    difficulty_map: Arc<DashMap<String, CachedPoWConfig>>,
    // PoW result token -> (captcha key, lifetime in seconds)
    result_map: Arc<DashMap<String, (String, u64)>>,
}
impl HashCache {
    /// Save a [CachePoW] config under its challenge string.
    ///
    /// # Errors
    /// Returns `CaptchaError::InvalidPoW` when the challenge is already
    /// cached; the existing entry is preserved.
    // NOTE(review): get-then-insert is not atomic on DashMap; two concurrent
    // saves of the same challenge could both pass the check. Consider
    // DashMap's entry API if that race matters in practice.
    fn save_pow_config(&self, config: CachePoW) -> CaptchaResult<()> {
        let challenge = config.string;
        let config = CachedPoWConfig {
            key: config.key,
            difficulty_factor: config.difficulty_factor,
            duration: config.duration,
        };
        if self.difficulty_map.get(&challenge).is_none() {
            self.difficulty_map.insert(challenge, config);
            Ok(())
        } else {
            Err(CaptchaError::InvalidPoW)
        }
    }

    /// Merge `updated` into `self` after a cold start and schedule an expiry
    /// timer for every imported entry. Original deadlines are not persisted,
    /// so each imported entry gets a fresh, full lifetime.
    pub async fn clean_all_after_cold_start(&self, updated: HashCache) {
        for entry in updated.difficulty_map.iter() {
            self.difficulty_map
                .insert(entry.key().to_owned(), entry.value().to_owned());
        }
        for entry in updated.result_map.iter() {
            self.result_map
                .insert(entry.key().to_owned(), entry.value().to_owned());
        }
        // Arm expiry timers off the hot path; each timer owns a clone of the
        // cache (cheap: Arc bumps) and removes exactly one key.
        let cache = self.clone();
        tokio::spawn(async move {
            for values in cache.result_map.iter() {
                let inner_cache = cache.clone();
                let duration = values.value().1;
                let key = values.key().to_owned();
                tokio::spawn(async move {
                    tokio::time::sleep(Duration::new(duration, 0)).await;
                    inner_cache.remove_cache_result(&key);
                });
            }
            for values in cache.difficulty_map.iter() {
                let inner_cache = cache.clone();
                let duration = values.value().duration;
                let key = values.key().to_owned();
                tokio::spawn(async move {
                    tokio::time::sleep(Duration::new(duration, 0)).await;
                    inner_cache.remove_pow_config(&key);
                });
            }
        });
    }

    /// Retrieve a [CachedPoWConfig] for `msg.token`. The config is deleted as
    /// a side effect, making retrieval one-shot.
    pub fn retrieve_pow_config(&self, msg: VerifyCaptchaResult) -> Option<CachedPoWConfig> {
        // The original wrapped this in if-let Some(x) { Some(x) } else { None };
        // remove_pow_config already returns the Option we want.
        self.remove_pow_config(&msg.token)
    }

    /// Delete a PoW config, returning it if it was present.
    pub fn remove_pow_config(&self, string: &str) -> Option<CachedPoWConfig> {
        self.difficulty_map.remove(string).map(|x| x.1)
    }

    /// Store a solved-captcha token -> (captcha key, lifetime) mapping.
    fn save_captcha_result(&self, res: CacheResult) {
        self.result_map.insert(res.token, (res.key, res.duration));
    }

    /// One-shot verification: consumes the cached token and reports whether
    /// it belongs to `challenge.key`. A second call with the same token
    /// always returns `false`.
    pub fn verify_captcha_result(&self, challenge: VerifyCaptchaResult) -> bool {
        self.remove_cache_result(&challenge.token)
            .map_or(false, |captcha_id| captcha_id == challenge.key)
    }

    /// Delete a captcha result, returning its captcha key if present.
    pub fn remove_cache_result(&self, string: &str) -> Option<String> {
        self.result_map.remove(string).map(|x| x.1 .0)
    }

    /// Cache a PoW config and schedule its removal after `msg.duration`
    /// seconds. (`Duration` is already imported at the top of this file; the
    /// original re-imported it locally.)
    pub fn cache_pow(&self, msg: CachePoW) {
        let duration = Duration::new(msg.duration, 0);
        let string = msg.string.clone();
        let cache = self.clone();
        let wait_for = async move {
            tokio::time::sleep(duration).await;
            cache.remove_pow_config(&string);
        };
        // A duplicate-challenge error is deliberately ignored, as before.
        let _ = self.save_pow_config(msg);
        tokio::spawn(wait_for);
    }

    /// Cache a PoW result and schedule its removal after `msg.duration`
    /// seconds. (Three dead `msg.token.clone()` statements from the original
    /// are removed here.)
    pub fn cache_result(&self, msg: CacheResult) {
        let token = msg.token.clone();
        let duration = Duration::new(msg.duration, 0);
        let cache = self.clone();
        let wait_for = async move {
            tokio::time::sleep(duration).await;
            cache.remove_cache_result(&token);
        };
        tokio::spawn(wait_for);
        self.save_captcha_result(msg);
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use libmcaptcha::master::AddVisitorResult;
    use libmcaptcha::pow::PoWConfig;

    use std::time::Duration;

    // Cold-start merge: entries copied into a fresh cache must behave like
    // the originals — one-shot verification and timed expiry included.
    // NOTE(review): this test sleeps ~20s of wall-clock time (two 10s waits).
    #[actix_rt::test]
    async fn merge_works() {
        const DIFFICULTY_FACTOR: u32 = 54;
        const RES: &str = "b";
        const DURATION: u64 = 5;
        const KEY: &str = "mcaptchakey";
        let pow: PoWConfig = PoWConfig::new(DIFFICULTY_FACTOR, KEY.into()); //salt is dummy here
        let cache = HashCache::default();
        let new_cache = HashCache::default();
        let visitor_result = AddVisitorResult {
            difficulty_factor: DIFFICULTY_FACTOR,
            duration: DURATION,
        };
        let string = pow.string.clone();
        let msg = CachePoWBuilder::default()
            .string(pow.string.clone())
            .difficulty_factor(DIFFICULTY_FACTOR)
            .duration(visitor_result.duration)
            .key(KEY.into())
            .build()
            .unwrap();
        cache.cache_pow(msg);
        let add_cache = CacheResult {
            key: KEY.into(),
            token: RES.into(),
            duration: DURATION,
        };
        cache.cache_result(add_cache.clone());
        // copy everything from `cache` into `new_cache`
        new_cache.clean_all_after_cold_start(cache.clone()).await;
        let msg = VerifyCaptchaResult {
            token: string.clone(),
            key: KEY.into(),
        };
        // both caches must hold the same PoW config after the merge
        let cache_difficulty_factor = cache.retrieve_pow_config(msg.clone()).unwrap();
        let new_cache_difficulty_factor = new_cache.retrieve_pow_config(msg.clone()).unwrap();
        assert_eq!(DIFFICULTY_FACTOR, cache_difficulty_factor.difficulty_factor);
        assert_eq!(
            DIFFICULTY_FACTOR,
            new_cache_difficulty_factor.difficulty_factor
        );
        let verify_msg = VerifyCaptchaResult {
            key: KEY.into(),
            token: RES.into(),
        };
        assert!(new_cache.verify_captcha_result(verify_msg.clone()));
        // verification is one-shot: a second attempt must fail
        assert!(!new_cache.verify_captcha_result(verify_msg.clone()));
        let duration: Duration = Duration::new(5, 0);
        // wait past the entry lifetime so the spawned expiry tasks fire
        tokio::time::sleep(duration + duration).await;
        let expired_string = cache.retrieve_pow_config(msg.clone());
        assert_eq!(None, expired_string);
        let expired_string = new_cache.retrieve_pow_config(msg);
        assert_eq!(None, expired_string);
        // re-cache and merge again: imported entries must also expire
        cache.cache_result(add_cache);
        new_cache.clean_all_after_cold_start(cache.clone()).await;
        tokio::time::sleep(duration + duration).await;
        assert!(!new_cache.verify_captcha_result(verify_msg.clone()));
        assert!(!cache.verify_captcha_result(verify_msg));
    }

    // A cached PoW config is retrievable once and expires after its duration.
    #[actix_rt::test]
    async fn hashcache_pow_cache_works() {
        const DIFFICULTY_FACTOR: u32 = 54;
        const DURATION: u64 = 5;
        const KEY: &str = "mcaptchakey";
        let cache = HashCache::default();
        let pow: PoWConfig = PoWConfig::new(DIFFICULTY_FACTOR, KEY.into()); //salt is dummy here
        let visitor_result = AddVisitorResult {
            difficulty_factor: DIFFICULTY_FACTOR,
            duration: DURATION,
        };
        let string = pow.string.clone();
        let msg = CachePoWBuilder::default()
            .string(pow.string.clone())
            .difficulty_factor(DIFFICULTY_FACTOR)
            .duration(visitor_result.duration)
            .key(KEY.into())
            .build()
            .unwrap();
        cache.cache_pow(msg);
        let msg = VerifyCaptchaResult {
            token: string.clone(),
            key: KEY.into(),
        };
        let cache_difficulty_factor = cache.retrieve_pow_config(msg.clone()).unwrap();
        assert_eq!(DIFFICULTY_FACTOR, cache_difficulty_factor.difficulty_factor);
        let duration: Duration = Duration::new(5, 0);
        // wait past the lifetime; the config must be gone
        tokio::time::sleep(duration + duration).await;
        let expired_string = cache.retrieve_pow_config(msg);
        assert_eq!(None, expired_string);
    }

    // Result cache: verify succeeds once, fails on replay, fails for a wrong
    // key, and fails after the entry's lifetime elapses.
    #[actix_rt::test]
    async fn hashcache_result_cache_works() {
        const DURATION: u64 = 5;
        const KEY: &str = "a";
        const RES: &str = "b";
        let cache = HashCache::default();
        // send value to cache
        // send another value to cache for auto delete
        // verify_captcha_result
        // delete
        // wait for timeout and verify_captcha_result against second value
        let add_cache = CacheResult {
            key: KEY.into(),
            token: RES.into(),
            duration: DURATION,
        };
        cache.cache_result(add_cache);
        let verify_msg = VerifyCaptchaResult {
            key: KEY.into(),
            token: RES.into(),
        };
        assert!(cache.verify_captcha_result(verify_msg.clone()));
        // duplicate
        assert!(!cache.verify_captcha_result(verify_msg));
        // a token cached under KEY must not verify against a different key
        let verify_msg = VerifyCaptchaResult {
            key: "cz".into(),
            token: RES.into(),
        };
        assert!(!cache.verify_captcha_result(verify_msg));
        let duration: Duration = Duration::new(5, 0);
        tokio::time::sleep(duration + duration).await;
        let verify_msg = VerifyCaptchaResult {
            key: KEY.into(),
            token: RES.into(),
        };
        assert!(!cache.verify_captcha_result(verify_msg));
    }
}

398
src/mcaptcha/defense.rs Normal file
View file

@ -0,0 +1,398 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2021 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use serde::{Deserialize, Serialize};
use libmcaptcha::defense::Level;
use libmcaptcha::errors::*;
//
///// Level struct that describes threshold-difficulty factor mapping
//#[derive(Debug, Deserialize, Serialize, Copy, Clone, PartialEq)]
//pub struct Level {
// pub visitor_threshold: u32,
// pub difficulty_factor: u32,
//}
//
///// Bulder struct for [Level] to describe threshold-difficulty factor mapping
//#[derive(Debug, Copy, Clone, PartialEq)]
//pub struct LevelBuilder {
// visitor_threshold: Option<u32>,
// difficulty_factor: Option<u32>,
//}
//
//impl Default for LevelBuilder {
// fn default() -> Self {
// LevelBuilder {
// visitor_threshold: None,
// difficulty_factor: None,
// }
// }
//}
//
//impl LevelBuilder {
// /// set visitor count for level
// pub fn visitor_threshold(&mut self, visitor_threshold: u32) -> &mut Self {
// self.visitor_threshold = Some(visitor_threshold);
// self
// }
//
// /// set difficulty factor for level. difficulty_factor can't be zero because
// /// Difficulty is calculated as:
// /// ```no_run
// /// let difficulty_factor = 500;
// /// let difficulty = u128::max_value() - u128::max_value() / difficulty_factor;
// /// ```
// /// the higher the `difficulty_factor`, the higher the difficulty.
// pub fn difficulty_factor(&mut self, difficulty_factor: u32) -> CaptchaResult<&mut Self> {
// if difficulty_factor > 0 {
// self.difficulty_factor = Some(difficulty_factor);
// Ok(self)
// } else {
// Err(CaptchaError::DifficultyFactorZero)
// }
// }
//
// /// build Level struct
// pub fn build(&mut self) -> CaptchaResult<Level> {
// if self.visitor_threshold.is_none() {
// Err(CaptchaError::SetVisitorThreshold)
// } else if self.difficulty_factor.is_none() {
// Err(CaptchaError::SetDifficultyFactor)
// } else {
// Ok(Level {
// difficulty_factor: self.difficulty_factor.unwrap(),
// visitor_threshold: self.visitor_threshold.unwrap(),
// })
// }
// }
//}
//
/// Builder struct for [Defense]
#[derive(Debug, Clone, PartialEq)]
pub struct DefenseBuilder {
    // levels accumulated via add_level(); sorted and validated in build()
    levels: Vec<Level>,
}
impl Default for DefenseBuilder {
fn default() -> Self {
DefenseBuilder { levels: vec![] }
}
}
impl DefenseBuilder {
    /// add a level to [Defense]
    ///
    /// # Errors
    /// `CaptchaError::DuplicateVisitorCount` when a level with the same
    /// visitor threshold was already added.
    pub fn add_level(&mut self, level: Level) -> CaptchaResult<&mut Self> {
        if self
            .levels
            .iter()
            .any(|l| l.visitor_threshold == level.visitor_threshold)
        {
            return Err(CaptchaError::DuplicateVisitorCount);
        }
        self.levels.push(level);
        Ok(self)
    }

    /// Build [Defense]: sorts levels ascending by visitor threshold and
    /// validates them.
    ///
    /// # Errors
    /// (checked in this order, matching the original)
    /// - `LevelEmpty` when no levels were added
    /// - `DifficultyFactorZero` when any level has difficulty factor 0
    /// - `DecreaseingDifficultyFactor` when difficulty does not grow
    ///   monotonically with the visitor threshold
    pub fn build(&mut self) -> CaptchaResult<Defense> {
        if self.levels.is_empty() {
            return Err(CaptchaError::LevelEmpty);
        }
        // sort levels to arrange in ascending order
        self.levels.sort_by_key(|a| a.visitor_threshold);
        if self.levels.iter().any(|l| l.difficulty_factor == 0) {
            return Err(CaptchaError::DifficultyFactorZero);
        }
        // as visitor count increases, difficulty_factor too should increase;
        // windows(2) checks every adjacent pair without index arithmetic
        if self
            .levels
            .windows(2)
            .any(|w| w[0].difficulty_factor > w[1].difficulty_factor)
        {
            return Err(CaptchaError::DecreaseingDifficultyFactor);
        }
        Ok(Defense {
            levels: self.levels.clone(),
        })
    }
}
/// struct describes all the different [Level]s at which an mCaptcha system operates
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct Defense {
    // levels sorted ascending by visitor_threshold; non-empty and monotone in
    // difficulty_factor (both enforced by DefenseBuilder::build)
    levels: Vec<Level>,
}
impl From<Defense> for Vec<Level> {
fn from(d: Defense) -> Self {
d.levels
}
}
impl Defense {
    /// All configured levels, sorted ascending by visitor threshold.
    pub fn get_levels(&self) -> Vec<Level> {
        self.levels.clone()
    }

    /// Select the level governing `current_visitor_level` visitors: the first
    /// level whose `visitor_threshold` is at least the given count, or the
    /// hardest (last) level when the count exceeds every threshold.
    ///
    /// Difficulty is derived from the factor as
    /// `u128::max_value() - u128::max_value() / difficulty_factor`, so a
    /// higher `difficulty_factor` means harder proof of work.
    ///
    /// # Panics
    /// Panics if `levels` is empty; `DefenseBuilder::build` guarantees at
    /// least one level, so this cannot happen for a built [Defense].
    pub fn current_level(&self, current_visitor_level: u32) -> &Level {
        // The original looped by hand and ended with `.last().as_ref().unwrap()`,
        // where `.as_ref()` needlessly produced an `&&Level`.
        self.levels
            .iter()
            .find(|level| current_visitor_level <= level.visitor_threshold)
            .unwrap_or_else(|| self.levels.last().unwrap())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use libmcaptcha::defense::Level;
    use libmcaptcha::LevelBuilder;

    // Two levels with the same visitor threshold must be rejected.
    #[test]
    fn defense_builder_duplicate_visitor_threshold() {
        let mut defense_builder = DefenseBuilder::default();
        let err = defense_builder
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(50)
                    .difficulty_factor(50)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(50)
                    .difficulty_factor(50)
                    .unwrap()
                    .build()
                    .unwrap(),
            );
        assert_eq!(err, Err(CaptchaError::DuplicateVisitorCount));
    }

    // Difficulty must not decrease as visitor thresholds grow.
    #[test]
    fn defense_builder_decreasing_difficulty_factor() {
        let mut defense_builder = DefenseBuilder::default();
        let err = defense_builder
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(50)
                    .difficulty_factor(50)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(500)
                    .difficulty_factor(10)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .build();
        assert_eq!(err, Err(CaptchaError::DecreaseingDifficultyFactor));
    }

    // A visitor count far beyond the highest threshold must clamp to the
    // hardest level instead of overflowing or panicking.
    #[test]
    fn checking_for_integer_overflow() {
        let mut defense = DefenseBuilder::default()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(5)
                    .difficulty_factor(5)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(10)
                    .difficulty_factor(50)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(20)
                    .difficulty_factor(60)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(30)
                    .difficulty_factor(65)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .build()
            .unwrap();
        defense.current_level(10_000_000);
    }

    // Shared fixture: five levels with thresholds/factors growing by 10x.
    fn get_defense() -> Defense {
        DefenseBuilder::default()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(50)
                    .difficulty_factor(50)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(500)
                    .difficulty_factor(5000)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(5000)
                    .difficulty_factor(50000)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(50000)
                    .difficulty_factor(500000)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(500000)
                    .difficulty_factor(5000000)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .build()
            .unwrap()
    }

    // build() must keep levels sorted ascending by threshold.
    #[test]
    fn defense_builder_works() {
        let defense = get_defense();
        assert_eq!(defense.levels[0].difficulty_factor, 50);
        assert_eq!(defense.levels[1].difficulty_factor, 5000);
        assert_eq!(defense.levels[2].difficulty_factor, 50_000);
        assert_eq!(defense.levels[3].difficulty_factor, 500_000);
        assert_eq!(defense.levels[4].difficulty_factor, 5_000_000);
    }

    // current_level picks the first level whose threshold >= visitor count,
    // saturating at the hardest level; boundaries are inclusive.
    #[test]
    fn tighten_up_works() {
        let defense = get_defense();
        assert_eq!(defense.current_level(0).difficulty_factor, 50);
        assert_eq!(defense.current_level(500).difficulty_factor, 5_000);
        assert_eq!(defense.current_level(501).difficulty_factor, 50_000);
        assert_eq!(defense.current_level(5_000).difficulty_factor, 50_000);
        assert_eq!(defense.current_level(5_001).difficulty_factor, 500_000);
        assert_eq!(defense.current_level(50_000).difficulty_factor, 500_000);
        assert_eq!(defense.current_level(50_001).difficulty_factor, 5_000_000);
        assert_eq!(defense.current_level(500_000).difficulty_factor, 5_000_000);
        assert_eq!(defense.current_level(500_001).difficulty_factor, 5_000_000);
    }
}

595
src/mcaptcha/mcaptcha.rs Normal file
View file

@ -0,0 +1,595 @@
/* mCaptcha - A proof of work based DoS protection system
* Copyright © 2021 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::Arc;
use std::time::Duration;
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use super::defense::Defense;
use libmcaptcha::errors::*;
use libmcaptcha::master::messages as ManagerMessages;
/// Builder for [MCaptcha]
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct MCaptchaBuilder {
    // starting visitor count for the built captcha (defaults to 0)
    visitor_threshold: u32,
    // required: visitor-threshold -> difficulty mapping
    defense: Option<Defense>,
    // required: lifetime in seconds; build() rejects zero
    duration: Option<u64>,
}
impl Default for MCaptchaBuilder {
fn default() -> Self {
MCaptchaBuilder {
visitor_threshold: 0,
defense: None,
duration: None,
}
}
}
impl MCaptchaBuilder {
    /// set defense
    pub fn defense(&mut self, d: Defense) -> &mut Self {
        self.defense = Some(d);
        self
    }

    /// set duration
    pub fn duration(&mut self, d: u64) -> &mut Self {
        self.duration = Some(d);
        self
    }

    /// Builds new [MCaptcha]
    ///
    /// # Errors
    /// Checked in this order (matching the original):
    /// - `PleaseSetValue("duration")` when no duration was set
    /// - `PleaseSetValue("defense")` when no defense was set
    /// - `CaptchaDurationZero` when duration is zero — the original compared
    ///   `self.duration <= Some(0)`, which for a `u64` inside `Some` is
    ///   exactly `== Some(0)` once the `None` case is handled
    pub fn build(self: &mut MCaptchaBuilder) -> CaptchaResult<MCaptcha> {
        match (self.duration, &self.defense) {
            (None, _) => Err(CaptchaError::PleaseSetValue("duration".into())),
            (_, None) => Err(CaptchaError::PleaseSetValue("defense".into())),
            (Some(0), _) => Err(CaptchaError::CaptchaDurationZero),
            (Some(duration), Some(defense)) => Ok(MCaptcha {
                duration,
                defense: defense.clone(),
                visitor_threshold: Arc::new(AtomicU32::new(self.visitor_threshold)),
            }),
        }
    }
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct MCaptcha {
    // live visitor counter; behind Arc so clones of this captcha share it
    visitor_threshold: Arc<AtomicU32>,
    // visitor-threshold -> difficulty mapping used to scale PoW difficulty
    defense: Defense,
    // lifetime in seconds — used by Manager::add_visitor as the delay before
    // a visitor is decremented again
    duration: u64,
}
impl MCaptcha {
    /// Increment the visitor count by one and return the difficulty factor
    /// the new visitor must solve at the resulting level.
    #[inline]
    pub fn add_visitor(&self) -> u32 {
        // fetch_add returns the previous value; +1 gives the count including
        // this visitor
        let current_visitor_level = self.visitor_threshold.fetch_add(1, Ordering::SeqCst) + 1;
        self.defense
            .current_level(current_visitor_level)
            .difficulty_factor
    }

    /// Overwrite the visitor count with `new_current`.
    #[inline]
    pub fn set_visitor_count(&self, new_current: u32) {
        // fetch_update returns Err when the closure yields None (the counter
        // already equals new_current); both outcomes leave the counter at
        // new_current, so the #[must_use] Result is deliberately discarded.
        // The original ignored it implicitly (a must_use warning) and bound
        // the closure argument with a needless `mut`.
        let _ = self
            .visitor_threshold
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |current| {
                if current != new_current {
                    Some(new_current)
                } else {
                    None
                }
            });
    }

    /// Decrement the visitor count by `count`, clamping at zero.
    #[inline]
    pub fn decrement_visitor_by(&self, count: u32) {
        // Err from fetch_update means the counter was already 0 — nothing to
        // decrement, so the Result is deliberately discarded.
        let _ = self
            .visitor_threshold
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |current| {
                if current > 0 {
                    // same clamp the original wrote by hand with if/else
                    Some(current.saturating_sub(count))
                } else {
                    None
                }
            });
    }

    /// get [Counter]'s current visitor_threshold
    pub fn get_visitors(&self) -> u32 {
        self.visitor_threshold.load(Ordering::SeqCst)
    }
}
#[derive(Clone, Serialize, Deserialize)]
pub struct Manager {
    // captcha ID -> captcha; DashMap gives concurrent access, Arc lets clones
    // of the Manager share one registry
    pub captchas: Arc<DashMap<String, Arc<MCaptcha>>>,
    // garbage-collection period in seconds (drives the sweep scheduled by
    // clean_all_after_cold_start)
    pub gc: u64,
}
impl Manager {
    /// add [Counter] actor to [Manager]
    pub fn add_captcha(&self, m: Arc<MCaptcha>, id: String) {
        self.captchas.insert(id, m);
    }

    /// create new master
    /// accepts a `u64` to configure garbage collection period
    pub fn new(gc: u64) -> Self {
        Manager {
            captchas: Arc::new(DashMap::new()),
            gc,
        }
    }

    // Remove captchas that currently have zero visitors.
    // NOTE(review): only referenced from commented-out code in add_visitor.
    // Also, removing from a DashMap while an iterator over it is live risks
    // deadlock — verify against the dashmap docs before re-enabling.
    fn gc(captchas: Arc<DashMap<String, Arc<MCaptcha>>>) {
        for captcha in captchas.iter() {
            let visitor = { captcha.value().get_visitors() };
            if visitor == 0 {
                captchas.remove(captcha.key());
            }
        }
    }

    /// get [Counter] actor from [Manager]
    pub fn get_captcha(&self, id: &str) -> Option<Arc<MCaptcha>> {
        if let Some(captcha) = self.captchas.get(id) {
            Some(captcha.clone())
        } else {
            None
        }
    }

    /// removes [Counter] actor from [Manager]
    pub fn rm_captcha(&self, id: &str) -> Option<(String, Arc<MCaptcha>)> {
        self.captchas.remove(id)
    }

    /// renames [Counter] actor
    pub fn rename(&self, current_id: &str, new_id: String) {
        // If actor isn't present, it's okay to not throw an error
        // since actors are lazyily initialized and are cleaned up when inactive
        if let Some((_, captcha)) = self.captchas.remove(current_id) {
            self.add_captcha(captcha, new_id);
        }
    }

    /// Import all captchas from `updated` (state received after a cold
    /// start), then schedule one sweep that removes every key present at
    /// import time after `gc` seconds.
    // NOTE(review): the sweep removes those keys unconditionally after `gc`
    // seconds, even if new visitors arrived in the meantime — confirm this is
    // the intended cold-start semantics.
    pub async fn clean_all_after_cold_start(&self, updated: Manager) {
        updated.captchas.iter().for_each(|x| {
            self.captchas
                .insert(x.key().to_owned(), x.value().to_owned());
        });
        let captchas = self.clone();
        // snapshot the keys now; the spawned sweep only touches these
        let keys: Vec<String> = captchas
            .captchas
            .clone()
            .iter()
            .map(|x| x.key().to_owned())
            .collect();
        let fut = async move {
            tokio::time::sleep(Duration::new(captchas.gc, 0)).await;
            for key in keys.iter() {
                captchas.rm_captcha(key);
            }
        };
        tokio::spawn(fut);
    }

    /// Record one visitor on captcha `msg.0` and return the difficulty the
    /// visitor must solve; `None` when no such captcha exists. A background
    /// task decrements the counter again after the captcha's duration.
    pub fn add_visitor(
        &self,
        msg: &ManagerMessages::AddVisitor,
    ) -> Option<libmcaptcha::master::AddVisitorResult> {
        if let Some(captcha) = self.captchas.get(&msg.0) {
            let difficulty_factor = captcha.add_visitor();
            // let id = msg.0.clone();
            let c = captcha.clone();
            let captchas = self.captchas.clone();
            let fut = async move {
                tokio::time::sleep(Duration::new(c.duration, 0)).await;
                c.decrement_visitor_by(1);
                // Self::gc(captchas);
                // if c.get_visitors() == 0 {
                //     println!("Removing captcha addvivi");
                //     captchas.remove(&id);
                // }
            };
            tokio::spawn(fut);
            Some(libmcaptcha::master::AddVisitorResult {
                duration: captcha.duration,
                difficulty_factor,
            })
        } else {
            None
        }
    }

    /// Export every captcha as the upstream libmcaptcha type (used to
    /// snapshot internal state).
    pub fn get_internal_data(&self) -> HashMap<String, libmcaptcha::mcaptcha::MCaptcha> {
        let mut res = HashMap::with_capacity(self.captchas.len());
        for value in self.captchas.iter() {
            res.insert(value.key().to_owned(), value.value().as_ref().into());
        }
        res
    }

    /// Restore captchas from an exported snapshot, replaying each captcha's
    /// visitor count through add_visitor so the decrement timers are re-armed.
    pub fn set_internal_data(&self, mut map: HashMap<String, libmcaptcha::mcaptcha::MCaptcha>) {
        for (id, captcha) in map.drain() {
            let visitors = captcha.get_visitors();
            // the From impl intentionally starts at zero visitors; they are
            // replayed below
            let new_captcha: MCaptcha = (&captcha).into();
            let new_captcha = Arc::new(new_captcha);
            self.captchas.insert(id.clone(), new_captcha.clone());
            let msg = ManagerMessages::AddVisitor(id);
            for _ in 0..visitors {
                self.add_visitor(&msg);
            }
        }
    }
}
impl From<&libmcaptcha::mcaptcha::MCaptcha> for MCaptcha {
    /// Convert an upstream libmcaptcha captcha into the local representation.
    /// The visitor count is intentionally NOT carried over; callers replay
    /// visitors separately (see `Manager::set_internal_data`).
    fn from(value: &libmcaptcha::mcaptcha::MCaptcha) -> Self {
        let mut builder = super::defense::DefenseBuilder::default();
        for level in value.get_defense().get_levels() {
            // an already-built defense cannot contain duplicate thresholds,
            // so the error case is ignored
            let _ = builder.add_level(level);
        }
        MCaptchaBuilder::default()
            .defense(builder.build().unwrap())
            .duration(value.get_duration())
            .build()
            .unwrap()
    }
}
// Converts the internal `MCaptcha` back into a plain `libmcaptcha` value.
// In this direction the visitor count IS carried over, by replaying
// `add_visitor` on the freshly built captcha.
impl From<&MCaptcha> for libmcaptcha::mcaptcha::MCaptcha {
    fn from(value: &MCaptcha) -> Self {
        let mut defense = libmcaptcha::defense::DefenseBuilder::default();
        // `drain(0..)` moves the levels out so they can be re-added
        // without cloning — assumes `get_levels()` returns an owned Vec;
        // TODO confirm
        for level in value.defense.get_levels().drain(0..) {
            // errors ignored: levels from a built defense are assumed valid
            let _ = defense.add_level(level);
        }
        let defense = defense.build().unwrap();
        let mut new_captcha = libmcaptcha::mcaptcha::MCaptchaBuilder::default()
            .defense(defense)
            .duration(value.duration)
            .build()
            .unwrap();
        // replay visitors so the converted captcha reports the same count
        for _ in 0..value.get_visitors() {
            new_captcha.add_visitor();
        }
        new_captcha
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use libmcaptcha::defense::LevelBuilder;
    use libmcaptcha::master::messages::*;
    // (visitor threshold, difficulty factor) pairs for the two defense levels
    pub const LEVEL_1: (u32, u32) = (50, 50);
    pub const LEVEL_2: (u32, u32) = (500, 500);
    // captcha lifetime in seconds used by all fixtures below
    pub const DURATION: u64 = 5;
    use crate::mcaptcha::defense::*;
    // Builds a two-level defense: difficulty LEVEL_1.1 once LEVEL_1.0
    // visitors are seen, escalating to LEVEL_2.1 at LEVEL_2.0 visitors.
    pub fn get_defense() -> Defense {
        DefenseBuilder::default()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(LEVEL_1.0)
                    .difficulty_factor(LEVEL_1.1)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .add_level(
                LevelBuilder::default()
                    .visitor_threshold(LEVEL_2.0)
                    .difficulty_factor(LEVEL_2.1)
                    .unwrap()
                    .build()
                    .unwrap(),
            )
            .unwrap()
            .build()
            .unwrap()
    }
    // Registers `count.0 - 1` visitors against captcha `id`, driving the
    // manager just below the given level's threshold.
    async fn race(manager: &Manager, id: String, count: (u32, u32)) {
        let msg = ManagerMessages::AddVisitor(id);
        for _ in 0..count.0 as usize - 1 {
            manager.add_visitor(&msg);
        }
    }
    // pub fn get_counter() -> Counter {
    //     get_mcaptcha().into()
    // }
    // Fresh captcha fixture using the two-level defense above.
    pub fn get_mcaptcha() -> MCaptcha {
        MCaptchaBuilder::default()
            .defense(get_defense())
            .duration(DURATION)
            .build()
            .unwrap()
    }
    #[actix_rt::test]
    async fn manager_works() {
        // gc period of 1 second
        let manager = Manager::new(1);
        // let get_add_site_msg = |id: String, mcaptcha: MCaptcha| {
        //     AddSiteBuilder::default()
        //         .id(id)
        //         .mcaptcha(mcaptcha)
        //         .build()
        //         .unwrap()
        // };
        let id = "yo";
        manager.add_captcha(Arc::new(get_mcaptcha()), id.into());
        let mcaptcha_addr = manager.get_captcha(id);
        assert!(mcaptcha_addr.is_some());
        // export, mutate, and re-import internal data to exercise the
        // get/set round trip
        let mut mcaptcha_data = manager.get_internal_data();
        mcaptcha_data.get_mut(id).unwrap().add_visitor();
        mcaptcha_data.get_mut(id).unwrap().add_visitor();
        mcaptcha_data.get_mut(id).unwrap().add_visitor();
        // let mcaptcha_data: HashMap<String, libmcaptcha::mcaptcha::MCaptcha> = {
        //     let serialized = serde_json::to_string(&mcaptcha_data).unwrap();
        //     serde_json::from_str(&serialized).unwrap()
        // };
        // println!("{:?}", mcaptcha_data);
        manager.set_internal_data(mcaptcha_data);
        let mcaptcha_data = manager.get_internal_data();
        assert_eq!(
            manager.get_captcha(id).unwrap().get_visitors(),
            mcaptcha_data.get(id).unwrap().get_visitors()
        );
        // renaming must move the captcha to the new key and drop the old one
        let new_id = "yoyo";
        manager.rename(id, new_id.into());
        {
            let mcaptcha_addr = manager.get_captcha(new_id);
            assert!(mcaptcha_addr.is_some());
            let addr_doesnt_exist = manager.get_captcha(id);
            assert!(addr_doesnt_exist.is_none());
            // wait out two full captcha lifetimes so visitor timers fire
            let timer_expire = Duration::new(DURATION, 0);
            tokio::time::sleep(timer_expire).await;
            tokio::time::sleep(timer_expire).await;
        }
        // Manager::gc(manager.captchas.clone());
        // let mcaptcha_addr = manager.get_captcha(new_id);
        // assert_eq!(mcaptcha_addr.as_ref().unwrap().get_visitors(), 0);
        // assert!(mcaptcha_addr.is_none());
        //
        // assert!(
        //     manager.rm_captcha(new_id.into()).is_some());
    }
    #[actix_rt::test]
    async fn counter_defense_works() {
        let manager = Manager::new(1);
        let id = "yo";
        manager.add_captcha(Arc::new(get_mcaptcha()), id.into());
        // first visitor is served level-1 difficulty
        let mut mcaptcha = manager
            .add_visitor(&ManagerMessages::AddVisitor(id.to_string()))
            .unwrap();
        assert_eq!(mcaptcha.difficulty_factor, LEVEL_1.0);
        // drive the visitor count past the level-2 threshold
        race(&manager, id.to_string(), LEVEL_2).await;
        mcaptcha = manager
            .add_visitor(&ManagerMessages::AddVisitor(id.to_string()))
            .unwrap();
        assert_eq!(mcaptcha.difficulty_factor, LEVEL_2.1);
        // after the per-visitor timers fire, the count drains back to zero
        tokio::time::sleep(Duration::new(DURATION * 2, 0)).await;
        assert_eq!(manager.get_captcha(id).unwrap().get_visitors(), 0);
    }
}
//
//#[cfg(test)]
//pub mod tests {
// use super::*;
// use crate::defense::*;
// use crate::errors::*;
// use crate::mcaptcha;
// use crate::mcaptcha::MCaptchaBuilder;
//
// // constants for testing
// // (visitor count, level)
// pub const LEVEL_1: (u32, u32) = (50, 50);
// pub const LEVEL_2: (u32, u32) = (500, 500);
// pub const DURATION: u64 = 5;
//
// type MyActor = Addr<Counter>;
//
// pub fn get_defense() -> Defense {
// DefenseBuilder::default()
// .add_level(
// LevelBuilder::default()
// .visitor_threshold(LEVEL_1.0)
// .difficulty_factor(LEVEL_1.1)
// .unwrap()
// .build()
// .unwrap(),
// )
// .unwrap()
// .add_level(
// LevelBuilder::default()
// .visitor_threshold(LEVEL_2.0)
// .difficulty_factor(LEVEL_2.1)
// .unwrap()
// .build()
// .unwrap(),
// )
// .unwrap()
// .build()
// .unwrap()
// }
//
// async fn race(addr: Addr<Counter>, count: (u32, u32)) {
// for _ in 0..count.0 as usize - 1 {
// let _ = addr.send(AddVisitor).await.unwrap();
// }
// }
//
// pub fn get_counter() -> Counter {
// get_mcaptcha().into()
// }
//
// pub fn get_mcaptcha() -> MCaptcha {
// MCaptchaBuilder::default()
// .defense(get_defense())
// .duration(DURATION)
// .build()
// .unwrap()
// }
//
// #[test]
// fn mcaptcha_decrement_by_works() {
// let mut m = get_mcaptcha();
// for _ in 0..100 {
// m.add_visitor();
// }
// m.decrement_visitor_by(50);
// assert_eq!(m.get_visitors(), 50);
// m.decrement_visitor_by(500);
// assert_eq!(m.get_visitors(), 0);
// }
//
//
// #[actix_rt::test]
// async fn counter_defense_loosenup_works() {
// //use actix::clock::sleep;
// //use actix::clock::delay_for;
// let addr: MyActor = get_counter().start();
//
// race(addr.clone(), LEVEL_2).await;
// race(addr.clone(), LEVEL_2).await;
// let mut mcaptcha = addr.send(AddVisitor).await.unwrap();
// assert_eq!(mcaptcha.difficulty_factor, LEVEL_2.1);
//
// let duration = Duration::new(DURATION, 0);
// sleep(duration).await;
// //delay_for(duration).await;
//
// mcaptcha = addr.send(AddVisitor).await.unwrap();
// assert_eq!(mcaptcha.difficulty_factor, LEVEL_1.1);
// }
//
// #[test]
// fn test_mcatcptha_builder() {
// let defense = get_defense();
// let m = MCaptchaBuilder::default()
// .duration(0)
// .defense(defense.clone())
// .build();
//
// assert_eq!(m.err(), Some(CaptchaError::CaptchaDurationZero));
//
// let m = MCaptchaBuilder::default().duration(30).build();
// assert_eq!(
// m.err(),
// Some(CaptchaError::PleaseSetValue("defense".into()))
// );
//
// let m = MCaptchaBuilder::default().defense(defense).build();
// assert_eq!(
// m.err(),
// Some(CaptchaError::PleaseSetValue("duration".into()))
// );
// }
//
// #[actix_rt::test]
// async fn get_current_visitor_count_works() {
// let addr: MyActor = get_counter().start();
//
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// let count = addr.send(GetCurrentVisitorCount).await.unwrap();
//
// assert_eq!(count, 4);
// }
//
// #[actix_rt::test]
// #[should_panic]
// async fn stop_works() {
// let addr: MyActor = get_counter().start();
// addr.send(Stop).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// }
//
// #[actix_rt::test]
// async fn get_set_internal_data_works() {
// let addr: MyActor = get_counter().start();
// let mut mcaptcha = addr.send(GetInternalData).await.unwrap();
// mcaptcha.add_visitor();
// addr.send(SetInternalData(mcaptcha.clone())).await.unwrap();
// assert_eq!(
// addr.send(GetInternalData).await.unwrap().get_visitors(),
// mcaptcha.get_visitors()
// );
//
// let duration = Duration::new(mcaptcha.get_duration() + 3, 0);
// sleep(duration).await;
// assert_eq!(addr.send(GetCurrentVisitorCount).await.unwrap(), 0);
// }
//
// #[actix_rt::test]
// async fn bulk_delete_works() {
// let addr: MyActor = get_counter().start();
// addr.send(AddVisitor).await.unwrap();
// addr.send(AddVisitor).await.unwrap();
// assert_eq!(addr.send(GetCurrentVisitorCount).await.unwrap(), 2);
// addr.send(BulkDecrement(3)).await.unwrap();
// assert_eq!(addr.send(GetCurrentVisitorCount).await.unwrap(), 0);
// }
//}

3
src/mcaptcha/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod cache;
mod defense;
pub mod mcaptcha;

View file

@ -1,246 +0,0 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use std::sync::{Arc, RwLock};
use actix::dev::channel::channel;
use actix::WrapFuture;
use actix_web::get;
use actix_web::post;
use actix_web::web;
use actix_web::web::Data;
use actix_web::Responder;
use libmcaptcha::cache::messages::{CachedPoWConfig, RetrivePoW, VerifyCaptchaResult};
use libmcaptcha::master::messages::GetInternalData;
use serde::Deserialize;
use serde::Serialize;
use web::Json;
use crate::app::DcacheApp;
use crate::store::DcacheRequest;
use crate::store::DcacheResponse;
// Apply a single write request through the Raft log and return the
// `client_write` result as JSON (including any Raft-level error).
#[post("/write")]
pub async fn write(
    app: Data<DcacheApp>,
    req: Json<DcacheRequest>,
) -> actix_web::Result<impl Responder> {
    let response = app.raft.client_write(req.0).await;
    Ok(Json(response))
}
// Dump the master actor's full internal state as JSON.
#[get("/state")]
pub async fn state(app: Data<DcacheApp>) -> actix_web::Result<impl Responder> {
    let sm = app.store.state_machine.read().await;
    // three layers unwrapped: mailbox send -> actor future -> actor result;
    // each unwrap assumes the master actor is alive and the query succeeds
    let resp = sm
        .data
        .master
        .send(GetInternalData)
        .await
        .unwrap()
        .await
        .unwrap()
        .unwrap();
    Ok(Json(resp))
}
/// Read-only requests served from the local state machine,
/// without going through the Raft log.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum ReadRequest {
    RetrivePoW(RetrivePoW),                   // reader
    VerifyCaptchaResult(VerifyCaptchaResult), // reader
}
/// Responses paired one-to-one with `ReadRequest` variants.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum ReadResponse {
    VerifyCaptchaResult(bool),
    RetrivePoW(Option<CachedPoWConfig>),
}
// Serve a single read-only request directly from the local state
// machine, without going through the Raft log.
#[post("/read")]
pub async fn read(
    app: Data<DcacheApp>,
    req: Json<ReadRequest>,
) -> actix_web::Result<impl Responder> {
    let sm = app.store.state_machine.read().await;
    let req = req.into_inner();
    let res = match req {
        ReadRequest::RetrivePoW(msg) => {
            // mailbox send -> actor future -> actor result; the unwraps
            // assume the cache actor is alive and the query succeeds
            let cache_res = sm
                .data
                .cache
                .send(msg.clone())
                .await
                .unwrap()
                .await
                .unwrap()
                .unwrap();
            ReadResponse::RetrivePoW(cache_res)
        }
        ReadRequest::VerifyCaptchaResult(msg) => {
            let cache_res = sm
                .data
                .cache
                .send(msg.clone())
                .await
                .unwrap()
                .await
                .unwrap()
                .unwrap();
            ReadResponse::VerifyCaptchaResult(cache_res)
        }
    };
    Ok(Json(res))
}
// Serve a batch of read-only requests from the local state machine in a
// single round trip; responses are returned in request order.
#[post("/pipeline/read")]
pub async fn pipeline_read(
    app: Data<DcacheApp>,
    requests: Json<Vec<ReadRequest>>,
) -> actix_web::Result<impl Responder> {
    let requests = requests.into_inner();
    let mut responses = Vec::with_capacity(requests.len());
    let sm = app.store.state_machine.read().await;
    for request in requests {
        // same per-request dispatch as `read`
        let res = match request {
            ReadRequest::RetrivePoW(msg) => {
                let cache_res = sm
                    .data
                    .cache
                    .send(msg.clone())
                    .await
                    .unwrap()
                    .await
                    .unwrap()
                    .unwrap();
                ReadResponse::RetrivePoW(cache_res)
            }
            ReadRequest::VerifyCaptchaResult(msg) => {
                let cache_res = sm
                    .data
                    .cache
                    .send(msg.clone())
                    .await
                    .unwrap()
                    .await
                    .unwrap()
                    .unwrap();
                ReadResponse::VerifyCaptchaResult(cache_res)
            }
        };
        responses.push(res);
    }
    Ok(Json(responses))
}
/// Apply a batch of write requests through the Raft log, in order,
/// returning one `client_write` result per request.
#[post("/pipeline/write")]
pub async fn pipeline_write(
    app: Data<DcacheApp>,
    requests: Json<Vec<DcacheRequest>>,
) -> actix_web::Result<impl Responder> {
    let requests = requests.into_inner();
    let mut responses = Vec::with_capacity(requests.len());
    for req in requests.into_iter() {
        responses.push(app.raft.client_write(req).await);
    }
    Ok(Json(responses))
}
use actix::prelude::*;
use actix::{Actor, StreamHandler};
use actix_web::{Error, HttpRequest, HttpResponse};
use actix_web_actors::ws;
/// WebSocket actor that serves Raft RPCs over a single connection.
struct MyWs {
    // shared application handle, used to reach the Raft instance
    app: Data<DcacheApp>,
}
impl Actor for MyWs {
    type Context = ws::WebsocketContext<Self>;
}
/// Handler for `ws::Message`: answers pings and serves Raft RPCs
/// (vote / append-entries / install-snapshot) sent as JSON text frames,
/// replying with the serialized RPC result on the same socket.
impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
    fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
        match msg {
            Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
            Ok(ws::Message::Text(text)) => {
                // NOTE(review): `unwrap` panics on malformed frames; peers
                // are assumed to be trusted cluster members
                let msg: super::raft::RaftMessage = serde_json::from_str(&text).unwrap();
                let app = self.app.clone();
                // Run the RPC on the Raft instance and hand the serialized
                // result back to the actor context once it resolves. This
                // replaces the previous scheme of stashing the result in an
                // Arc<RwLock<..>>, which held a std lock guard across
                // `.await` and read the slot back before the waited future
                // had run (and contained leftover debug `panic!`s that
                // aborted every request).
                let fut = async move {
                    match msg {
                        super::raft::RaftMessage::VoteRequest(msg) => {
                            serde_json::to_string(&app.raft.vote(msg).await).unwrap()
                        }
                        super::raft::RaftMessage::Append(msg) => {
                            serde_json::to_string(&app.raft.append_entries(msg).await).unwrap()
                        }
                        super::raft::RaftMessage::Snapshot(msg) => {
                            serde_json::to_string(&app.raft.install_snapshot(msg).await).unwrap()
                        }
                    }
                }
                .into_actor(self)
                // once the RPC completes, send the response frame
                .map(|response, _act, ctx| ctx.text(response));
                // suspend this actor (queueing further frames) until the
                // RPC future resolves
                ctx.wait(fut);
            }
            _ => (),
        }
    }
}
// Upgrade the HTTP request to a WebSocket session handled by `MyWs`.
#[get("/ws/write")]
async fn ws_write(
    req: HttpRequest,
    app: Data<DcacheApp>,
    stream: web::Payload,
) -> Result<HttpResponse, Error> {
    let resp = ws::start(MyWs { app }, &req, stream);
    // NOTE(review): debug print of the handshake result; consider tracing
    println!("{:?}", resp);
    resp
}

View file

@ -15,66 +15,15 @@
* You should have received a copy of the GNU Affero General Public License * You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc;
use actix_web::get; //use actix_web::web;
use actix_web::post; //use actix_web::web::Data;
use actix_web::web;
use actix_web::web::Data;
use actix_web::Responder;
use openraft::error::Infallible;
use openraft::BasicNode;
use openraft::RaftMetrics;
use web::Json;
use crate::app::DcacheApp; use crate::app::DcacheApp;
use crate::DcacheNodeId; use crate::DcacheNodeId;
#[post("/add-learner")]
pub async fn add_learner(
app: Data<DcacheApp>,
req: Json<(DcacheNodeId, String)>,
) -> actix_web::Result<impl Responder> {
let node_id = req.0 .0;
let node = BasicNode {
addr: req.0 .1.clone(),
};
let res = app.raft.add_learner(node_id, node, true).await;
Ok(Json(res))
}
#[post("/change-membership")]
pub async fn change_membership(
app: Data<DcacheApp>,
req: Json<BTreeSet<DcacheNodeId>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.change_membership(req.0, false).await;
Ok(Json(res))
}
#[post("/init")]
pub async fn init(app: Data<DcacheApp>) -> actix_web::Result<impl Responder> {
let mut nodes = BTreeMap::new();
nodes.insert(
app.id,
BasicNode {
addr: app.addr.clone(),
},
);
let res = app.raft.initialize(nodes).await;
Ok(Json(res))
}
#[get("/metrics")]
pub async fn metrics(app: Data<DcacheApp>) -> actix_web::Result<impl Responder> {
let metrics = app.raft.metrics().borrow().clone();
let res: Result<RaftMetrics<DcacheNodeId, BasicNode>, Infallible> = Ok(metrics);
Ok(Json(res))
}
use tokio::sync::mpsc; use tokio::sync::mpsc;
#[derive(Debug)] #[derive(Debug)]
@ -87,7 +36,7 @@ pub struct HealthMetrics;
impl HealthMetrics { impl HealthMetrics {
pub async fn spawn( pub async fn spawn(
app: Data<DcacheApp>, app: Arc<DcacheApp>,
threshold: usize, threshold: usize,
mut rx: mpsc::Receiver<HealthStatus>, mut rx: mpsc::Receiver<HealthStatus>,
) -> tokio::task::JoinHandle<()> { ) -> tokio::task::JoinHandle<()> {
@ -114,7 +63,7 @@ impl HealthMetrics {
new_nodes.push(*node.0); new_nodes.push(*node.0);
} }
let res = let _res =
app.raft.change_membership(new_nodes, false).await.unwrap(); app.raft.change_membership(new_nodes, false).await.unwrap();
} }
} else { } else {
@ -128,20 +77,3 @@ impl HealthMetrics {
tokio::spawn(fut) tokio::spawn(fut)
} }
} }
//#[get("/self/remove/{id}")]
//pub async fn remove_node(app: Data<DcacheApp>, id: web::Path<u64>) -> actix_web::Result<impl Responder> {
// let cluster_metrics = app.raft.metrics().borrow().clone();
// let remote_id: u64 = 3;
// let mut new_nodes: Vec<DcacheNodeId> = Vec::new();
// for node in cluster_metrics.membership_config.nodes() {
// if *node.0 == remote_id {
// continue;
// }
//
// new_nodes.push(*node.0);
// }
//
// let res = app.raft.change_membership(new_nodes, false).await;
// Ok(Json(res))
//}

View file

@ -15,7 +15,7 @@
* You should have received a copy of the GNU Affero General Public License * You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
pub mod api; //pub mod api;
pub mod management; pub mod management;
pub mod raft; //pub mod raft;
pub mod raft_network_impl; pub mod raft_network_impl;

View file

@ -1,80 +0,0 @@
/*
* mCaptcha - A proof of work based DoS protection system
* Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use actix_web::post;
use actix_web::web;
use actix_web::web::Data;
use actix_web::Responder;
use openraft::error::*;
use openraft::raft::AppendEntriesRequest;
use openraft::raft::InstallSnapshotRequest;
use openraft::raft::VoteRequest;
use openraft::raft::*;
use serde::*;
use web::Json;
use crate::app::DcacheApp;
use crate::DcacheNodeId;
use crate::DcacheTypeConfig;
// --- Raft communication
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum RaftMessage {
VoteRequest(VoteRequest<DcacheNodeId>),
Append(AppendEntriesRequest<DcacheTypeConfig>),
Snapshot(InstallSnapshotRequest<DcacheTypeConfig>),
}
#[derive(Debug, Serialize, Deserialize)]
pub enum RaftRes {
VoteRes(Result<VoteResponse<DcacheNodeId>, RaftError<DcacheNodeId>>),
SnapshotRes(
Result<
InstallSnapshotResponse<DcacheNodeId>,
RaftError<DcacheNodeId, InstallSnapshotError>,
>,
),
AppendRes(Result<AppendEntriesResponse<DcacheNodeId>, RaftError<DcacheNodeId>>),
}
#[post("/raft-vote")]
pub async fn vote(
app: Data<DcacheApp>,
req: Json<VoteRequest<DcacheNodeId>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.vote(req.0).await;
Ok(Json(res))
}
#[post("/raft-append")]
pub async fn append(
app: Data<DcacheApp>,
req: Json<AppendEntriesRequest<DcacheTypeConfig>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.append_entries(req.0).await;
Ok(Json(res))
}
#[post("/raft-snapshot")]
pub async fn snapshot(
app: Data<DcacheApp>,
req: Json<InstallSnapshotRequest<DcacheTypeConfig>>,
) -> actix_web::Result<impl Responder> {
let res = app.raft.install_snapshot(req.0).await;
Ok(Json(res))
}

View file

@ -1,6 +1,3 @@
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashSet;
/* /*
* mCaptcha - A proof of work based DoS protection system * mCaptcha - A proof of work based DoS protection system
* Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net> * Copyright © 2023 Aravinth Manivannan <realravinth@batsense.net>
@ -18,24 +15,17 @@ use std::collections::HashSet;
* You should have received a copy of the GNU Affero General Public License * You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>. * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use std::sync::RwLock;
use std::time::Duration; use std::time::Duration;
use std::time::Instant;
use futures_util::{future, pin_mut, StreamExt};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio_tungstenite::{connect_async, tungstenite::protocol::Message};
use tokio::sync::mpsc;
use super::management::HealthStatus;
use crate::DcacheNodeId;
use crate::DcacheTypeConfig;
use async_trait::async_trait; use async_trait::async_trait;
use openraft::error::InstallSnapshotError; use openraft::error::InstallSnapshotError;
use openraft::error::NetworkError; use openraft::error::NetworkError;
use openraft::error::RPCError; use openraft::error::RPCError;
use openraft::error::RaftError; use openraft::error::RaftError;
use openraft::error::RemoteError;
use openraft::raft::AppendEntriesRequest; use openraft::raft::AppendEntriesRequest;
use openraft::raft::AppendEntriesResponse; use openraft::raft::AppendEntriesResponse;
use openraft::raft::InstallSnapshotRequest; use openraft::raft::InstallSnapshotRequest;
@ -45,70 +35,130 @@ use openraft::raft::VoteResponse;
use openraft::BasicNode; use openraft::BasicNode;
use openraft::RaftNetwork; use openraft::RaftNetwork;
use openraft::RaftNetworkFactory; use openraft::RaftNetworkFactory;
use reqwest::Client;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use serde::Serialize; use serde::Serialize;
use tokio::sync::mpsc::Sender; use tokio::sync::mpsc::Sender;
use tonic::transport::channel::Channel;
use tower_service::Service;
use super::management::HealthStatus; use crate::pool::*;
use super::raft::{RaftMessage, RaftRes}; use crate::protobuf::dcache::dcache_service_client::DcacheServiceClient;
use crate::store::DcacheRequest; use crate::protobuf::dcache::RaftRequest;
use crate::store::DcacheResponse;
use crate::DcacheNodeId; #[derive(Debug)]
use crate::DcacheTypeConfig; struct ChannelManager {}
#[async_trait]
impl ItemManager for ChannelManager {
type Key = String;
type Item = Channel;
type Error = tonic::transport::Error;
async fn build(&self, addr: &Self::Key) -> Result<Channel, tonic::transport::Error> {
tonic::transport::Endpoint::new(addr.clone())?
.connect()
.await
}
async fn check(&self, mut ch: Channel) -> Result<Channel, tonic::transport::Error> {
futures::future::poll_fn(|cx| (&mut ch).poll_ready(cx)).await?;
Ok(ch)
}
}
#[derive(Clone)]
pub struct DcacheNetwork { pub struct DcacheNetwork {
pub signal: Sender<HealthStatus>, pub signal: Sender<HealthStatus>,
pub client: Client, conn_pool: Pool<ChannelManager>,
}
pub enum RPCType {
Vote,
Snapshot,
Append,
} }
impl DcacheNetwork { impl DcacheNetwork {
pub fn new(signal: Sender<HealthStatus>, client: Client) -> Self { pub fn new(signal: Sender<HealthStatus>) -> Self {
Self { signal, client } let mgr = ChannelManager {};
Self {
signal,
conn_pool: Pool::new(mgr, Duration::from_millis(50)),
}
} }
pub async fn send_rpc<Req, Resp, Err>( pub async fn send_rpc<Req, Resp, Err>(
&self, &self,
target: DcacheNodeId, target: DcacheNodeId,
target_node: &BasicNode, target_node: &BasicNode,
uri: &str,
req: Req, req: Req,
event: RPCType,
) -> Result<Resp, RPCError<DcacheNodeId, BasicNode, Err>> ) -> Result<Resp, RPCError<DcacheNodeId, BasicNode, Err>>
where where
Req: Serialize, Req: Serialize,
Err: std::error::Error + DeserializeOwned, Err: std::error::Error + DeserializeOwned,
Resp: DeserializeOwned, Resp: DeserializeOwned,
{ {
let addr = &target_node.addr; let mut client = self.make_client(&target, target_node).await;
let url = format!("http://{}/{}", addr, uri); let res = match event {
RPCType::Vote => {
tracing::debug!("send_rpc to url: {}", url); client
.vote(RaftRequest {
let resp = match self.client.post(url).json(&req).send().await { data: serde_json::to_string(&req).unwrap(),
Ok(resp) => Ok(resp), })
Err(e) => {
self.signal.send(HealthStatus::Down(target)).await;
Err(RPCError::Network(NetworkError::new(&e)))
}
}?;
tracing::debug!("client.post() is sent");
let res: Result<Resp, Err> = resp
.json()
.await .await
.map_err(|e| RPCError::Network(NetworkError::new(&e)))?; }
let res = res.map_err(|e| RPCError::RemoteError(RemoteError::new(target, e))); RPCType::Snapshot => {
if res.is_ok() { client
.install_snapshot(RaftRequest {
data: serde_json::to_string(&req).unwrap(),
})
.await
}
RPCType::Append => {
client
.append_entries(RaftRequest {
data: serde_json::to_string(&req).unwrap(),
})
.await
}
};
match res {
Ok(res) => {
let signal2 = self.signal.clone(); let signal2 = self.signal.clone();
let fut = async move { let fut = async move {
let _ = signal2.send(HealthStatus::Healthy(target)).await; let _ = signal2.send(HealthStatus::Healthy(target)).await;
}; };
tokio::spawn(fut); tokio::spawn(fut);
let res = res.into_inner();
Ok(serde_json::from_str(&res.data).unwrap())
} }
res Err(e) => {
let _ = self.signal.send(HealthStatus::Down(target)).await;
Err(RPCError::Network(NetworkError::new(&e)))
}
}
}
pub async fn make_client(
&self,
target: &DcacheNodeId,
target_node: &BasicNode,
) -> DcacheServiceClient<Channel> {
let addr = format!("http://{}", &target_node.addr);
tracing::debug!("connect: target={}: {}", target, addr);
let channel = self.conn_pool.get(&addr).await.unwrap();
let client = DcacheServiceClient::new(channel);
tracing::info!("connected: target={}: {}", target, addr);
client
} }
} }
@ -119,20 +169,10 @@ impl RaftNetworkFactory<DcacheTypeConfig> for Arc<DcacheNetwork> {
type Network = DcacheNetworkConnection; type Network = DcacheNetworkConnection;
async fn new_client(&mut self, target: DcacheNodeId, node: &BasicNode) -> Self::Network { async fn new_client(&mut self, target: DcacheNodeId, node: &BasicNode) -> Self::Network {
let addr = &node.addr;
let url = format!("ws://{}/{}", addr, "ws/write");
let (write, rx) = mpsc::channel(30);
let (tx, read) = mpsc::channel(30);
let ws_client = WSClient::spawn(rx, tx, url).await;
DcacheNetworkConnection { DcacheNetworkConnection {
owner: self.clone(), owner: self.clone(),
target, target,
target_node: node.clone(), target_node: node.clone(),
// ws_client,
read,
write,
} }
} }
} }
@ -141,9 +181,6 @@ pub struct DcacheNetworkConnection {
owner: Arc<DcacheNetwork>, owner: Arc<DcacheNetwork>,
target: DcacheNodeId, target: DcacheNodeId,
target_node: BasicNode, target_node: BasicNode,
// ws_client: WSClient,
write: mpsc::Sender<RaftMessage>,
read: mpsc::Receiver<RaftRes>,
} }
#[async_trait] #[async_trait]
@ -155,16 +192,9 @@ impl RaftNetwork<DcacheTypeConfig> for DcacheNetworkConnection {
AppendEntriesResponse<DcacheNodeId>, AppendEntriesResponse<DcacheNodeId>,
RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId>>, RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId>>,
> { > {
self.write.send(RaftMessage::Append(req)).await.unwrap(); self.owner
match self.read.recv().await.unwrap() { .send_rpc(self.target, &self.target_node, req, RPCType::Append)
RaftRes::AppendRes(res) => { .await
res.map_err(|e| RPCError::RemoteError(RemoteError::new(self.target, e)))
}
_ => unimplemented!(),
}
// self.owner
// .send_rpc(self.target, &self.target_node, "raft-append", req)
// .await
} }
async fn send_install_snapshot( async fn send_install_snapshot(
@ -174,16 +204,9 @@ impl RaftNetwork<DcacheTypeConfig> for DcacheNetworkConnection {
InstallSnapshotResponse<DcacheNodeId>, InstallSnapshotResponse<DcacheNodeId>,
RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId, InstallSnapshotError>>, RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId, InstallSnapshotError>>,
> { > {
// self.owner self.owner
// .send_rpc(self.target, &self.target_node, "raft-snapshot", req) .send_rpc(self.target, &self.target_node, req, RPCType::Append)
// .await .await
self.write.send(RaftMessage::Snapshot(req)).await.unwrap();
match self.read.recv().await.unwrap() {
RaftRes::SnapshotRes(res) => {
res.map_err(|e| RPCError::RemoteError(RemoteError::new(self.target, e)))
}
_ => unimplemented!(),
}
} }
async fn send_vote( async fn send_vote(
@ -193,53 +216,8 @@ impl RaftNetwork<DcacheTypeConfig> for DcacheNetworkConnection {
VoteResponse<DcacheNodeId>, VoteResponse<DcacheNodeId>,
RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId>>, RPCError<DcacheNodeId, BasicNode, RaftError<DcacheNodeId>>,
> { > {
// self.owner self.owner
// .send_rpc(self.target, &self.target_node, "raft-vote", req) .send_rpc(self.target, &self.target_node, req, RPCType::Vote)
// .await
self.write
.send(RaftMessage::VoteRequest(req))
.await .await
.unwrap();
match self.read.recv().await.unwrap() {
RaftRes::VoteRes(res) => {
res.map_err(|e| RPCError::RemoteError(RemoteError::new(self.target, e)))
}
_ => unimplemented!(),
}
}
}
pub struct WSClient;
impl WSClient {
pub async fn spawn(
mut rx: mpsc::Receiver<RaftMessage>,
tx: mpsc::Sender<RaftRes>,
url: String,
) {
use futures_util::SinkExt;
let (ws_stream, _) = connect_async(url).await.expect("Failed to connect");
println!("WebSocket handshake has been successfully completed");
let (mut write, mut read) = ws_stream.split();
let fut = async move {
while let Some(msg) = rx.recv().await {
write
.send(Message::Text(serde_json::to_string(&msg).unwrap()))
.await
.unwrap();
match read.next().await.unwrap().unwrap() {
Message::Text(msg) => {
tx.send(serde_json::from_str(&msg).unwrap()).await;
}
_ => (),
}
}
};
tokio::spawn(fut);
} }
} }

154
src/pool.rs Normal file
View file

@ -0,0 +1,154 @@
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
use async_trait::async_trait;
use tokio::time::sleep;
//use log::debug;
//use crate::base::tokio;
pub type PoolItem<T> = Arc<tokio::sync::Mutex<Option<T>>>;
/// To build or check an item.
///
/// When an item is requested, ItemManager `build()` one for the pool.
/// When an item is reused, ItemManager `check()` if it is still valid.
#[async_trait]
pub trait ItemManager {
    /// Key the pool indexes items by (e.g. a remote address).
    type Key;
    /// The pooled resource itself (e.g. a connection).
    type Item;
    /// Error produced by `build()` / `check()`.
    type Error;
    /// Make a new item to put into the pool.
    ///
    /// An impl should hold that an item returned by `build()` is passed `check()`.
    async fn build(&self, key: &Self::Key) -> Result<Self::Item, Self::Error>;
    /// Check if an existent item still valid.
    ///
    /// E.g.: check if a tcp connection still alive.
    /// If the item is valid, `check` should return it in a Ok().
    /// Otherwise, the item should be dropped and `check` returns an Err().
    async fn check(&self, item: Self::Item) -> Result<Self::Item, Self::Error>;
}
/// Pool assumes the items in it is `Clone`, thus it keeps only one item for each key.
#[allow(clippy::type_complexity)]
#[derive(Debug, Clone)]
pub struct Pool<Mgr>
where
    Mgr: ItemManager + Debug,
{
    /// The first sleep time when `build()` fails.
    /// The next sleep time is 2 times of the previous one.
    pub initial_retry_interval: Duration,
    /// Pooled items indexed by key.
    pub items: Arc<Mutex<HashMap<Mgr::Key, PoolItem<Mgr::Item>>>>,
    // builds new items and validates reused ones
    manager: Mgr,
    // ties `Mgr::Error` into the type without storing a value of it
    err_type: PhantomData<Mgr::Error>,
    // how many times `get()` retries a failing `build()`
    n_retries: u32,
}
impl<Mgr> Pool<Mgr>
where
    Mgr: ItemManager + Debug,
    Mgr::Key: Clone + Eq + Hash + Send + Debug,
    Mgr::Item: Clone + Sync + Send + Debug,
    Mgr::Error: Sync + Debug,
{
    /// Create a pool with the given item manager and initial retry interval.
    ///
    /// Defaults to 3 build attempts; tune with [`Pool::with_retries`].
    pub fn new(manager: Mgr, initial_retry_interval: Duration) -> Self {
        Pool {
            initial_retry_interval,
            items: Default::default(),
            manager,
            err_type: Default::default(),
            n_retries: 3,
        }
    }
    /// Set how many times `get()` will attempt to `build()` a new item.
    ///
    /// A value of 0 is treated as 1: at least one build attempt is always made.
    pub fn with_retries(mut self, retries: u32) -> Self {
        self.n_retries = retries;
        self
    }
    /// Borrow the underlying item manager.
    pub fn item_manager(&self) -> &Mgr {
        &self.manager
    }
    /// Return the raw pool slot for `key`, inserting an empty one if absent.
    ///
    /// The returned slot may be uninitialized, i.e., it contains a `None`.
    /// The lock on `items` is held only briefly here; slow work such as
    /// `build()` (which can take dozens of ms for a connection) happens
    /// outside it, under the per-slot async lock.
    fn get_pool_item(&self, key: &Mgr::Key) -> PoolItem<Mgr::Item> {
        let mut items = self.items.lock().unwrap();
        if let Some(item) = items.get(key) {
            item.clone()
        } else {
            let item = PoolItem::default();
            items.insert(key.clone(), item.clone());
            item
        }
    }
    /// Return an item, by cloning an existing one or building a new one.
    ///
    /// A cached item is first re-validated with `check()` (e.g. to confirm a
    /// tcp connection is alive). On failure, a new item is built with
    /// exponential backoff between attempts, and the last build error is
    /// returned if every attempt fails.
    pub async fn get(&self, key: &Mgr::Key) -> Result<Mgr::Item, Mgr::Error> {
        let pool_item = self.get_pool_item(key);
        let mut guard = pool_item.lock().await;
        // Fast path: reuse the cached item if it still passes `check()`.
        let item_opt = (*guard).clone();
        if let Some(ref item) = item_opt {
            let check_res = self.manager.check(item.clone()).await;
            if let Ok(itm) = check_res {
                return Ok(itm);
            } else {
                // Mark broken item as deleted; fall through and rebuild.
                *guard = None;
            }
        }
        // Slow path: build a fresh item, retrying with doubling backoff.
        // Clamp to at least one attempt so `with_retries(0)` cannot skip the
        // loop entirely and hit the `unreachable!` below (previous behavior).
        let attempts = self.n_retries.max(1);
        let mut interval = self.initial_retry_interval;
        for i in 0..attempts {
            let new_item = self.manager.build(key).await;
            match new_item {
                Ok(x) => {
                    *guard = Some(x.clone());
                    return Ok(x);
                }
                Err(err) => {
                    // Final attempt: surface the build error to the caller.
                    if i == attempts - 1 {
                        return Err(err);
                    }
                }
            }
            sleep(interval).await;
            interval *= 2;
        }
        unreachable!("the loop always returns on the final attempt");
    }
}

546
src/protobuf.rs Normal file
View file

@ -0,0 +1,546 @@
use std::sync::Arc;
use libmcaptcha::cache::messages as CacheMessages;
use libmcaptcha::defense;
use libmcaptcha::master::messages as MasterMessages;
use libmcaptcha::mcaptcha;
use openraft::BasicNode;
use serde::de::DeserializeOwned;
use serde::Serialize;
use tonic::Response;
use dcache::dcache_request::DcacheRequest as PipelineReq;
use dcache::dcache_response::DcacheResponse as InnerPipelineRes;
use dcache::dcache_service_server::DcacheService;
use dcache::DcacheResponse as OuterPipelineRes;
use dcache::{Learner, RaftReply, RaftRequest};
use crate::app::DcacheApp;
use crate::store::{DcacheRequest, DcacheResponse};
pub mod dcache {
tonic::include_proto!("dcache"); // The string specified here must match the proto package name
}
/// gRPC service handler; all methods delegate to the shared [`DcacheApp`]
/// (its Raft handle and state machine store).
#[derive(Clone)]
pub struct MyDcacheImpl {
    // Shared application state: Raft node plus store.
    app: Arc<DcacheApp>,
}
impl MyDcacheImpl {
pub fn new(app: Arc<DcacheApp>) -> Self {
Self { app }
}
}
#[tonic::async_trait]
impl DcacheService for MyDcacheImpl {
    /// Register a new learner node with the Raft cluster (blocking until the
    /// membership change is committed, per `add_learner(.., true)`).
    async fn add_learner(
        &self,
        request: tonic::Request<Learner>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let node_id = req.id;
        let node = BasicNode {
            addr: req.addr.clone(),
        };
        println!("Learner added: {:?}", &req.addr);
        let res = self.app.raft.add_learner(node_id, node, true).await;
        Ok(Response::new(res.into()))
    }
    /// Add a captcha configuration via a replicated Raft write.
    async fn add_captcha(
        &self,
        request: tonic::Request<dcache::AddCaptchaRequest>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::AddCaptcha(req.into()))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Record a visitor for a captcha via a replicated Raft write.
    ///
    /// Raft write errors are surfaced as `Internal` status with the error
    /// JSON-encoded in the message.
    async fn add_visitor(
        &self,
        request: tonic::Request<dcache::CaptchaId>,
    ) -> std::result::Result<tonic::Response<dcache::OptionAddVisitorResult>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::AddVisitor(MasterMessages::AddVisitor(
                req.id,
            )))
            .await
            .map_err(|e| {
                tonic::Status::new(tonic::Code::Internal, serde_json::to_string(&e).unwrap())
            })?;
        // Only an AddVisitorResult is a valid response to an AddVisitor write;
        // any other variant indicates a state-machine bug.
        match res.data {
            DcacheResponse::AddVisitorResult(res) => {
                Ok(Response::new(dcache::OptionAddVisitorResult {
                    result: res.map(|f| f.into()),
                }))
            }
            _ => unimplemented!(),
        }
    }
    /// Rename a captcha via a replicated Raft write.
    async fn rename_captcha(
        &self,
        request: tonic::Request<dcache::RenameCaptchaRequest>,
    ) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::RenameCaptcha(req.into()))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Remove a captcha via a replicated Raft write.
    async fn remove_captcha(
        &self,
        request: tonic::Request<dcache::CaptchaId>,
    ) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::RemoveCaptcha(MasterMessages::RemoveCaptcha(
                req.id,
            )))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Cache a proof-of-work config via a replicated Raft write.
    async fn cache_pow(
        &self,
        request: tonic::Request<dcache::CachePowRequest>,
    ) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::CachePoW(req.into()))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Read a cached PoW config directly from the local state machine.
    ///
    /// This is a read-only path: no Raft write, just the state-machine read lock.
    async fn retrieve_pow(
        &self,
        request: tonic::Request<dcache::RetrievePowRequest>,
    ) -> std::result::Result<tonic::Response<dcache::OptionalRetrievePoWResponse>, tonic::Status>
    {
        let req = request.into_inner();
        let sm = self.app.store.state_machine.read().await;
        let res = sm.results.retrieve_pow_config(req.into());
        Ok(Response::new(dcache::OptionalRetrievePoWResponse {
            result: res.map(|x| x.into()),
        }))
    }
    /// Delete a cached PoW config via a replicated Raft write.
    async fn delete_pow(
        &self,
        request: tonic::Request<dcache::DeletePowRequest>,
    ) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::DeletePoW(CacheMessages::DeletePoW(
                req.string,
            )))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Cache a captcha verification token via a replicated Raft write.
    async fn cache_result(
        &self,
        request: tonic::Request<dcache::CacheResultRequest>,
    ) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::CacheResult(req.into()))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Verify a captcha result against the local state machine (read-only).
    async fn verify_captcha_result(
        &self,
        request: tonic::Request<dcache::RetrievePowRequest>,
    ) -> std::result::Result<tonic::Response<dcache::CaptchaResultVerified>, tonic::Status> {
        let req = request.into_inner();
        let sm = self.app.store.state_machine.read().await;
        let verified = sm.results.verify_captcha_result(req.into());
        Ok(Response::new(dcache::CaptchaResultVerified { verified }))
    }
    /// Delete a captcha verification token via a replicated Raft write.
    async fn delete_captcha_result(
        &self,
        request: tonic::Request<dcache::DeleteCaptchaResultRequest>,
    ) -> std::result::Result<tonic::Response<dcache::RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let res = self
            .app
            .raft
            .client_write(DcacheRequest::DeleteCaptchaResult(
                CacheMessages::DeleteCaptchaResult { token: req.token },
            ))
            .await;
        Ok(Response::new(res.into()))
    }
    /// Check whether a captcha exists in the local state machine (read-only).
    async fn captcha_exists(
        &self,
        request: tonic::Request<dcache::CaptchaId>,
    ) -> std::result::Result<tonic::Response<dcache::CaptchaExistsResponse>, tonic::Status> {
        let req = request.into_inner();
        let sm = self.app.store.state_machine.read().await;
        let exists = sm.counter.get_captcha(&req.id).is_some();
        Ok(Response::new(dcache::CaptchaExistsResponse { exists }))
    }
    /// Read the current visitor count for a captcha (read-only); `None` result
    /// when the captcha is unknown.
    async fn get_visitor_count(
        &self,
        request: tonic::Request<dcache::CaptchaId>,
    ) -> std::result::Result<tonic::Response<dcache::OptionGetVisitorCountResponse>, tonic::Status>
    {
        let req = request.into_inner();
        let sm = self.app.store.state_machine.read().await;
        if let Some(captcha) = sm.counter.get_captcha(&req.id) {
            let res = captcha.get_visitors();
            Ok(Response::new(dcache::OptionGetVisitorCountResponse {
                result: Some(dcache::GetVisitorCountResponse { visitors: res }),
            }))
        } else {
            Ok(Response::new(dcache::OptionGetVisitorCountResponse {
                result: None,
            }))
        }
    }
    // Earlier streaming variant of the pipeline endpoint, kept for reference:
    // type PipelineDcacheOpsStream =
    //     Pin<Box<dyn Stream<Item = Result<OuterPipelineRes, tonic::Status>> + Send + 'static>>;
    // async fn pipeline_dcache_ops(
    //     &self,
    //     request: tonic::Request<tonic::Streaming<dcache::DcacheRequest>>,
    // ) -> std::result::Result<tonic::Response<Self::PipelineDcacheOpsStream>, tonic::Status> {
    /// Execute a batch of dcache operations sequentially, returning one
    /// response per request in the same order.
    ///
    /// Write operations go through Raft; `CaptchaExists` and `GetVisitorCount`
    /// are served from the local state machine. A failed `AddVisitor` write is
    /// encoded as a response with `dcache_response: None` rather than failing
    /// the whole batch.
    async fn pipeline_dcache_ops(
        &self,
        request: tonic::Request<dcache::DcacheBatchRequest>,
    ) -> Result<Response<dcache::DcacheBatchResponse>, tonic::Status> {
        let mut reqs = request.into_inner();
        let mut responses = Vec::with_capacity(reqs.requests.len());
        for req in reqs.requests.drain(0..) {
            let res = match req.dcache_request.unwrap() {
                PipelineReq::AddCaptcha(add_captcha_req) => {
                    let res = self
                        .app
                        .raft
                        .client_write(DcacheRequest::AddCaptcha(add_captcha_req.into()))
                        .await;
                    OuterPipelineRes {
                        dcache_response: Some(InnerPipelineRes::Other(res.into())),
                    }
                }
                PipelineReq::AddVisitor(add_visitor_req) => {
                    let res = self
                        .app
                        .raft
                        .client_write(DcacheRequest::AddVisitor(MasterMessages::AddVisitor(
                            add_visitor_req.id,
                        )))
                        .await;
                    match res {
                        Err(_) => OuterPipelineRes {
                            dcache_response: None,
                        },
                        Ok(res) => match res.data {
                            DcacheResponse::AddVisitorResult(res) => {
                                let res = dcache::OptionAddVisitorResult {
                                    result: res.map(|f| f.into()),
                                };
                                OuterPipelineRes {
                                    dcache_response: Some(
                                        InnerPipelineRes::OptionAddVisitorResult(res),
                                    ),
                                }
                            }
                            // Any other variant indicates a state-machine bug.
                            _ => unimplemented!(),
                        },
                    }
                }
                PipelineReq::RenameCaptcha(rename_captcha_req) => {
                    let res = self
                        .app
                        .raft
                        .client_write(DcacheRequest::RenameCaptcha(rename_captcha_req.into()))
                        .await;
                    OuterPipelineRes {
                        dcache_response: Some(InnerPipelineRes::Other(res.into())),
                    }
                }
                PipelineReq::RemoveCaptcha(remove_captcha_req) => {
                    let res = self
                        .app
                        .raft
                        .client_write(DcacheRequest::RemoveCaptcha(MasterMessages::RemoveCaptcha(
                            remove_captcha_req.id,
                        )))
                        .await;
                    OuterPipelineRes {
                        dcache_response: Some(InnerPipelineRes::Other(res.into())),
                    }
                }
                PipelineReq::CachePow(cache_pow_req) => {
                    let res = self
                        .app
                        .raft
                        .client_write(DcacheRequest::CachePoW(cache_pow_req.into()))
                        .await;
                    OuterPipelineRes {
                        dcache_response: Some(InnerPipelineRes::Other(res.into())),
                    }
                }
                PipelineReq::CacheResult(cache_result_req) => {
                    let res = self
                        .app
                        .raft
                        .client_write(DcacheRequest::CacheResult(cache_result_req.into()))
                        .await;
                    OuterPipelineRes {
                        dcache_response: Some(InnerPipelineRes::Other(res.into())),
                    }
                }
                PipelineReq::CaptchaExists(captcha_exists_req) => {
                    let sm = self.app.store.state_machine.read().await;
                    let exists = sm.counter.get_captcha(&captcha_exists_req.id).is_some();
                    let res = dcache::CaptchaExistsResponse { exists };
                    // Release the read lock before building the response.
                    drop(sm);
                    OuterPipelineRes {
                        dcache_response: Some(InnerPipelineRes::CaptchaExists(res)),
                    }
                }
                PipelineReq::GetVisitorCount(get_visitor_count_req) => {
                    let sm = self.app.store.state_machine.read().await;
                    if let Some(captcha) = sm.counter.get_captcha(&get_visitor_count_req.id) {
                        let res = captcha.get_visitors();
                        OuterPipelineRes {
                            dcache_response: Some(InnerPipelineRes::GetVisitorCount(
                                dcache::OptionGetVisitorCountResponse {
                                    result: Some(dcache::GetVisitorCountResponse { visitors: res }),
                                },
                            )),
                        }
                    } else {
                        OuterPipelineRes {
                            dcache_response: Some(InnerPipelineRes::GetVisitorCount(
                                dcache::OptionGetVisitorCountResponse { result: None },
                            )),
                        }
                    }
                }
            };
            responses.push(res);
        }
        Ok(Response::new(dcache::DcacheBatchResponse { responses }))
    }
    /// Apply a raw JSON-encoded client write through Raft.
    // NOTE(review): `serde_json::from_str(..).unwrap()` panics on malformed
    // peer input here and in append_entries/install_snapshot/vote below —
    // presumably only trusted cluster peers call these; confirm.
    async fn write(
        &self,
        request: tonic::Request<RaftRequest>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let req = serde_json::from_str(&req.data).unwrap();
        let res = self.app.raft.client_write(req).await;
        Ok(Response::new(res.into()))
    }
    /// Forward a request to another node; not implemented for this service.
    async fn forward(
        &self,
        _request: tonic::Request<RaftRequest>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        unimplemented!();
    }
    /// Raft RPC: append log entries from the leader.
    async fn append_entries(
        &self,
        request: tonic::Request<RaftRequest>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let req = serde_json::from_str(&req.data).unwrap();
        let res = self.app.raft.append_entries(req).await;
        Ok(Response::new(res.into()))
    }
    /// Raft RPC: install a snapshot from the leader.
    async fn install_snapshot(
        &self,
        request: tonic::Request<RaftRequest>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let req = serde_json::from_str(&req.data).unwrap();
        let res = self.app.raft.install_snapshot(req).await;
        Ok(Response::new(res.into()))
    }
    /// Raft RPC: respond to a vote request during leader election.
    async fn vote(
        &self,
        request: tonic::Request<RaftRequest>,
    ) -> std::result::Result<tonic::Response<RaftReply>, tonic::Status> {
        let req = request.into_inner();
        let req = serde_json::from_str(&req.data).unwrap();
        let res = self.app.raft.vote(req).await;
        Ok(Response::new(res.into()))
    }
}
/// Decode a [`RaftReply`] back into a typed `Result`: non-empty `data` is the
/// JSON-encoded `Ok` payload, otherwise `error` is the JSON-encoded `Err`.
// NOTE(review): a reply with both `data` and `error` empty panics in the
// second `expect` — presumably the peer always fills exactly one field
// (see the `From<Result<T, E>>` impl in this file); confirm.
impl<T, E> From<RaftReply> for Result<T, E>
where
    T: DeserializeOwned,
    E: DeserializeOwned,
{
    fn from(msg: RaftReply) -> Self {
        if !msg.data.is_empty() {
            let resp: T = serde_json::from_str(&msg.data).expect("fail to deserialize");
            Ok(resp)
        } else {
            let err: E = serde_json::from_str(&msg.error).expect("fail to deserialize");
            Err(err)
        }
    }
}
impl<T, E> From<Result<T, E>> for RaftReply
where
T: Serialize,
E: Serialize,
{
fn from(r: Result<T, E>) -> Self {
match r {
Ok(x) => {
let data = serde_json::to_string(&x).expect("fail to serialize");
RaftReply {
data,
error: Default::default(),
}
}
Err(e) => {
let error = serde_json::to_string(&e).expect("fail to serialize");
RaftReply {
data: Default::default(),
error,
}
}
}
}
}
/// Convert a protobuf AddCaptcha request into the libmcaptcha `AddSite` message,
/// rebuilding the defense levels and the MCaptcha config through their builders.
// NOTE(review): the proto optional fields (`mcaptcha`, `defense`) and every
// builder step are unwrapped — malformed client input panics the handler
// instead of returning a status; presumably inputs are pre-validated upstream,
// confirm.
impl From<dcache::AddCaptchaRequest> for MasterMessages::AddSite {
    fn from(value: dcache::AddCaptchaRequest) -> Self {
        let req_mcaptcha = value.mcaptcha.unwrap();
        let mut defense = req_mcaptcha.defense.unwrap();
        let mut new_defense = defense::DefenseBuilder::default();
        // Re-create each difficulty level from its wire representation.
        for level in defense.levels.drain(0..) {
            new_defense
                .add_level(
                    defense::LevelBuilder::default()
                        .difficulty_factor(level.difficulty_factor)
                        .unwrap()
                        .visitor_threshold(level.visitor_threshold)
                        .build()
                        .unwrap(),
                )
                .unwrap();
        }
        let defense = new_defense.build().unwrap();
        let mcaptcha = mcaptcha::MCaptchaBuilder::default()
            .defense(defense)
            .duration(req_mcaptcha.duration)
            .build()
            .unwrap();
        Self {
            id: value.id,
            mcaptcha,
        }
    }
}
impl From<libmcaptcha::master::AddVisitorResult> for dcache::AddVisitorResult {
    /// Map the library's visitor result onto its protobuf counterpart.
    fn from(value: libmcaptcha::master::AddVisitorResult) -> Self {
        let duration = value.duration;
        let difficulty_factor = value.difficulty_factor;
        dcache::AddVisitorResult {
            duration,
            difficulty_factor,
        }
    }
}
impl From<dcache::RenameCaptchaRequest> for MasterMessages::Rename {
    /// Map a wire-level rename request onto the actor message.
    fn from(value: dcache::RenameCaptchaRequest) -> Self {
        let name = value.name;
        let rename_to = value.rename_to;
        MasterMessages::Rename { name, rename_to }
    }
}
impl From<dcache::CachePowRequest> for CacheMessages::CachePoW {
    /// Map a wire-level CachePoW request onto the cache message.
    fn from(value: dcache::CachePowRequest) -> Self {
        let string = value.string;
        let difficulty_factor = value.difficulty_factor;
        let duration = value.duration;
        let key = value.key;
        CacheMessages::CachePoW {
            string,
            difficulty_factor,
            duration,
            key,
        }
    }
}
impl From<CacheMessages::CachePoW> for dcache::CachePowRequest {
    /// Inverse mapping: cache message back to its wire representation.
    fn from(value: CacheMessages::CachePoW) -> Self {
        let string = value.string;
        let difficulty_factor = value.difficulty_factor;
        let duration = value.duration;
        let key = value.key;
        dcache::CachePowRequest {
            string,
            difficulty_factor,
            duration,
            key,
        }
    }
}
impl From<CacheMessages::CachedPoWConfig> for dcache::RetrievePowResponse {
    /// Map a cached PoW config onto the wire-level retrieve response.
    fn from(value: CacheMessages::CachedPoWConfig) -> Self {
        let difficulty_factor = value.difficulty_factor;
        let duration = value.duration;
        let key = value.key;
        dcache::RetrievePowResponse {
            difficulty_factor,
            duration,
            key,
        }
    }
}
impl From<dcache::CacheResultRequest> for CacheMessages::CacheResult {
    /// Map a wire-level CacheResult request onto the cache message.
    fn from(value: dcache::CacheResultRequest) -> Self {
        let token = value.token;
        let key = value.key;
        let duration = value.duration;
        CacheMessages::CacheResult {
            token,
            key,
            duration,
        }
    }
}
impl From<dcache::RetrievePowRequest> for CacheMessages::VerifyCaptchaResult {
    /// Map a wire-level retrieve/verify request onto the verification message.
    fn from(value: dcache::RetrievePowRequest) -> Self {
        let token = value.token;
        let key = value.key;
        CacheMessages::VerifyCaptchaResult { token, key }
    }
}

View file

@ -23,8 +23,6 @@ use std::ops::RangeBounds;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use actix::prelude::*;
use libmcaptcha::cache::messages::CachedPoWConfig;
use libmcaptcha::AddVisitorResult; use libmcaptcha::AddVisitorResult;
use libmcaptcha::MCaptcha; use libmcaptcha::MCaptcha;
use openraft::async_trait::async_trait; use openraft::async_trait::async_trait;
@ -49,15 +47,11 @@ use openraft::Vote;
use serde::Deserialize; use serde::Deserialize;
use serde::Serialize; use serde::Serialize;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use url::quirks::set_pathname;
use crate::DcacheNodeId; use crate::DcacheNodeId;
use crate::DcacheTypeConfig; use crate::DcacheTypeConfig;
use actix::prelude::*; use libmcaptcha::cache::messages::{CachePoW, CacheResult, DeleteCaptchaResult, DeletePoW};
use libmcaptcha::cache::messages::{
CachePoW, CacheResult, DeleteCaptchaResult, DeletePoW, RetrivePoW, VerifyCaptchaResult,
};
use libmcaptcha::master::messages::{ use libmcaptcha::master::messages::{
AddSite as AddCaptcha, AddVisitor, GetInternalData, RemoveCaptcha, Rename as RenameCaptcha, AddSite as AddCaptcha, AddVisitor, GetInternalData, RemoveCaptcha, Rename as RenameCaptcha,
SetInternalData, SetInternalData,
@ -66,8 +60,7 @@ use libmcaptcha::{master::embedded::master::Master as EmbeddedMaster, system::Sy
pub mod system; pub mod system;
#[derive(Serialize, Deserialize, Clone, Debug, Message)] #[derive(Serialize, Deserialize, Clone, Debug)]
#[rtype(result = "DcacheResponse")]
pub enum DcacheRequest { pub enum DcacheRequest {
// master // master
AddVisitor(AddVisitor), AddVisitor(AddVisitor),
@ -81,8 +74,7 @@ pub enum DcacheRequest {
DeleteCaptchaResult(DeleteCaptchaResult), DeleteCaptchaResult(DeleteCaptchaResult),
} }
#[derive(Serialize, Deserialize, Message, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
#[rtype(result = "()")]
pub enum DcacheResponse { pub enum DcacheResponse {
AddVisitorResult(Option<AddVisitorResult>), AddVisitorResult(Option<AddVisitorResult>),
Empty, // AddCaptcha, RenameCaptcha, RemoveCaptcha, Cachepow, CacheResult, Empty, // AddCaptcha, RenameCaptcha, RemoveCaptcha, Cachepow, CacheResult,
@ -102,7 +94,9 @@ pub struct DcacheStateMachine {
pub last_membership: StoredMembership<DcacheNodeId, BasicNode>, pub last_membership: StoredMembership<DcacheNodeId, BasicNode>,
/// Application data. /// Application data.
pub data: Arc<System<HashCache, EmbeddedMaster>>, // pub data: Arc<System<HashCache, EmbeddedMaster>>,
pub counter: crate::mcaptcha::mcaptcha::Manager,
pub results: crate::mcaptcha::cache::HashCache,
} }
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
@ -111,42 +105,34 @@ struct PersistableStateMachine {
last_membership: StoredMembership<DcacheNodeId, BasicNode>, last_membership: StoredMembership<DcacheNodeId, BasicNode>,
/// Application data. counter: crate::mcaptcha::mcaptcha::Manager,
data: HashMap<String, MCaptcha>, results: crate::mcaptcha::cache::HashCache,
} }
impl PersistableStateMachine { impl PersistableStateMachine {
async fn from_statemachine(m: &DcacheStateMachine) -> Self { async fn from_statemachine(m: &DcacheStateMachine) -> Self {
let internal_data = m let counter = m.counter.clone();
.data let results = m.results.clone();
.master
.send(GetInternalData)
.await
.unwrap()
.await
.unwrap()
.unwrap();
Self { Self {
last_applied_log: m.last_applied_log.clone(), last_applied_log: m.last_applied_log,
last_membership: m.last_membership.clone(), last_membership: m.last_membership.clone(),
data: internal_data, counter,
results,
} }
} }
async fn to_statemachine( async fn to_statemachine(
self, self,
data: Arc<System<HashCache, EmbeddedMaster>>, counter: crate::mcaptcha::mcaptcha::Manager,
results: crate::mcaptcha::cache::HashCache,
) -> DcacheStateMachine { ) -> DcacheStateMachine {
data.master self.counter.clean_all_after_cold_start(counter).await;
.send(SetInternalData { self.results.clean_all_after_cold_start(results).await;
mcaptcha: self.data,
})
.await
.unwrap();
DcacheStateMachine { DcacheStateMachine {
last_applied_log: self.last_applied_log, last_applied_log: self.last_applied_log,
last_membership: self.last_membership, last_membership: self.last_membership,
data, results: self.results,
counter: self.counter,
} }
} }
} }
@ -173,7 +159,8 @@ impl DcacheStore {
let state_machine = RwLock::new(DcacheStateMachine { let state_machine = RwLock::new(DcacheStateMachine {
last_applied_log: Default::default(), last_applied_log: Default::default(),
last_membership: Default::default(), last_membership: Default::default(),
data: system::init_system(salt), counter: crate::mcaptcha::mcaptcha::Manager::new(30),
results: crate::mcaptcha::cache::HashCache::default(),
}); });
Self { Self {
@ -399,83 +386,42 @@ impl RaftStorage<DcacheTypeConfig> for Arc<DcacheStore> {
EntryPayload::Blank => res.push(DcacheResponse::Empty), EntryPayload::Blank => res.push(DcacheResponse::Empty),
EntryPayload::Normal(ref req) => match req { EntryPayload::Normal(ref req) => match req {
DcacheRequest::AddVisitor(msg) => { DcacheRequest::AddVisitor(msg) => {
let r = sm let r = sm.counter.add_visitor(msg);
.data
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::AddVisitorResult(r)); res.push(DcacheResponse::AddVisitorResult(r));
} }
DcacheRequest::AddCaptcha(msg) => { DcacheRequest::AddCaptcha(msg) => {
sm.data sm.counter
.master .add_captcha(Arc::new((&msg.mcaptcha).into()), msg.id.clone());
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
DcacheRequest::RenameCaptcha(msg) => { DcacheRequest::RenameCaptcha(msg) => {
sm.data sm.counter.rename(&msg.name, msg.rename_to.clone());
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
DcacheRequest::RemoveCaptcha(msg) => { DcacheRequest::RemoveCaptcha(msg) => {
sm.data sm.counter.rm_captcha(&msg.0);
.master
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
// cache // cache
DcacheRequest::CachePoW(msg) => { DcacheRequest::CachePoW(msg) => {
sm.data sm.results.cache_pow(msg.clone());
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
DcacheRequest::DeletePoW(msg) => { DcacheRequest::DeletePoW(msg) => {
sm.data.cache.send(msg.clone()).await.unwrap().unwrap(); sm.results.remove_pow_config(&msg.0);
// sm.data.cache.send(msg.clone()).await.unwrap().unwrap();
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
DcacheRequest::CacheResult(msg) => { DcacheRequest::CacheResult(msg) => {
sm.data sm.results.cache_result(msg.clone());
.cache
.send(msg.clone())
.await
.unwrap()
.await
.unwrap()
.unwrap();
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
DcacheRequest::DeleteCaptchaResult(msg) => { DcacheRequest::DeleteCaptchaResult(msg) => {
sm.data.cache.send(msg.clone()).await.unwrap().unwrap(); sm.results.remove_cache_result(&msg.token);
res.push(DcacheResponse::Empty); res.push(DcacheResponse::Empty);
} }
}, },
@ -522,7 +468,7 @@ impl RaftStorage<DcacheTypeConfig> for Arc<DcacheStore> {
})?; })?;
let mut state_machine = self.state_machine.write().await; let mut state_machine = self.state_machine.write().await;
let updated_state_machine = updated_state_machine let updated_state_machine = updated_state_machine
.to_statemachine(state_machine.data.clone()) .to_statemachine(state_machine.counter.clone(), state_machine.results.clone())
.await; .await;
*state_machine = updated_state_machine; *state_machine = updated_state_machine;
} }
@ -557,3 +503,19 @@ impl RaftStorage<DcacheTypeConfig> for Arc<DcacheStore> {
self.clone() self.clone()
} }
} }
#[cfg(test)]
mod tests {
use super::*;
async fn provision_dcache_store() -> Arc<DcacheStore> {
Arc::new(DcacheStore::new(
"adsfasdfasdfadsfadfadfadfadsfasdfasdfasdfasdf".into(),
))
}
#[test]
fn test_dcache_store() {
openraft::testing::Suite::test_all(provision_dcache_store).unwrap();
}
}

Some files were not shown because too many files have changed in this diff Show more