Compare commits

..

122 commits

Author SHA1 Message Date
23a87480b7 Merge pull request 'fix&feat: ordering: Aggregate creator provides ID' (#114) from ordering-fix-aggregate-id into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #114
2024-09-24 19:10:15 +05:30
88f4dd6825
feat: ordering: Order view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-24 17:57:41 +05:30
eba2f7204c
feat: ordering: Order ID is provided by caller 2024-09-24 17:26:28 +05:30
7c3676e84d
feat: ordering: Product ID is provided by caller & Product view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-24 16:07:21 +05:30
5f466fce80
feat: ordering: Category ID is provided by caller & Category view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-24 15:59:50 +05:30
c5d5dcb323
feat: ordering: Kot ID is provided by caller & Kot view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-24 15:44:48 +05:30
df64f434be
feat: ordering: slineitem ID is provided by caller & lineitemview tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-24 12:39:58 +05:30
5a050fde0e
feat: ordering: customization ID is provided by caller & customization view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-24 12:31:39 +05:30
d265412d06
feat: ordering: store ID is provided by caller & store view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-24 12:20:37 +05:30
272e8f68c6 Merge pull request 'feat: billing: aggregate IDs are provided by caller & test View impl' (#113) from billing-fix-aggregate-id into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #113
2024-09-21 20:04:39 +05:30
4a51a3d629
feat: billing: aggregate IDs are provided by caller & test View impl
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-21 19:55:11 +05:30
503920fc5f Merge pull request 'fix: inventory: aggregate IDs are provided by the caller' (#112) from inventory-fix-aggregate-id into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #112
2024-09-21 16:45:43 +05:30
ca7defe724
feat: inventory: product view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-21 16:21:28 +05:30
88446b94c4
feat: inventory: product ID is provided by caller 2024-09-21 16:21:19 +05:30
73d829f3f8
feat: inventory: category view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-21 15:34:58 +05:30
d727d0b5b0
feat: inventory: category ID is provided by caller 2024-09-21 15:34:14 +05:30
5245cf02e0
feat: inventory: customization view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-20 18:09:20 +05:30
ac6da029a5
feat: inventory: customization ID is provided by caller 2024-09-20 18:09:08 +05:30
2853bc5a75
feat: store view tests
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-20 17:23:46 +05:30
4580aea18b
feat&fix: caller provides aggregate ID 2024-09-20 17:22:11 +05:30
e14c42023a
debug: print db url in tests 2024-09-20 17:20:54 +05:30
7d66d9d5b8 Merge pull request 'feat: apply events to billing Views' (#111) from billing-view into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #111
2024-09-18 18:34:33 +05:30
3746eb211f Merge pull request 'feat: apply events to ordering Views' (#110) from ordering-view into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #110
2024-09-18 18:34:22 +05:30
9bd2849695
feat: apply events to billing Views
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-18 18:05:16 +05:30
2017ca5e97
feat: apply events to ordering Views
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-18 18:04:36 +05:30
28e3da0bbc Merge pull request 'feat: apply events to inventory Views' (#109) from inventory-view into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #109
2024-09-18 17:53:03 +05:30
d09581d9bc
feat: apply events to inventory Views
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-18 17:40:24 +05:30
1660da90a7 Merge pull request 'feat: compute total price for bill' (#108) from billing into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #108
2024-09-18 17:27:20 +05:30
dfc080f26a
feat: is_dividable compilation error and print unsupported quantity type
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-18 16:30:54 +05:30
d4acccde9d
fix: from_minor compilation error 2024-09-18 16:30:41 +05:30
39edaead04
feat: impl get_line_items_for_bill_id DB port 2024-09-18 16:30:14 +05:30
f0da898e62
feat: compute total price for bill 2024-09-18 16:03:43 +05:30
384dae69f5
feat: impl Add for Price 2024-09-18 14:05:04 +05:30
364763fc35 Merge pull request 'feat: import store aggregate from inventory into billing domain' (#107) from billing into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #107
2024-09-17 16:22:23 +05:30
dbbbb86a8c
feat: CRUD bill aggregate
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-17 16:13:43 +05:30
1c64b62d5b
feat: import line_item aggregate from inventory into billing domain
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2024-09-17 14:20:15 +05:30
5e48eed486
feat: import store aggregate from inventory into billing domain
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
2024-09-16 17:58:28 +05:30
6b6084a0e9 Merge pull request 'feat: convert price from major to minor and minor to major' (#106) from convert-price into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #106
2024-09-16 16:22:30 +05:30
24078a201e Merge pull request 'feat: convert quantity to minor' (#105) from convert-quantity-to-minor into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #105
2024-09-16 16:22:25 +05:30
d62beabb62
feat: convert price from major to minor and minor to major
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-16 15:43:29 +05:30
3a6b200773
feat: convert quantity to minor
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-16 15:38:41 +05:30
83f0726bae Merge pull request 'feat: import inventory services&domain obj to implement pantry' (#104) from pantry-aggregate into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #104
2024-09-16 15:09:37 +05:30
cddba71eb5
feat: import inventory services&domain obj to implement pantry
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-16 14:55:48 +05:30
891f570afe Merge pull request 'fix(deps): update rust crate cqrs-es to v0.4.12' (#103) from renovate/cqrs-es-0.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #103
2024-09-16 14:08:58 +05:30
cb778aa89e
chore: mv Currenct,Price to utlis 2024-09-14 16:09:45 +05:30
90212fb53a
chore: mv Currenct,Price to utlis
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
2024-09-14 16:05:57 +05:30
Renovate Bot
a37d521be9 fix(deps): update rust crate cqrs-es to v0.4.12
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 16:39:11 +00:00
b8f7f6cda5 Merge pull request 'fix(deps): update rust crate actix-identity to 0.8.0' (#102) from renovate/actix-identity-0.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #102
2024-09-13 21:06:30 +05:30
4c813e8e97 Merge pull request 'fix(deps): update rust crate lettre to v0.11.9' (#101) from renovate/lettre-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #101
2024-09-13 21:06:24 +05:30
86f354b928 Merge pull request 'fix(deps): update rust crate derive-getters to 0.5.0' (#100) from renovate/derive-getters-0.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #100
2024-09-13 21:06:08 +05:30
Renovate Bot
e0c6e2c9b1 fix(deps): update rust crate actix-identity to 0.8.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 14:08:20 +00:00
Renovate Bot
c6d50d61da fix(deps): update rust crate lettre to v0.11.9
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 14:08:09 +00:00
Renovate Bot
36ba3fedbb fix(deps): update rust crate derive-getters to 0.5.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 12:46:05 +00:00
750849b287 Merge pull request 'feat&fix: replace maildev with mailpit and use util lib to interact with its HTTP API' (#99) from maildev-client into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #99
2024-09-13 17:38:56 +05:30
283322cf29
s/maildev/mailpit/
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-13 17:30:23 +05:30
a25a7ce39f
feat&fix: use mailpit 2024-09-13 17:18:21 +05:30
cbe03ebaaa
feat: use util lib to interact with maildev 2024-09-13 17:16:38 +05:30
baedcb7719 Merge pull request 'feat: update devenv' (#98) from chore-devenv into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #98
2024-09-13 16:06:04 +05:30
f8266efbb6
feat: update devenv
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed
ci/woodpecker/pull_request_closed/woodpecker Pipeline failed
2024-09-13 16:03:43 +05:30
3d1de90b78 Merge pull request 'fix(deps): update rust crate actix-session to v0.10.1' (#97) from renovate/actix-session-0.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #97
2024-09-13 14:43:04 +05:30
f53e40259c Merge pull request 'fix(deps): update rust crate lettre to v0.11.8' (#95) from renovate/lettre-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #95
2024-09-13 14:42:27 +05:30
Renovate Bot
792d04bf31 fix(deps): update rust crate actix-session to v0.10.1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-12 19:45:44 +00:00
Renovate Bot
2d484d4dae fix(deps): update rust crate lettre to v0.11.8
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-12 16:46:27 +00:00
41e32a9504 Merge pull request 'fix(deps): update rust crate tracing-actix-web to v0.7.12' (#96) from renovate/tracing-actix-web-0.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #96
2024-09-12 22:12:51 +05:30
cc33266054 Merge pull request 'fix(deps): update rust crate serde to v1.0.210' (#92) from renovate/serde-monorepo into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #92
2024-09-12 22:12:15 +05:30
b2fa4dfba3 Merge pull request 'fix(deps): update rust crate serde_json to v1.0.128' (#91) from renovate/serde_json-1.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #91
2024-09-12 22:11:02 +05:30
eb92cea381 Merge pull request 'fix(deps): update rust crate derive_builder to v0.20.1' (#94) from renovate/derive_builder-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #94
2024-09-12 22:10:55 +05:30
426ac5bbd5 Merge pull request 'fix(deps): update rust crate async-trait to v0.1.82' (#93) from renovate/async-trait-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #93
2024-09-12 22:09:49 +05:30
6dbec7c5e6 Merge pull request 'chore(deps): update rust crate reqwest to v0.12.7' (#90) from renovate/reqwest-0.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: #90
2024-09-12 22:09:45 +05:30
Renovate Bot
1c36796398 fix(deps): update rust crate tracing-actix-web to v0.7.12
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 14:16:29 +00:00
Renovate Bot
7e4e0cbc00 fix(deps): update rust crate serde_json to v1.0.128
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 14:16:25 +00:00
Renovate Bot
e1ef7bccb1 fix(deps): update rust crate serde to v1.0.210
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 14:16:18 +00:00
Renovate Bot
3dad7c1707 fix(deps): update rust crate derive_builder to v0.20.1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 14:16:05 +00:00
Renovate Bot
006f473e91 fix(deps): update rust crate async-trait to v0.1.82
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 14:15:55 +00:00
Renovate Bot
3ab91c5ae0 chore(deps): update rust crate reqwest to v0.12.7
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-09-11 14:15:50 +00:00
22da1e0015 Merge pull request 'fix(deps): update rust crate serde to v1.0.209' (#88) from renovate/serde-monorepo into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #88
2024-08-29 10:45:16 +05:30
2d5cf2d900 Merge pull request 'chore(deps): update getmeili/meilisearch docker tag to v1.10' (#89) from renovate/getmeili-meilisearch-1.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #89
2024-08-28 21:40:20 +05:30
969d9ff239 Merge pull request 'fix(deps): update rust crate serde_json to v1.0.127' (#87) from renovate/serde_json-1.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #87
2024-08-28 13:43:28 +05:30
ec8a97aecf Merge pull request 'chore(deps): update rust crate reqwest to v0.12.7' (#86) from renovate/reqwest-0.x-lockfile into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #86
2024-08-28 13:40:59 +05:30
Renovate Bot
63a59a1491 chore(deps): update getmeili/meilisearch docker tag to v1.10
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-26 08:06:45 +00:00
Renovate Bot
cd35c9df5d fix(deps): update rust crate serde to v1.0.209
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
2024-08-24 04:06:44 +00:00
Renovate Bot
c7148b4490 fix(deps): update rust crate serde_json to v1.0.126
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
2024-08-23 20:06:08 +00:00
Renovate Bot
bb028c3896 chore(deps): update rust crate reqwest to v0.12.6
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
2024-08-19 16:37:21 +00:00
c15437257d Merge pull request 'fix(deps): update rust crate serde to v1.0.208' (#85) from renovate/serde-monorepo into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/85
2024-08-19 13:53:18 +05:30
945e8dabf2 Merge pull request 'fix(deps): update rust crate serde_json to v1.0.125' (#84) from renovate/serde_json-1.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/84
2024-08-19 13:52:48 +05:30
4a9a10acb3 Merge pull request 'chore(deps): update postgres docker tag to v16.4' (#81) from renovate/postgres-16.x into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/81
2024-08-19 13:49:37 +05:30
cd856e8da7 Merge pull request 'fix(deps): update rust crate derive-getters to 0.5.0' (#78) from renovate/derive-getters-0.x into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/78
2024-08-19 13:49:28 +05:30
956b3eca97 Merge pull request 'fix(deps): update rust crate actix-session to 0.10.0' (#77) from renovate/actix-session-0.x into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/77
2024-08-19 13:49:08 +05:30
Renovate Bot
dcd4396863 fix(deps): update rust crate serde to v1.0.208
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-15 16:07:05 +00:00
Renovate Bot
061418f83e fix(deps): update rust crate serde_json to v1.0.125
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-15 06:07:06 +00:00
27f7a97a36 Merge pull request 'fix(deps): update rust crate serde to v1.0.207' (#80) from renovate/serde-monorepo into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/80
2024-08-13 15:34:44 +05:30
759380b49d Merge pull request 'fix(deps): update rust crate actix-web to v4.9.0' (#82) from renovate/actix-web-4.x-lockfile into master
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/82
2024-08-13 15:31:10 +05:30
cfb1b1d43e Merge pull request 'fix(deps): update rust crate serde_json to v1.0.124' (#83) from renovate/serde_json-1.x-lockfile into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/83
2024-08-13 15:28:11 +05:30
Renovate Bot
ee3db57e17 fix(deps): update rust crate serde to v1.0.207
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-12 20:37:38 +00:00
Renovate Bot
68d7821b7f fix(deps): update rust crate serde_json to v1.0.124
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-11 21:36:17 +00:00
Renovate Bot
8f57349b9d fix(deps): update rust crate actix-web to v4.9.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-10 02:36:52 +00:00
Renovate Bot
8bfeaa14c1 chore(deps): update postgres docker tag to v16.4
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-08 20:35:39 +00:00
Renovate Bot
867620e87e fix(deps): update rust crate derive-getters to 0.5.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-07 15:35:34 +00:00
Renovate Bot
b45a79e7ea fix(deps): update rust crate actix-session to 0.10.0
Some checks failed
renovate/artifacts Artifact file update failure
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-07 00:39:14 +00:00
6dcaac9843 Merge pull request 'fix(deps): update rust crate meilisearch-sdk to v0.27.1' (#76) from renovate/meilisearch-sdk-0.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/76
2024-08-06 16:36:42 +05:30
Renovate Bot
cadd7ac9a4 fix(deps): update rust crate meilisearch-sdk to v0.27.1
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-05 13:09:02 +00:00
c4b67bd4a2 Merge pull request 'fix(deps): update rust crate serde_json to v1.0.122' (#75) from renovate/serde_json-1.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: https://git.batsense.net///libre-solutions/vanikam/pulls/75
2024-08-02 11:48:56 +05:30
Renovate Bot
269570baaf fix(deps): update rust crate serde_json to v1.0.122
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-08-01 21:37:42 +00:00
a986855759 Merge pull request 'fix(deps): update rust crate serde_json to v1.0.121' (#74) from renovate/serde_json-1.x-lockfile into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #74
2024-07-29 16:10:24 +05:30
Renovate Bot
9c5407c2f4 fix(deps): update rust crate serde_json to v1.0.121
All checks were successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-28 21:35:30 +00:00
520036734d Merge pull request 'fix: link LineItem to Kot' (#73) from link-line-item-to-kot into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #73
2024-07-23 21:50:22 +05:30
e592eec19c
fix: link LineItem to Kot
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-23 21:25:46 +05:30
989e444356 Merge pull request 'feat: delete kot service' (#72) from delete-kot-service into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #72
2024-07-23 21:25:04 +05:30
938e032114 Merge pull request 'feat: update kot service' (#71) from kot-update-service into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #71
2024-07-23 21:10:15 +05:30
54471351c6 Merge pull request 'fix: use created time from cmd' (#70) from fix-add-kot-created-time into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #70
2024-07-23 21:10:05 +05:30
d2020a14de
feat: delete kot service
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-23 20:59:55 +05:30
ec820b4d33
feat: update kot service
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-23 20:51:44 +05:30
df7ec8cfd4
fix: use created time from cmd
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-23 20:51:11 +05:30
366c7dc639 Merge pull request 'feat: kot add service' (#69) from kot-add-service into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #69
2024-07-23 20:48:48 +05:30
6115a9adde
feat: kot add service
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-23 20:34:41 +05:30
ddd0716380
feat: impl View for Kot 2024-07-23 20:34:27 +05:30
ed66a074ed
feat: define Kot aggregate 2024-07-23 20:34:04 +05:30
13414d34fc
feat: db port to check duplicate Kot ID 2024-07-23 20:33:52 +05:30
6e0364c1ee
feat: add Kot query table 2024-07-23 20:32:54 +05:30
f7bf30dfa9 Merge pull request 'fix(deps): update rust crate mockall to 0.13.0' (#61) from renovate/mockall-0.x into master
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Reviewed-on: #61
2024-07-23 20:05:55 +05:30
5fe29e0207 Merge pull request 'feat: delete order service' (#68) from delete-order-service into master
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
Reviewed-on: #68
2024-07-23 20:05:48 +05:30
Renovate Bot
44b51123c2 fix(deps): update rust crate mockall to 0.13.0
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/pr/woodpecker Pipeline was successful
ci/woodpecker/pull_request_closed/woodpecker Pipeline was successful
2024-07-23 13:05:32 +00:00
271 changed files with 18879 additions and 1308 deletions


@@ -4,6 +4,6 @@ export POSTGRES_DATABASE_URL="postgres://postgres:password@localhost:5432/postgr
export VANIKAM_email_USERNAME=admin
export VANIKAM_email_PASSWORD=password
export VANIKAM_email_SERVER_HOSTNAME=localhost:10025
- export MAILDEV_URL=http://localhost:1080
+ export MAILPIT_URL=http://localhost:1080
export VANIKAM_email_FROM="Vanikam Info <info@vanikam.app>"
export VANIKAM_email_REPLY_TO="Vanikam Support <support@vanikam.app>"
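The hunk above only renames MAILDEV_URL to MAILPIT_URL; per the "use util lib to interact with its HTTP API" commit in the list, tests presumably read this variable to query Mailpit. A minimal sketch of such a helper in Rust, assuming Mailpit's documented /api/v1/messages endpoint and a total field in its response (the function and struct names are illustrative, not the repository's util lib):

```rust
use serde::Deserialize;

// Assumed shape of Mailpit's message-list response; only the field we need here.
#[derive(Debug, Deserialize)]
struct MailpitMessages {
    total: u64,
}

// Count the emails Mailpit has captured, using the MAILPIT_URL set above.
async fn captured_mail_count() -> Result<u64, Box<dyn std::error::Error>> {
    let base = std::env::var("MAILPIT_URL")?; // e.g. http://localhost:1080
    let resp: MailpitMessages = reqwest::get(format!("{base}/api/v1/messages"))
        .await?
        .json()
        .await?;
    Ok(resp.total)
}
```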


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_billing_store_query\n WHERE\n name = $1\n AND\n deleted = false\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
null
]
},
"hash": "03b31d653e96f3b28ff15c86ff5801fe319ba86a15c09aacc4990a7a58849081"
}
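This and the other query-*.json files added in this range are sqlx's offline query cache: each one records a query string plus its parameter and column types so the query! macros can be type-checked without a live database. As a rough sketch, the file above (one Text parameter, one nullable Bool column named exists) corresponds to a call along these lines; the function name and surrounding code are illustrative, not the repository's actual DB port:

```rust
use sqlx::PgPool;

// Illustrative sketch: does a non-deleted billing store with this name exist?
pub async fn billing_store_name_exists(db: &PgPool, name: &str) -> Result<bool, sqlx::Error> {
    let rec = sqlx::query!(
        "SELECT EXISTS (
            SELECT 1
            FROM cqrs_billing_store_query
            WHERE
                name = $1
            AND
                deleted = false
        );",
        name
    )
    .fetch_one(db)
    .await?;
    // The column is recorded as nullable (null) in the cache, so sqlx exposes it as Option<bool>.
    Ok(rec.exists.unwrap_or(false))
}
```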


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n store_id, version\n FROM\n cqrs_billing_store_query\n WHERE\n store_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "0bf69e4af7657572bf84633ecf8e2649a7b5baa3fc3abbb6ee7f522fedf3062e"
}


@@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_category_query\n WHERE\n name = $1\n AND\n store_id = $2\n AND\n deleted = false\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Text",
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "0d2d5392e3cb1d5f7b164e93ccd768dc19975d19c93adb222ee8f8666d28cef2"
}


@@ -0,0 +1,94 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n created_time,\n bill_id,\n price_per_unit_minor,\n price_per_unit_major,\n price_per_unit_currency,\n deleted\n FROM\n cqrs_billing_line_item_query\n WHERE\n line_item_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "product_name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "product_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "line_item_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "quantity_minor_unit",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "quantity_minor_number",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "quantity_major_unit",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "quantity_major_number",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "created_time",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "bill_id",
"type_info": "Uuid"
},
{
"ordinal": 9,
"name": "price_per_unit_minor",
"type_info": "Int4"
},
{
"ordinal": 10,
"name": "price_per_unit_major",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "price_per_unit_currency",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "0d6ba3039f3419dd34a609ae207ac7ca9cf88099aa273972271040852794caa5"
}
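The price_per_unit_major / price_per_unit_minor / price_per_unit_currency columns above, together with the "impl Add for Price" and "compute total price for bill" commits in the list, point at a money type split into major and minor units. A hedged sketch of such a type, assuming 100 minor units per major unit; field names and behaviour are illustrative, not the repository's actual Price type:

```rust
use std::ops::Add;

// Illustrative money type mirroring the *_major / *_minor / *_currency columns.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Price {
    pub major: i32,       // whole units, e.g. rupees
    pub minor: i32,       // fractional units, e.g. paise (assumed 100 minor = 1 major)
    pub currency: String,
}

impl Add for Price {
    type Output = Price;

    fn add(self, other: Price) -> Price {
        // Only meaningful when both operands share a currency.
        debug_assert_eq!(self.currency, other.currency);
        let total_minor = (self.major + other.major) * 100 + self.minor + other.minor;
        Price {
            major: total_minor / 100,
            minor: total_minor % 100,
            currency: self.currency,
        }
    }
}
```

Summing a bill would then be a fold of per-line-item prices over the rows returned by the get_line_items_for_bill_id port mentioned in the commit list.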


@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_billing_bill_next_token_id\n (token_number, store_id)\n VALUES\n ($1, $2);",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int4",
"Uuid"
]
},
"nullable": []
},
"hash": "0e4316d20c96a7fcfcb68a98865de8e3c0e33e22f2d2cd67fdd671b1f94ab8ea"
}


@@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_ordering_kot_query (\n version,\n order_id,\n kot_id,\n created_time,\n deleted\n ) VALUES (\n $1, $2, $3, $4, $5\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Uuid",
"Uuid",
"Timestamptz",
"Bool"
]
},
"nullable": []
},
"hash": "1804fe2c946337fb2d10683b273bc00c8291b5ca0624f6a8fcb1d701d6b383c5"
}


@@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_ordering_category_query\n SET\n version = $1,\n name = $2,\n description = $3,\n store_id = $4,\n deleted = $5;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "289e6d9fe105ae9e91d947c22d0a5979786aac15b0c78359b3f634b74c4686d4"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_billing_bill_next_token_id\n WHERE\n store_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "2af9418499c040df68031e79f7bcbdaa293f1d363f06f5926adfd038783171c4"
}


@@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_product_customizations_query\n WHERE\n name = $1\n AND\n product_id = $2\n AND\n deleted = false\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Text",
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "2f5ec6062904e7124f56a237e80922537b577f2e7aeafd0708578bf62f8423db"
}


@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
- "query": "INSERT INTO cqrs_ordering_line_item_query (\n version,\n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n sale_time,\n deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10\n );",
+ "query": "INSERT INTO cqrs_ordering_line_item_query (\n version,\n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n created_time,\n kot_id,\n deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11\n );",
"describe": {
"columns": [],
"parameters": {
@@ -14,10 +14,11 @@
"Text",
"Int4",
"Timestamptz",
+ "Uuid",
"Bool"
]
},
"nullable": []
},
- "hash": "88f519e645dba9eb091d18e8f5c0db127ac0d070fb81e5b157a68638c3688996"
+ "hash": "34ba93d669697b7080df26bb23e7d27dcb4352041b04fc9d21e7332f0b8b32be"
}


@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_ordering_store_query (\n version, name, address, store_id, owner, deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Uuid",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "37334f91e68e9d95bc9675d98be833c7763ff3e1eb368456fd41e1208863b9b2"
}


@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
- "query": "SELECT \n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n sale_time,\n deleted\n FROM\n cqrs_ordering_line_item_query\n WHERE\n line_item_id = $1;",
+ "query": "SELECT \n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n created_time,\n kot_id,\n deleted\n FROM\n cqrs_ordering_line_item_query\n WHERE\n line_item_id = $1;",
"describe": {
"columns": [
{
@@ -40,11 +40,16 @@
},
{
"ordinal": 7,
- "name": "sale_time",
+ "name": "created_time",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
+ "name": "kot_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 9,
"name": "deleted",
"type_info": "Bool"
}
@@ -63,8 +68,9 @@
false,
false,
false,
+ false,
false
]
},
- "hash": "2d5ff8c368757fc2d390aaaed23d18fe6888ace9e7e2b9d12f02d2c50d80e685"
+ "hash": "3b9849078c846bc254aca3d56efd44b9a919e77bdfa9074b6c1b335380a0ce6c"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_ordering_product_query (\n version,\n name,\n description,\n image,\n product_id,\n category_id,\n price_major,\n price_minor,\n price_currency,\n sku_able,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n\n deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Text",
"Uuid",
"Uuid",
"Int4",
"Int4",
"Text",
"Bool",
"Text",
"Int4",
"Text",
"Int4",
"Bool"
]
},
"nullable": []
},
"hash": "3d45cb28b61bac689d137c7415fcd620552e1390a443f4c8907b344a8526ff07"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_store_query\n WHERE\n store_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "4abd37d29572915fb167833c0496c4bd40ee98d9d72f7a5e7fc8338d73468fe5"
}


@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
- "query": "SELECT \n customer_name,\n order_id,\n created_time,\n deleted\n FROM\n cqrs_ordering_order_query\n WHERE\n order_id = $1;",
+ "query": "SELECT \n customer_name,\n order_id,\n created_time,\n store_id,\n deleted\n FROM\n cqrs_ordering_order_query\n WHERE\n order_id = $1;",
"describe": {
"columns": [
{
@@ -20,6 +20,11 @@
},
{
"ordinal": 3,
+ "name": "store_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 4,
"name": "deleted",
"type_info": "Bool"
}
@@ -30,11 +35,12 @@
]
},
"nullable": [
+ true,
false,
false,
false,
false
]
},
- "hash": "dbe1e41f04a81b2a504b9179911201ec52340d09d45041addd54eb349af82488"
+ "hash": "4f8a2294b40e4285fa6d4c7bcf648be7320f99002b9a9279981053a5062b0ed8"
}


@@ -0,0 +1,94 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n created_time,\n bill_id,\n price_per_unit_minor,\n price_per_unit_major,\n price_per_unit_currency,\n deleted\n FROM cqrs_billing_line_item_query\n WHERE\n bill_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "product_name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "product_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "line_item_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "quantity_minor_unit",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "quantity_minor_number",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "quantity_major_unit",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "quantity_major_number",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "created_time",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "bill_id",
"type_info": "Uuid"
},
{
"ordinal": 9,
"name": "price_per_unit_minor",
"type_info": "Int4"
},
{
"ordinal": 10,
"name": "price_per_unit_major",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "price_per_unit_currency",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "538d43c832702b03da4a51e0b0794785adfb14b4b8ff0ed7c4a7079e711b8ce7"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n store_id, version\n FROM\n cqrs_ordering_store_query\n WHERE\n store_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "5568e35d9a0b7acb1c0b1f65015e274c0b064a370dda1c436c0e598ffd6ba599"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_product_query\n WHERE\n product_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "5ca085f34eea52fb8c78558149a60a8476780635aff1065d57097332a2dd8fd0"
}


@@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n name, description, category_id, store_id, deleted\n FROM\n cqrs_ordering_category_query\n WHERE\n category_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "description",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "category_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true,
false,
false,
false
]
},
"hash": "5d3972a89f5d64e0c9cbe3a086401889c7f2c4cc7eda9e6a7c0f501c103ab9ac"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n kot_id, version\n FROM\n cqrs_ordering_kot_query\n WHERE\n kot_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "kot_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "5db3c3d1ff503d25d3f8fc529a95fac6c716e1f575eb59d7a3a149d06760741b"
}


@@ -0,0 +1,27 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_billing_line_item_query (\n version,\n product_name,\n product_id,\n line_item_id,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n created_time,\n bill_id,\n price_per_unit_minor,\n price_per_unit_major,\n price_per_unit_currency,\n deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Uuid",
"Uuid",
"Text",
"Int4",
"Text",
"Int4",
"Timestamptz",
"Uuid",
"Int4",
"Int4",
"Text",
"Bool"
]
},
"nullable": []
},
"hash": "6ac0c3c64749d1eb4a93f8727b56d6a2c9e9b1e49258b38ed1dbebf2f601e99e"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_billing_bill_query (\n version,\n created_time,\n store_id,\n bill_id,\n token_number,\n total_price_major,\n total_price_minor,\n total_price_currency,\n deleted\n\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Timestamptz",
"Uuid",
"Uuid",
"Int4",
"Int4",
"Int4",
"Text",
"Bool"
]
},
"nullable": []
},
"hash": "6d9b6f386f4425d45d6aa8f7c2331c44587f54c3583849d07d81e87e1f773c2b"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n customization_id, version\n FROM\n cqrs_ordering_product_customizations_query\n WHERE\n customization_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "customization_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "6f06c0f4b71f0458229dad370d46da62e933651ffbe39ee27e5e3dd078d86e05"
}


@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_billing_store_query (\n version, name, address, store_id, owner, deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Uuid",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "78008f1b0d3f366937c72a181b19217cd9f28f1ebe73ddc081ec2f7bec5dc1b3"
}


@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_ordering_category_query (\n version, name, description, category_id, store_id, deleted\n ) VALUES (\n $1, $2, $3, $4, $5, $6\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Uuid",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "7cb52847f00e985c9475485de63c6671f35d27bde39be34677a6b74228bf3e97"
}


@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
- "query": "INSERT INTO cqrs_ordering_order_query (\n version,\n customer_name,\n order_id,\n created_time,\n deleted\n\n ) VALUES (\n $1, $2, $3, $4, $5\n );",
+ "query": "INSERT INTO cqrs_ordering_order_query (\n version,\n customer_name,\n order_id,\n created_time,\n store_id,\n deleted\n\n ) VALUES (\n $1, $2, $3, $4, $5, $6\n );",
"describe": {
"columns": [],
"parameters": {
@@ -9,10 +9,11 @@
"Text",
"Uuid",
"Timestamptz",
+ "Uuid",
"Bool"
]
},
"nullable": []
},
- "hash": "a342003149f2991ef0280d008f18e664c96299edfd9a11d08487e4db10a10e8b"
+ "hash": "7e2e91418e136fc5091b27c5f61158cca61e4853bee64e5afc44679e685cd744"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_store_query\n WHERE\n name = $1\n AND\n deleted = false\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
null
]
},
"hash": "81de3abeb5dcbe7a87e20bab82dc6a258b3017a710f8c3e249d73a5d980e2115"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n bill_id, version\n FROM\n cqrs_billing_bill_query\n WHERE\n bill_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "bill_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "835a3afa5e16c762d021c4ca889b8c9ce62cf39e4a516511f436cba04520fddd"
}


@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM cqrs_billing_bill_next_token_id WHERE store_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "859920462d8e008b34bfc7467140d382b164f80a255dfe805abcd12cd43e17f3"
}


@@ -1,18 +1,18 @@
{
"db_name": "PostgreSQL",
- "query": "UPDATE\n cqrs_ordering_order_query\n SET\n version = $1,\n customer_name = $2,\n order_id = $3,\n created_time = $4,\n deleted = $5;",
+ "query": "UPDATE\n cqrs_ordering_order_query\n SET\n version = $1,\n customer_name = $2,\n created_time = $3,\n store_id= $4,\n deleted = $5;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
- "Uuid",
"Timestamptz",
+ "Uuid",
"Bool"
]
},
"nullable": []
},
- "hash": "bc7d17aab113d0519c53e5f612116a6e72bb0007a298cdba17f45f4b8bed5f56"
+ "hash": "876ca2c177175439a1604a2d5aeec785e41473116ceda238a8089bcb33588e3d"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_product_customizations_query\n WHERE\n customization_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "89387ed4e97c8c957e576b2753533112bf6ea3eb662460776fcaa625046b7a0d"
}


@@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_ordering_store_query\n SET\n version = $1,\n name = $2,\n address = $3,\n owner = $4,\n deleted = $5;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "8a7958c4f8419e1fd95b2d0c75a3bab76f5962f37e58c7dee4e5f9341dca8c0e"
}


@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
- "query": "UPDATE\n cqrs_inventory_store_query\n SET\n version = $1,\n name = $2,\n address = $3,\n store_id = $4,\n owner = $5,\n deleted = $6;",
+ "query": "UPDATE\n cqrs_inventory_store_query\n SET\n version = $1,\n name = $2,\n address = $3,\n owner = $4,\n deleted = $5;",
"describe": {
"columns": [],
"parameters": {
@@ -9,11 +9,10 @@
"Text",
"Text",
"Uuid",
- "Uuid",
"Bool"
]
},
"nullable": []
},
- "hash": "0ccd86643c13d9d7c793f362aedc2d720d9dda982a0416849bf97291fc645ea5"
+ "hash": "8d62d7b612fd7f323aee586047dec9603f05782f55f72d21fc29954a65622b5d"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n category_id, version\n FROM\n cqrs_ordering_category_query\n WHERE\n category_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "category_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "8f83767550a0efbb13020cff6b0b976d0756b470f448394d4832866b5a209ecd"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_billing_bill_query\n WHERE\n bill_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "91a8472f079a5221a94cdf5bf1a4da078ac5fe27b022d41948ceb5d554b7f0fa"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n product_id, version\n FROM\n cqrs_ordering_product_query\n WHERE\n product_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "product_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "9645865fd899d0bb983bb9da43567e172b2c49855971ee58692bed595d347228"
}


@@ -0,0 +1,17 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_ordering_kot_query\n SET\n version = $1,\n order_id = $2,\n created_time = $3,\n deleted = $4;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Uuid",
"Timestamptz",
"Bool"
]
},
"nullable": []
},
"hash": "97c136fd927b9a153a6093da6594e5fc2837b3943b8bbcbade97920a99019c86"
}


@@ -0,0 +1,26 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_billing_line_item_query\n SET\n version = $1,\n product_name = $2,\n product_id = $3,\n quantity_minor_unit = $4,\n quantity_minor_number = $5,\n quantity_major_unit = $6,\n quantity_major_number = $7,\n created_time = $8,\n bill_id = $9,\n price_per_unit_minor = $10 ,\n price_per_unit_major = $11,\n price_per_unit_currency = $12,\n deleted = $13;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Uuid",
"Text",
"Int4",
"Text",
"Int4",
"Timestamptz",
"Uuid",
"Int4",
"Int4",
"Text",
"Bool"
]
},
"nullable": []
},
"hash": "995cca627c711a87b30723c6ceefd3fcdd1fc63bdcd95f8a974823089652aa51"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_category_query\n WHERE\n category_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "9e05e649d2cc489f8870e888d7a98d78af6a3fc78f860d0e5fc677622a1db9e3"
}


@@ -0,0 +1,27 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_ordering_product_query\n SET\n version = $1,\n name = $2,\n description = $3,\n image = $4,\n category_id = $5,\n price_major = $6,\n price_minor = $7,\n price_currency = $8,\n sku_able = $9,\n quantity_minor_unit = $10,\n quantity_minor_number = $11,\n quantity_major_unit = $12,\n quantity_major_number = $13,\n deleted = $14;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Text",
"Uuid",
"Int4",
"Int4",
"Text",
"Bool",
"Text",
"Int4",
"Text",
"Int4",
"Bool"
]
},
"nullable": []
},
"hash": "a3fa1c6271b85d23d70116363f19144190120c13752364bc3b78a92a08bd9157"
}


@@ -0,0 +1,17 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_ordering_product_customizations_query\n SET\n version = $1,\n name = $2,\n product_id = $3,\n deleted = $4;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "a5a58d14ddbfa78cca3729392faecfab30cc8b01fed9b73b9cc0813750230314"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n line_item_id, version\n FROM\n cqrs_billing_line_item_query\n WHERE\n line_item_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "line_item_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "version",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false
]
},
"hash": "a80999e0bed9e7535187b20eb501258adf1c319543da07a8d79de67394b83eeb"
}


@@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n order_id,\n kot_id,\n created_time,\n deleted\n FROM\n cqrs_ordering_kot_query\n WHERE\n kot_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "order_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "kot_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "created_time",
"type_info": "Timestamptz"
},
{
"ordinal": 3,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "ad00213731142dc3853945e02662a93b1c75304f8fad8922a6d62b5e06bd2490"
}


@@ -0,0 +1,64 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n created_time,\n store_id,\n bill_id,\n token_number,\n total_price_major,\n total_price_minor,\n total_price_currency,\n deleted\n FROM\n cqrs_billing_bill_query\n WHERE\n bill_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "created_time",
"type_info": "Timestamptz"
},
{
"ordinal": 1,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "bill_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "token_number",
"type_info": "Int4"
},
{
"ordinal": 4,
"name": "total_price_major",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "total_price_minor",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "total_price_currency",
"type_info": "Text"
},
{
"ordinal": 7,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false,
true,
true,
true,
false
]
},
"hash": "b0c2747901658847d7c765401de06cba772713dfdc11e30e92ad4842cbbb8a62"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_billing_store_query\n WHERE\n store_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "ba895b4310f182a73d88df08255dc89374f95ea0e3967124cb1e414c52d0428a"
}


@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_billing_line_item_query\n WHERE\n line_item_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "c1308bfe8d0d602e154a7d02df7e229e775326bf554e935b688585d0be253934"
}


@@ -0,0 +1,21 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_billing_bill_query\n SET\n version = $1,\n\n created_time = $2,\n store_id = $3,\n token_number = $4,\n total_price_major = $5,\n total_price_minor = $6,\n total_price_currency = $7,\n deleted = $8;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Timestamptz",
"Uuid",
"Int4",
"Int4",
"Int4",
"Text",
"Bool"
]
},
"nullable": []
},
"hash": "c30f49bb293ca6e184c5110bdfe1108b23bdf71dd904bdd5287155161138565d"
}

View file

@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_inventory_product_query\n SET\n version = $1,\n name = $2,\n description = $3,\n image = $4,\n product_id = $5,\n category_id = $6,\n price_major = $7,\n price_minor = $8,\n price_currency = $9,\n sku_able = $10,\n quantity_minor_unit = $11,\n quantity_minor_number = $12,\n quantity_major_unit = $13,\n quantity_major_number = $14,\n deleted = $15;", "query": "UPDATE\n cqrs_inventory_product_query\n SET\n version = $1,\n name = $2,\n description = $3,\n image = $4,\n category_id = $5,\n price_major = $6,\n price_minor = $7,\n price_currency = $8,\n sku_able = $9,\n quantity_minor_unit = $10,\n quantity_minor_number = $11,\n quantity_major_unit = $12,\n quantity_major_number = $13,\n deleted = $14;",
"describe": { "describe": {
"columns": [], "columns": [],
"parameters": { "parameters": {
@ -10,7 +10,6 @@
"Text", "Text",
"Text", "Text",
"Uuid", "Uuid",
"Uuid",
"Int4", "Int4",
"Int4", "Int4",
"Text", "Text",
@ -24,5 +23,5 @@
}, },
"nullable": [] "nullable": []
}, },
"hash": "e2f9f291a20aac77851774ba8cd37325143a4d98e0980632f097c5885cc71094" "hash": "c358d3b79d35668b3475f29f5bf6767f7209a2443fd944420baf1e1cf5c51ccb"
} }

View file

@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n name, address, store_id, owner, deleted\n FROM\n cqrs_billing_store_query\n WHERE\n store_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "address",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "owner",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true,
false,
false,
false
]
},
"hash": "c3f26f3816104a510a4630d5d3d27178f9bcf4055cbf176b5e8fb2376a5972ba"
}

View file

@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_inventory_category_query\n SET\n version = $1,\n name = $2,\n description = $3,\n category_id = $4,\n store_id = $5,\n deleted = $6;", "query": "UPDATE\n cqrs_inventory_category_query\n SET\n version = $1,\n name = $2,\n description = $3,\n store_id = $4,\n deleted = $5;",
"describe": { "describe": {
"columns": [], "columns": [],
"parameters": { "parameters": {
@ -9,11 +9,10 @@
"Text", "Text",
"Text", "Text",
"Uuid", "Uuid",
"Uuid",
"Bool" "Bool"
] ]
}, },
"nullable": [] "nullable": []
}, },
"hash": "14d111d7453b89f2346966a9fdd725d269ef36288ed5e7fe1f7ad452deaab0e6" "hash": "c9bee14e15dae80b7af9b0fdca43a97b964024252bed2d0caeebee175acd55d9"
} }

View file

@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n name, address, store_id, owner, deleted\n FROM\n cqrs_ordering_store_query\n WHERE\n store_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "address",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "owner",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true,
false,
false,
false
]
},
"hash": "cae7149b31d542cc01d263d682510e60f44de01ccb095e63542c11e5b2386ee5"
}

View file

@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n name, description, store_id, category_id, deleted\n FROM\n cqrs_ordering_category_query\n WHERE\n category_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "description",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "store_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "category_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true,
false,
false,
false
]
},
"hash": "cfab77a90a7a7f3d74b739442d9a75a065ad6a9a7a74432bcc3b0d6802af1eb2"
}

View file

@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_inventory_product_customizations_query\n SET\n version = $1,\n name = $2,\n customization_id = $3,\n product_id = $4,\n deleted = $5;", "query": "UPDATE\n cqrs_inventory_product_customizations_query\n SET\n version = $1,\n name = $2,\n product_id = $3,\n deleted = $4;",
"describe": { "describe": {
"columns": [], "columns": [],
"parameters": { "parameters": {
@ -8,11 +8,10 @@
"Int8", "Int8",
"Text", "Text",
"Uuid", "Uuid",
"Uuid",
"Bool" "Bool"
] ]
}, },
"nullable": [] "nullable": []
}, },
"hash": "e88a5dae732c3f8180664f306b4bb1d21f97a2f1391860eb8714a52ef4439d81" "hash": "d46bb69f4e2afbae01ab08beb48a67b2cade64d9ebd47f3fc6143b057671cc1b"
} }

View file

@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n name,\n customization_id,\n product_id,\n deleted\n FROM\n cqrs_ordering_product_customizations_query\n WHERE\n customization_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "customization_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "product_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "d5eeb278addc02f44a2a799a0ba03226d1f03935d73dc3bfc1af490df4726c78"
}

View file

@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_billing_store_query\n SET\n version = $1,\n name = $2,\n address = $3,\n owner = $4,\n deleted = $5;",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Text",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "d7decc8f70fc4f12d7a1db5009d2190bb9746000067d53990eb7cd646ff5d252"
}

View file

@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO cqrs_ordering_product_customizations_query (\n version,\n name,\n customization_id,\n product_id,\n deleted\n ) VALUES (\n $1, $2, $3, $4, $5\n );",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text",
"Uuid",
"Uuid",
"Bool"
]
},
"nullable": []
},
"hash": "d873aaab136d804c0c0c1744d6914f616f0f8987c81a0d1d3c8d923b580b2ae1"
}

View file

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n token_number\n FROM\n cqrs_billing_bill_next_token_id\n WHERE\n store_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "token_number",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "dbd434995778eab99e753a8fcdce07612061f183c20028ea4995807de6e6fae0"
}

View file

@ -0,0 +1,100 @@
{
"db_name": "PostgreSQL",
"query": "SELECT \n name,\n description,\n image,\n product_id,\n category_id,\n price_major,\n price_minor,\n price_currency,\n sku_able,\n quantity_minor_unit,\n quantity_minor_number,\n quantity_major_unit,\n quantity_major_number,\n deleted\n FROM\n cqrs_ordering_product_query\n WHERE\n product_id = $1;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "description",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "image",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "product_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "category_id",
"type_info": "Uuid"
},
{
"ordinal": 5,
"name": "price_major",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "price_minor",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "price_currency",
"type_info": "Text"
},
{
"ordinal": 8,
"name": "sku_able",
"type_info": "Bool"
},
{
"ordinal": 9,
"name": "quantity_minor_unit",
"type_info": "Text"
},
{
"ordinal": 10,
"name": "quantity_minor_number",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "quantity_major_unit",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "quantity_major_number",
"type_info": "Int4"
},
{
"ordinal": 13,
"name": "deleted",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true,
true,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "ddf0a8ae68a2f52e9ae3723d36983dfb42518b07940e620f4b45eb92b8a54a80"
}

View file

@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE cqrs_billing_bill_next_token_id\n SET\n token_number = $1\n WHERE\n store_id = $2",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int4",
"Uuid"
]
},
"nullable": []
},
"hash": "df0fde518fdde267e2513fdd2c8d35cbdcf0ae01437b469f1acc93ed11702ce7"
}

View file

@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "UPDATE\n cqrs_ordering_line_item_query\n SET\n version = $1,\n product_name = $2,\n product_id = $3,\n line_item_id = $4,\n quantity_minor_unit = $5,\n quantity_minor_number = $6,\n quantity_major_unit = $7,\n quantity_major_number = $8,\n sale_time = $9,\n deleted = $10;", "query": "UPDATE\n cqrs_ordering_line_item_query\n SET\n version = $1,\n product_name = $2,\n product_id = $3,\n quantity_minor_unit = $4,\n quantity_minor_number = $5,\n quantity_major_unit = $6,\n quantity_major_number = $7,\n created_time = $8,\n kot_id = $9,\n deleted = $10;",
"describe": { "describe": {
"columns": [], "columns": [],
"parameters": { "parameters": {
@ -8,16 +8,16 @@
"Int8", "Int8",
"Text", "Text",
"Uuid", "Uuid",
"Uuid",
"Text", "Text",
"Int4", "Int4",
"Text", "Text",
"Int4", "Int4",
"Timestamptz", "Timestamptz",
"Uuid",
"Bool" "Bool"
] ]
}, },
"nullable": [] "nullable": []
}, },
"hash": "724e980a30694acbdc309702e5349ddff36d11757477e5ec130c1f4209e613dc" "hash": "e61db066fe7e66879f8ce8ccd3da94d89c30f8f5d633004ba16a6e971e041a14"
} }

View file

@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_product_query\n WHERE\n name = $1\n AND\n category_id = $2\n AND\n deleted = false\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Text",
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "f72e06dd5ed9f4b5943c53d84cc4730963cb78e0d5954cb0eb4e0b5afb3f36d5"
}

View file

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS (\n SELECT 1\n FROM cqrs_ordering_kot_query\n WHERE\n kot_id = $1\n );",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "ff85b5343820fb3c30eb5e1efbfe1e4d3a9b9a14c070846c8adb1ca0eaee1f37"
}

View file

@ -6,9 +6,9 @@ steps:
environment: environment:
- DATABASE_URL=postgres://postgres:password@database:5432/postgres - DATABASE_URL=postgres://postgres:password@database:5432/postgres
- VANIKAM_email_URL=smtp://admin:password@email:10025 - VANIKAM_email_URL=smtp://admin:password@email:10025
- MAILDEV_URL=http://email:1080
- VANIKAM_meili_API_KEY=5c8eb5f46c148884fb64da09be211a18347fbba24435ca603adc9eba608ba66d - VANIKAM_meili_API_KEY=5c8eb5f46c148884fb64da09be211a18347fbba24435ca603adc9eba608ba66d
- VANIKAM_meili_URL=http://meilisearch:7700 - VANIKAM_meili_URL=http://meilisearch:7700
- MAILPIT_URL=http://email:1080
commands: commands:
# - curl -fsSL https://deb.nodesource.com/setup_16.x | bash - &&\ # - curl -fsSL https://deb.nodesource.com/setup_16.x | bash - &&\
# - apt update && apt-get -y --no-install-recommends install nodejs tar gpg curl wget # - apt update && apt-get -y --no-install-recommends install nodejs tar gpg curl wget
@ -69,15 +69,15 @@ steps:
# secrets: [RELEASE_BOT_GPG_SIGNING_KEY, DUMBSERVE_PASSWORD, GPG_PASSWORD] # secrets: [RELEASE_BOT_GPG_SIGNING_KEY, DUMBSERVE_PASSWORD, GPG_PASSWORD]
# #
services: services:
email: # email:
image: axllent/mailpit # image: axllent/mailpit
environment: # environment:
- MP_SMTP_AUTH=admin:password # - MP_SMTP_AUTH=admin:password
- MP_MAX_MESSAGES=5000 # - MP_MAX_MESSAGES=5000
- MP_SMTP_AUTH_ALLOW_INSECURE=1 # - MP_SMTP_AUTH_ALLOW_INSECURE=1
- MP_SMTP_BIND_ADDR=0.0.0.0:10025 # - MP_SMTP_BIND_ADDR=0.0.0.0:10025
- MP_SMTP_AUTH_ALLOW_INSECURE=true # - MP_SMTP_AUTH_ALLOW_INSECURE=true
- MP_UI_BIND_ADDR=0.0.0.0:1080 # - MP_UI_BIND_ADDR=0.0.0.0:1080
database: database:
@ -86,7 +86,15 @@ services:
- POSTGRES_PASSWORD=password - POSTGRES_PASSWORD=password
meilisearch: meilisearch:
image: getmeili/meilisearch:v1.9 image: getmeili/meilisearch:v1.10
environment: environment:
- MEILI_ENV=development - MEILI_ENV=development
- MEILI_MASTER_KEY=5c8eb5f46c148884fb64da09be211a18347fbba24435ca603adc9eba608ba66d - MEILI_MASTER_KEY=5c8eb5f46c148884fb64da09be211a18347fbba24435ca603adc9eba608ba66d
email:
image: axllent/mailpit
environment:
- MP_SMTP_BIND_ADDR=0.0.0.0:10025
- MP_UI_BIND_ADDR=0.0.0.0:1080
- MP_SMTP_AUTH_ACCEPT_ANY=true
- MP_SMTP_AUTH_ALLOW_INSECURE=true

1281
Cargo.lock generated

File diff suppressed because it is too large

View file

@ -5,24 +5,24 @@ edition = "2021"
[workspace] [workspace]
exclude = ["utils/db-migrations"] #, "utils/cache-bust"] exclude = ["utils/db-migrations"] #, "utils/cache-bust"]
memebers = ["."] members = [".", "mailpit_client"]
[dependencies] [dependencies]
actix-identity = "0.7.1" actix-identity = "0.8.0"
actix-rt = "2.9.0" actix-rt = "2.9.0"
actix-session = { version = "0.9.0", features = ["cookie-session"] } actix-session = { version = "0.10.0", features = ["cookie-session"] }
actix-web = "4.5.1" actix-web = "4.5.1"
argon2-creds = "0.2.3" argon2-creds = "0.2.3"
async-trait = "0.1.80" async-trait = "0.1.80"
config = "0.14.0" config = "0.14.0"
cqrs-es = "0.4.11" cqrs-es = "0.4.11"
derive-getters = "0.4.0" derive-getters = "0.5.0"
derive_builder = "0.20.0" derive_builder = "0.20.0"
derive_more = "0.99.17" derive_more = "0.99.17"
lettre = { version = "0.11.7", features = ["tokio1-rustls-tls", "tracing", "dkim", "tokio1-native-tls", "smtp-transport", "pool", "builder"], default-features = false } lettre = { version = "0.11.7", features = ["tokio1-rustls-tls", "tracing", "dkim", "tokio1-native-tls", "smtp-transport", "pool", "builder"], default-features = false }
log = "0.4.21" log = "0.4.21"
meilisearch-sdk = "0.27.0" meilisearch-sdk = "0.27.0"
mockall = { version = "0.12.1", features = ["nightly"] } mockall = { version = "0.13.0", features = ["nightly"] }
postgres-es = "0.4.11" postgres-es = "0.4.11"
pretty_env_logger = "0.5.0" pretty_env_logger = "0.5.0"
rand = "0.8.5" rand = "0.8.5"
@ -40,3 +40,4 @@ validator = { version = "0.18.1", features = ["derive"] }
[dev-dependencies] [dev-dependencies]
reqwest = { version = "0.12.4", features = ["json"] } reqwest = { version = "0.12.4", features = ["json"] }
mailpit_client = { path = "./mailpit_client" }

View file

@ -66,6 +66,9 @@ env.recreate: ## Deploy dependencies from scratch
sleep 5 sleep 5
$(call run_migrations) $(call run_migrations)
fmt: ## Format codebase
cargo fmt -v --all -- --emit files
lint: ## Lint codebase lint: ## Lint codebase
cargo fmt -v --all -- --emit files cargo fmt -v --all -- --emit files
cargo clippy --workspace --tests --all-features cargo clippy --workspace --tests --all-features

View file

@ -3,11 +3,11 @@
"devenv": { "devenv": {
"locked": { "locked": {
"dir": "src/modules", "dir": "src/modules",
"lastModified": 1720853497, "lastModified": 1726063457,
"owner": "cachix", "owner": "cachix",
"repo": "devenv", "repo": "devenv",
"rev": "7f569a0f2473b9f6000fd9e4c32511fd1b0d37c1", "rev": "39bf6ce569103c9390d37322daa59468c31b3ce7",
"treeHash": "4d452ecc8223834e39d507f9ea92308f007ee05d", "treeHash": "839747a1cb35ba6d5b36cce9a739ab2ba5e4a5d4",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -25,11 +25,11 @@
"rust-analyzer-src": "rust-analyzer-src" "rust-analyzer-src": "rust-analyzer-src"
}, },
"locked": { "locked": {
"lastModified": 1721025077, "lastModified": 1726116637,
"owner": "nix-community", "owner": "nix-community",
"repo": "fenix", "repo": "fenix",
"rev": "22d6920811da3d6f6fbf1efc5af4e9c3e5025d30", "rev": "96a04a213838c5001619ad57400c5a176fa040b1",
"treeHash": "57ea1fdfb5b1ac9bac22f47720339ce91ff094c4", "treeHash": "af83666a28d8bb6aa6d4c0e5f4a2a9c6c158e272",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -93,11 +93,11 @@
}, },
"nixpkgs-stable": { "nixpkgs-stable": {
"locked": { "locked": {
"lastModified": 1720954236, "lastModified": 1725930920,
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "53e81e790209e41f0c1efa9ff26ff2fd7ab35e27", "rev": "44a71ff39c182edaf25a7ace5c9454e7cba2c658",
"treeHash": "ca1f1273cf201da604f7c704535d4b7fac62cdb2", "treeHash": "56e93544112b7bb7aa0c3093d537295683ef9148",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -117,11 +117,11 @@
"nixpkgs-stable": "nixpkgs-stable" "nixpkgs-stable": "nixpkgs-stable"
}, },
"locked": { "locked": {
"lastModified": 1721042469, "lastModified": 1725513492,
"owner": "cachix", "owner": "cachix",
"repo": "pre-commit-hooks.nix", "repo": "pre-commit-hooks.nix",
"rev": "f451c19376071a90d8c58ab1a953c6e9840527fd", "rev": "7570de7b9b504cfe92025dd1be797bf546f66528",
"treeHash": "91f40b7a3b9f6886bd77482cba5b5cd890415a2e", "treeHash": "4b46d77870afecd8f642541cb4f4927326343b59",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -141,11 +141,11 @@
"rust-analyzer-src": { "rust-analyzer-src": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1720953053, "lastModified": 1726144767,
"owner": "rust-lang", "owner": "rust-lang",
"repo": "rust-analyzer", "repo": "rust-analyzer",
"rev": "e9afba57a5a8780285f530172e3ceea1f9c7eff7", "rev": "fd243cd0fbad8297e7a3c86f18aa5f0a110bb85f",
"treeHash": "9947b4a2042c461f30688f8628180d1453f0995d", "treeHash": "8fbbce6f1b56bbcbe27c9af7f6298582c22ae34d",
"type": "github" "type": "github"
}, },
"original": { "original": {

View file

@ -3,25 +3,26 @@ version: "3"
services: services:
email: email:
image: axllent/mailpit image: axllent/mailpit
ports: restart: always
- 1080:1080 container_name: vanigam-dash-maildev
- 10025:10025 network_mode: host
environment: environment:
- MP_SMTP_AUTH=admin:password
- MP_MAX_MESSAGES=5000
- MP_SMTP_AUTH_ALLOW_INSECURE=1
- MP_SMTP_BIND_ADDR=0.0.0.0:10025 - MP_SMTP_BIND_ADDR=0.0.0.0:10025
- MP_SMTP_AUTH_ALLOW_INSECURE=true
- MP_UI_BIND_ADDR=0.0.0.0:1080 - MP_UI_BIND_ADDR=0.0.0.0:1080
- MP_SMTP_AUTH_ACCEPT_ANY=true
- MP_SMTP_AUTH_ALLOW_INSECURE=true
# - MAILDEV_SMTP_PORT=10025
# - MAILDEV_INCOMING_USER=admin
# - MAILDEV_INCOMING_PASS=password
postgres: postgres:
image: postgres:16.3 image: postgres:16.4
network_mode: host network_mode: host
environment: environment:
POSTGRES_PASSWORD: password POSTGRES_PASSWORD: password
meiliserach: meiliserach:
image: getmeili/meilisearch:v1.9 image: getmeili/meilisearch:v1.10
ports: ports:
- 7700:7700 - 7700:7700
environment: environment:

15
mailpit_client/Cargo.toml Normal file
View file

@ -0,0 +1,15 @@
[package]
name = "mailpit_client"
version = "0.1.0"
edition = "2021"
[dependencies]
reqwest = { version = "0.12.4", features = ["json"] }
serde = { version = "1.0.201", features = ["derive"] }
serde_json = "1.0.117"
actix-rt = "2.9.0"
derive-getters = "0.5.0"
derive_more = "0.99.17"
log = "0.4.21"
derive_builder = "0.20.0"
url = { version = "2.5.0", features = ["serde"] }

89
mailpit_client/src/lib.rs Normal file
View file

@ -0,0 +1,89 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use derive_builder::Builder;
use derive_getters::Getters;
#[allow(unused_imports)]
use log::*;
#[allow(unused_imports)]
#[cfg(test)]
use println as info;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Getters, Builder)]
#[serde(rename_all = "PascalCase")]
pub struct MailPitAddress {
address: String,
name: String,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Getters, Builder)]
#[serde(rename_all = "PascalCase")]
pub struct MailPitEmail {
#[serde(rename = "ID")]
id: String,
from: MailPitAddress,
to: Vec<MailPitAddress>,
subject: String,
#[serde(rename = "Snippet")]
text: String,
html: Option<String>,
}
#[derive(Clone, Debug, Getters, Builder)]
pub struct MailPitHTTPClient {
#[builder(default = "Client::default()")]
client: Client,
url: Url,
}
impl MailPitHTTPClient {
pub async fn list_emails(&self) -> Vec<MailPitEmail> {
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
struct List {
messages: Vec<MailPitEmail>,
}
let mut u = self.url.clone();
u.set_path("/api/v1/messages");
info!("trying to fetch emails: {}", u.as_str());
let list: List = self
.client
.get(u.clone())
.send()
.await
.unwrap()
.json()
.await
.unwrap();
list.messages
}
pub async fn get_email_addressed_to(&self, email_address: &str) -> MailPitEmail {
self.list_emails()
.await
.drain(0..)
.find(|e| e.to.iter().any(|f| f.address == email_address))
.unwrap()
}
pub async fn delete_email(&self, email: MailPitEmail) {
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
struct DeletePayload {
#[serde(rename = "IDs")]
id: Vec<String>,
}
let mut u = self.url.clone();
u.set_path("/api/v1/messages/");
info!("Deleting email from: {:?}", email.from);
let payload = DeletePayload { id: vec![email.id] };
self.client.delete(u).json(&payload).send().await.unwrap();
}
}
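
A short usage sketch of the client above, as it might appear in an integration test of the main crate (which pulls mailpit_client in as a dev-dependency). The URL corresponds to the Mailpit UI/API port from the docker-compose and CI configs; the test name and the recipient address are made up for illustration.

use mailpit_client::MailPitHTTPClientBuilder;
use url::Url;

#[actix_rt::test]
async fn mailpit_client_smoke() {
    // Point the client at Mailpit's HTTP API; with the docker-compose file above
    // (network_mode: host) the UI/API listens on http://localhost:1080, and in CI
    // the same endpoint is exposed as MAILPIT_URL.
    let client = MailPitHTTPClientBuilder::default()
        .url(Url::parse("http://localhost:1080").unwrap())
        .build()
        .unwrap();

    // List everything Mailpit has captured so far.
    let all = client.list_emails().await;
    println!("captured {} emails", all.len());

    // Fetch the message sent to a (hypothetical) test recipient, then delete it;
    // this assumes such a message has already been captured.
    let email = client.get_email_addressed_to("test@example.com").await;
    println!("subject: {}", email.subject());
    client.delete_email(email).await;
}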

View file

@ -6,9 +6,11 @@ CREATE TABLE IF NOT EXISTS cqrs_ordering_line_item_query
( (
version bigint CHECK (version >= 0) NOT NULL, version bigint CHECK (version >= 0) NOT NULL,
sale_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL, created_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL,
line_item_id UUID NOT NULL UNIQUE, line_item_id UUID NOT NULL UNIQUE,
kot_id UUID NOT NULL,
product_name TEXT NOT NULL, product_name TEXT NOT NULL,
product_id UUID NOT NULL, product_id UUID NOT NULL,

View file

@ -8,8 +8,9 @@ CREATE TABLE IF NOT EXISTS cqrs_ordering_order_query
created_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL, created_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL,
order_id UUID NOT NULL UNIQUE, order_id UUID NOT NULL UNIQUE,
store_id UUID NOT NULL,
customer_name TEXT NOT NULL, customer_name TEXT,
deleted BOOLEAN NOT NULL DEFAULT FALSE, deleted BOOLEAN NOT NULL DEFAULT FALSE,

View file

@ -0,0 +1,16 @@
-- SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
CREATE TABLE IF NOT EXISTS cqrs_ordering_kot_query
(
version bigint CHECK (version >= 0) NOT NULL,
created_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL,
order_id UUID NOT NULL,
kot_id UUID NOT NULL UNIQUE,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (kot_id)
);

View file

@ -0,0 +1,16 @@
-- SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
CREATE TABLE IF NOT EXISTS cqrs_ordering_store_query
(
version bigint CHECK (version >= 0) NOT NULL,
name TEXT NOT NULL,
address TEXT,
owner UUID NOT NULL,
store_id UUID NOT NULL UNIQUE,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (store_id)
);

View file

@ -0,0 +1,17 @@
-- SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
CREATE TABLE IF NOT EXISTS cqrs_ordering_category_query
(
version bigint CHECK (version >= 0) NOT NULL,
name TEXT NOT NULL,
description TEXT,
store_id UUID NOT NULL,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
category_id UUID NOT NULL UNIQUE,
UNIQUE(store_id, name),
PRIMARY KEY (category_id)
);

View file

@ -0,0 +1,14 @@
-- Add migration script here
CREATE TABLE IF NOT EXISTS cqrs_ordering_product_customizations_query
(
version bigint CHECK (version >= 0) NOT NULL,
name TEXT NOT NULL,
customization_id UUID NOT NULL UNIQUE,
product_id UUID NOT NULL,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
UNIQUE(product_id, name),
PRIMARY KEY (customization_id)
);

View file

@ -0,0 +1,29 @@
-- Add migration script here
CREATE TABLE IF NOT EXISTS cqrs_ordering_product_query
(
version bigint CHECK (version >= 0) NOT NULL,
name TEXT NOT NULL,
description TEXT,
image TEXT,
sku_able BOOLEAN NOT NULL DEFAULT FALSE,
product_id UUID NOT NULL UNIQUE,
price_minor INTEGER NOT NULL,
price_major INTEGER NOT NULL,
price_currency TEXT NOT NULL,
quantity_major_number INTEGER NOT NULL,
quantity_minor_number INTEGER NOT NULL,
quantity_major_unit TEXT NOT NULL,
quantity_minor_unit TEXT NOT NULL,
category_id UUID NOT NULL,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
UNIQUE(category_id, name),
PRIMARY KEY (product_id)
)

View file

@ -0,0 +1,16 @@
-- SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
CREATE TABLE IF NOT EXISTS cqrs_billing_store_query
(
version bigint CHECK (version >= 0) NOT NULL,
name TEXT NOT NULL,
address TEXT,
owner UUID NOT NULL,
store_id UUID NOT NULL UNIQUE,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (store_id)
);

View file

@ -0,0 +1,29 @@
-- SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
CREATE TABLE IF NOT EXISTS cqrs_billing_line_item_query
(
version bigint CHECK (version >= 0) NOT NULL,
created_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL,
line_item_id UUID NOT NULL UNIQUE,
bill_id UUID NOT NULL,
product_name TEXT NOT NULL,
product_id UUID NOT NULL,
quantity_major_number INTEGER NOT NULL,
quantity_minor_number INTEGER NOT NULL,
quantity_major_unit TEXT NOT NULL,
quantity_minor_unit TEXT NOT NULL,
price_per_unit_minor INTEGER NOT NULL,
price_per_unit_major INTEGER NOT NULL,
price_per_unit_currency TEXT NOT NULL,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (line_item_id)
);

View file

@ -0,0 +1,31 @@
-- SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
CREATE TABLE IF NOT EXISTS cqrs_billing_bill_query
(
version bigint CHECK (version >= 0) NOT NULL,
created_time timestamp with time zone DEFAULT (CURRENT_TIMESTAMP) NOT NULL,
bill_id UUID NOT NULL UNIQUE,
store_id UUID NOT NULL,
token_number INTEGER NOT NULL,
total_price_minor INTEGER DEFAULT NULL,
total_price_major INTEGER DEFAULT NULL,
total_price_currency TEXT DEFAULT NULL,
deleted BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (bill_id)
);
CREATE TABLE IF NOT EXISTS cqrs_billing_bill_next_token_id
(
store_id UUID NOT NULL,
token_number INTEGER NOT NULL DEFAULT 1,
PRIMARY KEY (store_id)
);
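
The second table keeps a per-store token counter, which the prepared SELECT and UPDATE statements on cqrs_billing_bill_next_token_id earlier in this diff read and bump. Below is a minimal sketch of how those two statements could be combined; the helper name get_next_token is hypothetical, the crate's real implementation lives in the next_token_id module (not part of this diff), and the sketch assumes a counter row has already been seeded for the store.

use sqlx::PgPool;
use uuid::Uuid;

async fn get_next_token(pool: &PgPool, store_id: Uuid) -> Result<i32, sqlx::Error> {
    // Read the current counter for this store (the column defaults to 1 when seeded).
    let current: Option<i32> = sqlx::query_scalar(
        "SELECT token_number FROM cqrs_billing_bill_next_token_id WHERE store_id = $1;",
    )
    .bind(store_id)
    .fetch_optional(pool)
    .await?;
    let current = current.unwrap_or(1);

    // Persist the incremented counter for the next bill; this is a no-op if no
    // row has been seeded for the store yet.
    sqlx::query("UPDATE cqrs_billing_bill_next_token_id SET token_number = $1 WHERE store_id = $2")
        .bind(current + 1)
        .bind(store_id)
        .execute(pool)
        .await?;

    Ok(current)
}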

View file

@ -0,0 +1,4 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
mod postgres;

View file

@ -0,0 +1,83 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use uuid::Uuid;
use super::BillingDBPostgresAdapter;
use crate::billing::application::port::output::db::{bill_id_exists::*, errors::*};
#[async_trait::async_trait]
impl BillIDExistsDBPort for BillingDBPostgresAdapter {
async fn bill_id_exists(&self, bill_id: &Uuid) -> BillingDBResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (
SELECT 1
FROM cqrs_billing_bill_query
WHERE
bill_id = $1
);",
bill_id
)
.fetch_one(&self.pool)
.await?;
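// sqlx reports EXISTS(...) as nullable, so `exists` comes back as Option<bool>;
// treat a missing value the same as false.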
if let Some(x) = res.exists {
Ok(x)
} else {
Ok(false)
}
}
}
#[cfg(test)]
pub mod tests {
use super::*;
// use crate::billing::domain::add_product_command::tests::get_customizations;
use crate::billing::domain::bill_aggregate::*;
pub async fn create_dummy_bill(bill: &Bill, db: &BillingDBPostgresAdapter) {
sqlx::query!(
"INSERT INTO cqrs_billing_bill_query (
version,
store_id,
bill_id,
token_number,
deleted
) VALUES (
$1, $2, $3, $4, $5
);",
1,
*bill.store_id(),
*bill.bill_id(),
*bill.token_number() as i32,
bill.deleted().clone(),
)
.execute(&db.pool)
.await
.unwrap();
}
#[actix_rt::test]
async fn test_postgres_bill_id_exists() {
let settings = crate::settings::tests::get_settings().await;
settings.create_db().await;
let db = super::BillingDBPostgresAdapter::new(
sqlx::postgres::PgPool::connect(&settings.database.url)
.await
.unwrap(),
);
let bill = Bill::default();
// state doesn't exist
assert!(!db.bill_id_exists(bill.bill_id()).await.unwrap());
create_dummy_bill(&bill, &db).await;
// state exists
assert!(db.bill_id_exists(bill.bill_id()).await.unwrap());
settings.drop_db().await;
}
}

View file

@ -0,0 +1,446 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use std::str::FromStr;
use async_trait::async_trait;
use cqrs_es::persist::{PersistenceError, ViewContext, ViewRepository};
use cqrs_es::{EventEnvelope, Query, View};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use super::errors::*;
use super::BillingDBPostgresAdapter;
use crate::billing::domain::bill_aggregate::{Bill, BillBuilder};
use crate::billing::domain::events::BillingEvent;
use crate::types::currency::{self, Currency, PriceBuilder};
use crate::utils::parse_aggregate_id::parse_aggregate_id;
pub const NEW_BILL_NON_UUID: &str = "billing_new_bill_non_uuid-asdfa";
// The view for a Bill query. For a standard HTTP application this should
// be designed to reflect the response DTO that will be returned to the user.
#[derive(Debug, Serialize, Deserialize)]
pub struct BillView {
created_time: OffsetDateTime,
store_id: Uuid,
bill_id: Uuid,
token_number: i32,
total_price_minor: Option<i32>,
total_price_major: Option<i32>,
total_price_currency: Option<String>,
deleted: bool,
}
impl From<BillView> for Bill {
fn from(v: BillView) -> Self {
let price = match (
v.total_price_minor,
v.total_price_major,
v.total_price_currency,
) {
(Some(minor), Some(major), Some(currency)) => Some(
PriceBuilder::default()
.major(major as usize)
.minor(minor as usize)
.currency(Currency::from_str(&currency).unwrap())
.build()
.unwrap(),
),
_ => None,
};
BillBuilder::default()
.created_time(v.created_time)
.store_id(v.store_id)
.bill_id(v.bill_id)
.token_number(v.token_number as usize)
.total_price(price)
.deleted(v.deleted)
.build()
.unwrap()
}
}
impl Default for BillView {
fn default() -> Self {
Self {
created_time: OffsetDateTime::now_utc(),
store_id: Default::default(),
bill_id: Default::default(),
token_number: Default::default(),
total_price_minor: Default::default(),
total_price_major: Default::default(),
total_price_currency: Default::default(),
deleted: false,
}
}
}
impl BillView {
fn merge(&mut self, bill: &Bill) {
self.created_time = bill.created_time().clone();
self.store_id = *bill.store_id();
self.bill_id = *bill.bill_id();
self.token_number = *bill.token_number() as i32;
self.total_price_minor = bill.total_price().as_ref().map(|t| *t.minor() as i32);
self.total_price_major = bill.total_price().as_ref().map(|t| *t.major() as i32);
self.total_price_currency = bill
.total_price()
.as_ref()
.map(|t| t.currency().to_string());
}
}
// This updates the view with events as they are committed.
// The logic should be minimal here, e.g., don't calculate the account balance;
// design the events to carry the balance information instead.
impl View<Bill> for BillView {
fn update(&mut self, event: &EventEnvelope<Bill>) {
match &event.payload {
BillingEvent::BillAdded(val) => {
self.merge(val.bill());
self.deleted = false;
}
BillingEvent::BillUpdated(e) => self.merge(e.new_bill()),
BillingEvent::BillTotalPriceComputed(e) => {
let total_price = e.total_price().clone();
self.total_price_minor = Some(*total_price.minor() as i32);
self.total_price_major = Some(*total_price.major() as i32);
self.total_price_currency = Some(total_price.currency().to_string());
}
BillingEvent::BillDeleted(_) => self.deleted = true,
_ => (),
}
}
}
#[async_trait]
impl ViewRepository<BillView, Bill> for BillingDBPostgresAdapter {
async fn load(&self, bill_id: &str) -> Result<Option<BillView>, PersistenceError> {
let bill_id = match parse_aggregate_id(bill_id, NEW_BILL_NON_UUID)? {
Some((val, _)) => return Ok(Some(val)),
None => Uuid::parse_str(bill_id).unwrap(),
};
let res = sqlx::query_as!(
BillView,
"SELECT
created_time,
store_id,
bill_id,
token_number,
total_price_major,
total_price_minor,
total_price_currency,
deleted
FROM
cqrs_billing_bill_query
WHERE
bill_id = $1;",
bill_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
Ok(Some(res))
}
async fn load_with_context(
&self,
bill_id: &str,
) -> Result<Option<(BillView, ViewContext)>, PersistenceError> {
let bill_id = match parse_aggregate_id(bill_id, NEW_BILL_NON_UUID)? {
Some(val) => return Ok(Some(val)),
None => Uuid::parse_str(bill_id).unwrap(),
};
let res = sqlx::query_as!(
BillView,
"SELECT
created_time,
store_id,
bill_id,
token_number,
total_price_major,
total_price_minor,
total_price_currency,
deleted
FROM
cqrs_billing_bill_query
WHERE
bill_id = $1;",
&bill_id,
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
struct Context {
version: i64,
bill_id: Uuid,
}
let ctx = sqlx::query_as!(
Context,
"SELECT
bill_id, version
FROM
cqrs_billing_bill_query
WHERE
bill_id = $1;",
bill_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
let view_context = ViewContext::new(ctx.bill_id.to_string(), ctx.version);
Ok(Some((res, view_context)))
}
async fn update_view(
&self,
view: BillView,
context: ViewContext,
) -> Result<(), PersistenceError> {
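// A version of 0 means this view has never been persisted, so INSERT a new row;
// any other version means the row exists, so bump the version and UPDATE it.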
match context.version {
0 => {
let version = context.version + 1;
sqlx::query!(
"INSERT INTO cqrs_billing_bill_query (
version,
created_time,
store_id,
bill_id,
token_number,
total_price_major,
total_price_minor,
total_price_currency,
deleted
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9
);",
version,
view.created_time,
view.store_id,
view.bill_id,
view.token_number,
view.total_price_major,
view.total_price_minor,
view.total_price_currency,
view.deleted,
)
.execute(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
}
_ => {
let version = context.version + 1;
sqlx::query!(
"UPDATE
cqrs_billing_bill_query
SET
version = $1,
created_time = $2,
store_id = $3,
token_number = $4,
total_price_major = $5,
total_price_minor = $6,
total_price_currency = $7,
deleted = $8;",
version,
view.created_time,
view.store_id,
view.token_number,
view.total_price_major,
view.total_price_minor,
view.total_price_currency,
view.deleted,
)
.execute(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
}
}
Ok(())
}
}
pub struct SimpleLoggingQuery {}
// Our simplest query: great for debugging but absolutely useless in production.
// It just pretty-prints the events as they are processed.
#[async_trait]
impl Query<Bill> for SimpleLoggingQuery {
async fn dispatch(&self, aggregate_id: &str, events: &[EventEnvelope<Bill>]) {
for event in events {
let payload = serde_json::to_string_pretty(&event.payload).unwrap();
println!("{}-{}\n{}", aggregate_id, event.sequence, payload);
}
}
}
#[async_trait]
impl Query<Bill> for BillingDBPostgresAdapter {
async fn dispatch(&self, bill_id: &str, events: &[EventEnvelope<Bill>]) {
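// If the view cannot be loaded yet (no row for this bill_id), fall back to a
// fresh default view with version 0 so that update_view performs an INSERT.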
let res = self
.load_with_context(bill_id)
.await
.unwrap_or_else(|_| Some((BillView::default(), ViewContext::new(bill_id.into(), 0))));
let (mut view, view_context): (BillView, ViewContext) = res.unwrap();
for event in events {
view.update(event);
}
self.update_view(view, view_context).await.unwrap();
}
}
#[cfg(test)]
mod tests {
use super::*;
use postgres_es::PostgresCqrs;
use crate::{
billing::{
application::services::{
add_bill_service::AddBillServiceBuilder, update_bill_service::*,
MockBillingServicesInterface,
},
domain::{
add_bill_command::*, commands::BillingCommand, store_aggregate::Store,
update_bill_command::*,
},
},
db::migrate::*,
tests::bdd::*,
utils::uuid::tests::*,
};
use std::sync::Arc;
#[actix_rt::test]
async fn pg_query_billing_bill_view() {
let settings = crate::settings::tests::get_settings().await;
//let settings = crate::settings::Settings::new().unwrap();
settings.create_db().await;
let db = crate::db::sqlx_postgres::Postgres::init(&settings.database.url).await;
db.migrate().await;
let db = BillingDBPostgresAdapter::new(db.pool.clone());
let simple_query = SimpleLoggingQuery {};
let queries: Vec<Box<dyn Query<Bill>>> = vec![Box::new(simple_query), Box::new(db.clone())];
let mut mock_services = MockBillingServicesInterface::new();
let store = Store::default();
crate::billing::adapters::output::db::postgres::store_id_exists::tests::create_dummy_store_record(&store, &db).await;
let db2 = db.clone();
mock_services
.expect_add_bill()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
AddBillServiceBuilder::default()
.db_bill_id_exists(Arc::new(db2.clone()))
.db_next_token_id(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let db2 = db.clone();
mock_services
.expect_update_bill()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
UpdateBillServiceBuilder::default()
.db_bill_id_exists(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let (cqrs, bill_query): (
Arc<PostgresCqrs<Bill>>,
Arc<dyn ViewRepository<BillView, Bill>>,
) = (
Arc::new(postgres_es::postgres_cqrs(
db.pool.clone(),
queries,
Arc::new(mock_services),
)),
Arc::new(db.clone()),
);
let cmd = AddBillCommandBuilder::default()
.adding_by(UUID)
.bill_id(UUID)
.store_id(*store.store_id())
.build()
.unwrap();
cqrs.execute(
&cmd.bill_id().to_string(),
BillingCommand::AddBill(cmd.clone()),
)
.await
.unwrap();
let bill = bill_query
.load(&(*cmd.bill_id()).to_string())
.await
.unwrap()
.unwrap();
let bill: Bill = bill.into();
assert_eq!(bill.store_id(), cmd.store_id());
assert_eq!(bill.bill_id(), cmd.bill_id());
assert!(!bill.deleted());
let update_bill_cmd = UpdateBillCommandBuilder::default()
.adding_by(UUID)
.store_id(*store.store_id())
.total_price(None)
.old_bill(bill.clone())
.build()
.unwrap();
cqrs.execute(
&cmd.bill_id().to_string(),
BillingCommand::UpdateBill(update_bill_cmd.clone()),
)
.await
.unwrap();
let bill = bill_query
.load(&(*cmd.bill_id()).to_string())
.await
.unwrap()
.unwrap();
let bill: Bill = bill.into();
assert_eq!(bill.store_id(), cmd.store_id());
assert_eq!(bill.bill_id(), update_bill_cmd.old_bill().bill_id());
assert_eq!(bill.total_price(), update_bill_cmd.total_price());
assert!(!bill.deleted());
settings.drop_db().await;
}
}

View file

@ -0,0 +1,79 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use std::borrow::Cow;
use cqrs_es::persist::PersistenceError;
use sqlx::Error as SqlxError;
use crate::billing::application::port::output::db::errors::BillingDBError;
impl From<SqlxError> for BillingDBError {
fn from(e: SqlxError) -> Self {
log::error!("[postgres] err: {}", e);
if let SqlxError::Database(err) = e {
if err.code() == Some(Cow::from("23505")) {
let msg = err.message();
if msg.contains("cqrs_inventory_store_query_store_id_key") {
return Self::DuplicateStoreID;
} else {
println!("{msg}");
}
}
}
Self::InternalError
}
}
/// Map a custom row-not-found error to a DB error
pub fn map_row_not_found_err(e: SqlxError, row_not_found: BillingDBError) -> BillingDBError {
if let SqlxError::RowNotFound = e {
row_not_found
} else {
e.into()
}
}
#[derive(Debug)]
pub enum PostgresAggregateError {
OptimisticLock,
ConnectionError(Box<dyn std::error::Error + Send + Sync + 'static>),
DeserializationError(Box<dyn std::error::Error + Send + Sync + 'static>),
UnknownError(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl From<SqlxError> for PostgresAggregateError {
fn from(err: SqlxError) -> Self {
// TODO: improve error handling
match &err {
SqlxError::Database(database_error) => {
if let Some(code) = database_error.code() {
if code.as_ref() == "23505" {
return PostgresAggregateError::OptimisticLock;
}
}
PostgresAggregateError::UnknownError(Box::new(err))
}
SqlxError::Io(_) | SqlxError::Tls(_) => {
PostgresAggregateError::ConnectionError(Box::new(err))
}
_ => PostgresAggregateError::UnknownError(Box::new(err)),
}
}
}
impl From<PostgresAggregateError> for PersistenceError {
fn from(err: PostgresAggregateError) -> Self {
match err {
PostgresAggregateError::OptimisticLock => PersistenceError::OptimisticLockError,
PostgresAggregateError::ConnectionError(error) => {
PersistenceError::ConnectionError(error)
}
PostgresAggregateError::DeserializationError(error) => {
PersistenceError::UnknownError(error)
}
PostgresAggregateError::UnknownError(error) => PersistenceError::UnknownError(error),
}
}
}
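
A sketch of how map_row_not_found_err is intended to be used at a query site, so that a missing row maps to a specific port error instead of the generic InternalError produced by the blanket From<SqlxError> impl above. The method name load_bill_version and the BillingDBError::BillIDNotFound variant are placeholders for illustration; the port's real error variants are defined elsewhere in the crate, and such a helper would sit in one of the sibling postgres adapter modules.

use uuid::Uuid;

use super::errors::map_row_not_found_err;
use super::BillingDBPostgresAdapter;
use crate::billing::application::port::output::db::errors::*;

impl BillingDBPostgresAdapter {
    // Hypothetical helper showing the intended call pattern for map_row_not_found_err.
    async fn load_bill_version(&self, bill_id: &Uuid) -> BillingDBResult<i64> {
        let row = sqlx::query!(
            "SELECT version FROM cqrs_billing_bill_query WHERE bill_id = $1;",
            bill_id
        )
        .fetch_one(&self.pool)
        .await
        // Substitute whichever "not found" variant the billing DB port defines.
        .map_err(|e| map_row_not_found_err(e, BillingDBError::BillIDNotFound))?;
        Ok(row.version)
    }
}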

View file

@ -0,0 +1,144 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use uuid::Uuid;
use super::BillingDBPostgresAdapter;
use crate::billing::{
application::port::output::db::{errors::*, get_line_items_for_bill_id::*},
domain::line_item_aggregate::LineItem,
};
#[async_trait::async_trait]
impl GetLineItemsForBillIDDBPort for BillingDBPostgresAdapter {
async fn get_line_items_for_bill_id(&self, bill_id: Uuid) -> BillingDBResult<Vec<LineItem>> {
let mut res = sqlx::query_as!(
super::line_item_view::LineItemView,
"SELECT
product_name,
product_id,
line_item_id,
quantity_minor_unit,
quantity_minor_number,
quantity_major_unit,
quantity_major_number,
created_time,
bill_id,
price_per_unit_minor,
price_per_unit_major,
price_per_unit_currency,
deleted
FROM cqrs_billing_line_item_query
WHERE
bill_id = $1;",
bill_id
)
.fetch_all(&self.pool)
.await?;
println!("Got len: {}", res.len());
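// Convert each LineItemView row into a domain LineItem via the From<LineItemView>
// impl defined in line_item_view.rs.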
let mut output = Vec::with_capacity(res.len());
res.drain(0..).for_each(|r| output.push(r.into()));
Ok(output)
}
}
#[cfg(test)]
pub mod tests {
use super::*;
// use crate::billing::domain::add_product_command::tests::get_customizations;
use crate::{
billing::{
adapters::output::db::postgres::line_item_view::LineItemView, domain::bill_aggregate::*,
},
types::currency::*,
types::quantity::*,
utils::uuid::{tests::*, *},
};
async fn create_dummy_line_item(
db: &BillingDBPostgresAdapter,
bill_id: Uuid,
line_item_id: Uuid,
) {
let view = LineItemView::default();
let version = 0;
sqlx::query!(
"INSERT INTO cqrs_billing_line_item_query (
version,
product_name,
product_id,
line_item_id,
quantity_minor_unit,
quantity_minor_number,
quantity_major_unit,
quantity_major_number,
created_time,
bill_id,
price_per_unit_minor,
price_per_unit_major,
price_per_unit_currency,
deleted
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14
);",
version,
view.product_name,
view.product_id,
line_item_id,
QuantityUnit::DiscreteNumber.to_string(),
view.quantity_minor_number,
QuantityUnit::DiscreteNumber.to_string(),
view.quantity_major_number,
view.created_time,
bill_id,
view.price_per_unit_minor,
view.price_per_unit_major,
Currency::INR.to_string(),
view.deleted,
)
.execute(&db.pool)
.await
.unwrap();
}
#[actix_rt::test]
async fn test_postgres_get_line_items_for_bill_id() {
let settings = crate::settings::tests::get_settings().await;
settings.create_db().await;
let db = super::BillingDBPostgresAdapter::new(
sqlx::postgres::PgPool::connect(&settings.database.url)
.await
.unwrap(),
);
let bill_id = UUID;
// state doesn't exist
assert!(db
.get_line_items_for_bill_id(bill_id)
.await
.unwrap()
.is_empty());
let u = GenerateUUID;
let li_id_1 = u.get_uuid();
let li_id_2 = u.get_uuid();
create_dummy_line_item(&db, bill_id, li_id_1).await;
create_dummy_line_item(&db, bill_id, li_id_2).await;
// state exists
let res = db.get_line_items_for_bill_id(bill_id).await.unwrap();
assert_eq!(res.len(), 2);
assert!(res
.iter()
.any(|li| *li.bill_id() == bill_id && *li.line_item_id() == li_id_1));
assert!(res
.iter()
.any(|li| *li.bill_id() == bill_id && *li.line_item_id() == li_id_2));
settings.drop_db().await;
}
}

View file

@ -0,0 +1,109 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use uuid::Uuid;
use super::BillingDBPostgresAdapter;
use crate::billing::application::port::output::db::{errors::*, line_item_id_exists::*};
#[async_trait::async_trait]
impl LineItemIDExistsDBPort for BillingDBPostgresAdapter {
async fn line_item_id_exists(&self, line_item_id: &Uuid) -> BillingDBResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (
SELECT 1
FROM cqrs_billing_line_item_query
WHERE
line_item_id = $1
);",
line_item_id
)
.fetch_one(&self.pool)
.await?;
if let Some(x) = res.exists {
Ok(x)
} else {
Ok(false)
}
}
}
#[cfg(test)]
pub mod tests {
use super::*;
// use crate::billing::domain::add_product_command::tests::get_customizations;
use crate::billing::domain::line_item_aggregate::*;
async fn create_dummy_line_item(line_item: &LineItem, db: &BillingDBPostgresAdapter) {
sqlx::query!(
"INSERT INTO cqrs_billing_line_item_query (
version,
product_name,
product_id,
line_item_id,
quantity_minor_unit,
quantity_minor_number,
quantity_major_unit,
quantity_major_number,
bill_id,
price_per_unit_minor,
price_per_unit_major,
price_per_unit_currency,
deleted
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13
);",
1,
line_item.product_name(),
line_item.product_id(),
line_item.line_item_id(),
line_item.quantity().minor().unit().to_string(),
line_item.quantity().minor().number().clone() as i32,
line_item.quantity().major().unit().to_string(),
line_item.quantity().major().number().clone() as i32,
line_item.bill_id(),
*line_item.price_per_unit().minor() as i32,
*line_item.price_per_unit().major() as i32,
line_item.price_per_unit().currency().to_string(),
line_item.deleted().clone(),
)
.execute(&db.pool)
.await
.unwrap();
}
#[actix_rt::test]
async fn test_postgres_line_item_id_exists() {
let settings = crate::settings::tests::get_settings().await;
settings.create_db().await;
let db = super::BillingDBPostgresAdapter::new(
sqlx::postgres::PgPool::connect(&settings.database.url)
.await
.unwrap(),
);
let line_item = LineItem::default();
// state doesn't exist
assert!(!db
.line_item_id_exists(line_item.line_item_id())
.await
.unwrap());
create_dummy_line_item(&line_item, &db).await;
// state exists
assert!(db
.line_item_id_exists(line_item.line_item_id())
.await
.unwrap());
settings.drop_db().await;
}
}

View file

@ -0,0 +1,576 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use std::str::FromStr;
use async_trait::async_trait;
use cqrs_es::persist::{PersistenceError, ViewContext, ViewRepository};
use cqrs_es::{EventEnvelope, Query, View};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use super::errors::*;
use super::BillingDBPostgresAdapter;
use crate::billing::domain::events::BillingEvent;
use crate::billing::domain::line_item_aggregate::*;
use crate::types::currency::*;
use crate::types::quantity::*;
use crate::utils::parse_aggregate_id::parse_aggregate_id;
pub const NEW_LINE_ITEM_NON_UUID: &str = "new_line_item_non_uuid-asdfa-billing";
// The view for a LineItem query. For a standard HTTP application this should
// be designed to reflect the response DTO that will be returned to the user.
#[derive(Debug, Serialize, Deserialize)]
pub struct LineItemView {
pub product_name: String,
pub product_id: Uuid,
pub bill_id: Uuid,
pub created_time: OffsetDateTime,
pub line_item_id: Uuid,
pub quantity_major_number: i32,
pub quantity_minor_number: i32,
pub quantity_major_unit: String,
pub quantity_minor_unit: String,
pub price_per_unit_major: i32,
pub price_per_unit_minor: i32,
pub price_per_unit_currency: String,
pub deleted: bool,
}
impl Default for LineItemView {
fn default() -> Self {
Self {
created_time: OffsetDateTime::now_utc(),
product_name: Default::default(),
product_id: Default::default(),
bill_id: Default::default(),
line_item_id: Default::default(),
price_per_unit_minor: Default::default(),
price_per_unit_major: Default::default(),
price_per_unit_currency: Default::default(),
quantity_major_number: Default::default(),
quantity_minor_number: Default::default(),
quantity_major_unit: Default::default(),
quantity_minor_unit: Default::default(),
deleted: false,
}
}
}
impl From<LineItemView> for LineItem {
fn from(v: LineItemView) -> Self {
let quantity = QuantityBuilder::default()
.minor(
QuantityPartBuilder::default()
.number(v.quantity_minor_number as usize)
.unit(QuantityUnit::from_str(&v.quantity_minor_unit).unwrap())
.build()
.unwrap(),
)
.major(
QuantityPartBuilder::default()
.number(v.quantity_major_number as usize)
.unit(QuantityUnit::from_str(&v.quantity_major_unit).unwrap())
.build()
.unwrap(),
)
.build()
.unwrap();
let price_per_unit = PriceBuilder::default()
.minor(v.price_per_unit_minor as usize)
.major(v.price_per_unit_major as usize)
.currency(Currency::from_str(&v.price_per_unit_currency).unwrap())
.build()
.unwrap();
LineItemBuilder::default()
.product_name(v.product_name)
.line_item_id(v.line_item_id)
.quantity(quantity)
.created_time(v.created_time)
.product_id(v.product_id)
.price_per_unit(price_per_unit)
.bill_id(v.bill_id)
.deleted(v.deleted)
.build()
.unwrap()
}
}
// This updates the view with events as they are committed.
// The logic should be minimal here, e.g., don't calculate the account balance;
// design the events to carry the balance information instead.
impl View<LineItem> for LineItemView {
fn update(&mut self, event: &EventEnvelope<LineItem>) {
match &event.payload {
BillingEvent::LineItemAdded(val) => {
self.product_name = val.line_item().product_name().into();
self.product_id = *val.line_item().product_id();
self.line_item_id = *val.line_item().line_item_id();
self.quantity_major_number = *val.line_item().quantity().major().number() as i32;
self.quantity_minor_number = *val.line_item().quantity().minor().number() as i32;
self.quantity_major_unit = val.line_item().quantity().major().unit().to_string();
self.quantity_minor_unit = val.line_item().quantity().minor().unit().to_string();
self.price_per_unit_major = *val.line_item().price_per_unit().major() as i32;
self.price_per_unit_minor = *val.line_item().price_per_unit().minor() as i32;
self.price_per_unit_currency =
val.line_item().price_per_unit().currency().to_string();
self.created_time = val.line_item().created_time().clone();
self.bill_id = *val.line_item().bill_id();
self.deleted = false;
}
BillingEvent::LineItemUpdated(val) => {
let new = val.new_line_item();
self.product_name = new.product_name().into();
self.product_id = *new.product_id();
self.line_item_id = *new.line_item_id();
self.quantity_major_number = *new.quantity().major().number() as i32;
self.quantity_minor_number = *new.quantity().minor().number() as i32;
self.quantity_major_unit = new.quantity().major().unit().to_string();
self.quantity_minor_unit = new.quantity().minor().unit().to_string();
self.price_per_unit_major = *new.price_per_unit().major() as i32;
self.price_per_unit_minor = *new.price_per_unit().minor() as i32;
self.price_per_unit_currency = new.price_per_unit().currency().to_string();
self.created_time = new.created_time().clone();
self.bill_id = *new.bill_id();
}
BillingEvent::LineItemDeleted(_) => self.deleted = true,
_ => (),
}
}
}
#[async_trait]
impl ViewRepository<LineItemView, LineItem> for BillingDBPostgresAdapter {
async fn load(&self, line_item_id: &str) -> Result<Option<LineItemView>, PersistenceError> {
let line_item_id = match parse_aggregate_id(line_item_id, NEW_LINE_ITEM_NON_UUID)? {
Some((val, _)) => return Ok(Some(val)),
None => Uuid::parse_str(line_item_id).unwrap(),
};
let res = sqlx::query_as!(
LineItemView,
"SELECT
product_name,
product_id,
line_item_id,
quantity_minor_unit,
quantity_minor_number,
quantity_major_unit,
quantity_major_number,
created_time,
bill_id,
price_per_unit_minor,
price_per_unit_major,
price_per_unit_currency,
deleted
FROM
cqrs_billing_line_item_query
WHERE
line_item_id = $1;",
line_item_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
Ok(Some(res))
}
async fn load_with_context(
&self,
line_item_id: &str,
) -> Result<Option<(LineItemView, ViewContext)>, PersistenceError> {
let line_item_id = match parse_aggregate_id(line_item_id, NEW_LINE_ITEM_NON_UUID)? {
Some(val) => return Ok(Some(val)),
None => Uuid::parse_str(line_item_id).unwrap(),
};
let res = sqlx::query_as!(
LineItemView,
"SELECT
product_name,
product_id,
line_item_id,
quantity_minor_unit,
quantity_minor_number,
quantity_major_unit,
quantity_major_number,
created_time,
bill_id,
price_per_unit_minor,
price_per_unit_major,
price_per_unit_currency,
deleted
FROM
cqrs_billing_line_item_query
WHERE
line_item_id = $1;",
line_item_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
struct Context {
version: i64,
line_item_id: Uuid,
}
let ctx = sqlx::query_as!(
Context,
"SELECT
line_item_id, version
FROM
cqrs_billing_line_item_query
WHERE
line_item_id = $1;",
line_item_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
let view_context = ViewContext::new(ctx.line_item_id.to_string(), ctx.version);
Ok(Some((res, view_context)))
}
async fn update_view(
&self,
view: LineItemView,
context: ViewContext,
) -> Result<(), PersistenceError> {
match context.version {
0 => {
let version = context.version + 1;
sqlx::query!(
"INSERT INTO cqrs_billing_line_item_query (
version,
product_name,
product_id,
line_item_id,
quantity_minor_unit,
quantity_minor_number,
quantity_major_unit,
quantity_major_number,
created_time,
bill_id,
price_per_unit_minor,
price_per_unit_major,
price_per_unit_currency,
deleted
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14
);",
version,
view.product_name,
view.product_id,
view.line_item_id,
view.quantity_minor_unit,
view.quantity_minor_number,
view.quantity_major_unit,
view.quantity_major_number,
view.created_time,
view.bill_id,
view.price_per_unit_minor,
view.price_per_unit_major,
view.price_per_unit_currency,
view.deleted,
)
.execute(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
}
_ => {
let version = context.version + 1;
sqlx::query!(
"UPDATE
cqrs_billing_line_item_query
SET
version = $1,
product_name = $2,
product_id = $3,
quantity_minor_unit = $4,
quantity_minor_number = $5,
quantity_major_unit = $6,
quantity_major_number = $7,
created_time = $8,
bill_id = $9,
price_per_unit_minor = $10 ,
price_per_unit_major = $11,
price_per_unit_currency = $12,
deleted = $13;",
version,
view.product_name,
view.product_id,
view.quantity_minor_unit,
view.quantity_minor_number,
view.quantity_major_unit,
view.quantity_major_number,
view.created_time,
view.bill_id,
view.price_per_unit_minor,
view.price_per_unit_major,
view.price_per_unit_currency,
view.deleted,
)
.execute(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
}
}
Ok(())
}
}
#[async_trait]
impl Query<LineItem> for BillingDBPostgresAdapter {
async fn dispatch(&self, line_item_id: &str, events: &[EventEnvelope<LineItem>]) {
let res = self
.load_with_context(line_item_id)
.await
.unwrap_or_else(|_| {
Some((
LineItemView::default(),
ViewContext::new(line_item_id.into(), 0),
))
});
let (mut view, view_context): (LineItemView, ViewContext) = res.unwrap();
for event in events {
view.update(event);
}
self.update_view(view, view_context).await.unwrap();
}
}
#[cfg(test)]
mod tests {
use super::*;
use postgres_es::PostgresCqrs;
use crate::{
billing::{
application::services::{
add_line_item_service::AddLineItemServiceBuilder, delete_line_item_service::*,
update_line_item_service::*, MockBillingServicesInterface,
},
domain::{
add_line_item_command::*, bill_aggregate::Bill, commands::BillingCommand,
delete_line_item_command::DeleteLineItemCommandBuilder,
update_line_item_command::*,
},
},
db::migrate::*,
tests::bdd::*,
types::quantity::*,
utils::{
random_string::GenerateRandomStringInterface,
uuid::{tests::UUID, *},
},
};
use std::sync::Arc;
#[actix_rt::test]
async fn pg_query_billing_line_item_view() {
let settings = crate::settings::tests::get_settings().await;
//let settings = crate::settings::Settings::new().unwrap();
settings.create_db().await;
let db = crate::db::sqlx_postgres::Postgres::init(&settings.database.url).await;
db.migrate().await;
let db = BillingDBPostgresAdapter::new(db.pool.clone());
let queries: Vec<Box<dyn Query<LineItem>>> = vec![Box::new(db.clone())];
let mut mock_services = MockBillingServicesInterface::new();
let bill = Bill::default();
crate::billing::adapters::output::db::postgres::bill_id_exists::tests::create_dummy_bill(
&bill, &db,
)
.await;
let db2 = db.clone();
mock_services
.expect_add_line_item()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
AddLineItemServiceBuilder::default()
.db_line_item_id_exists(Arc::new(db2.clone()))
.db_bill_id_exists(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let db2 = db.clone();
mock_services
.expect_update_line_item()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
UpdateLineItemServiceBuilder::default()
.db_line_item_id_exists(Arc::new(db2.clone()))
.db_bill_id_exists(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let db2 = db.clone();
mock_services
.expect_delete_line_item()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
DeleteLineItemServiceBuilder::default()
.db_line_item_id_exists(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let (cqrs, line_item_query): (
Arc<PostgresCqrs<LineItem>>,
Arc<dyn ViewRepository<LineItemView, LineItem>>,
) = (
Arc::new(postgres_es::postgres_cqrs(
db.pool.clone(),
queries,
Arc::new(mock_services),
)),
Arc::new(db.clone()),
);
let rand = crate::utils::random_string::GenerateRandomString {};
let uuid = GenerateUUID {};
let line_item_id = uuid.get_uuid();
let cmd = AddLineItemCommandBuilder::default()
.product_name(rand.get_random(10))
.adding_by(UUID)
.price_per_unit(Price::default())
.quantity(Quantity::get_quantity())
.product_id(UUID)
.bill_id(*bill.bill_id())
.line_item_id(line_item_id)
.build()
.unwrap();
cqrs.execute(
&cmd.line_item_id().to_string(),
BillingCommand::AddLineItem(cmd.clone()),
)
.await
.unwrap();
let line_item = line_item_query
.load(&(*cmd.line_item_id()).to_string())
.await
.unwrap()
.unwrap();
let line_item: LineItem = line_item.into();
assert_eq!(line_item.line_item_id(), cmd.line_item_id());
assert_eq!(line_item.product_name(), cmd.product_name());
assert_eq!(line_item.product_id(), cmd.product_id());
assert_eq!(line_item.quantity(), cmd.quantity());
assert!(!line_item.deleted());
let update_line_item_cmd = UnvalidatedUpdateLineItemCommandBuilder::default()
.product_name(rand.get_random(10))
.adding_by(UUID)
.quantity(Quantity::get_quantity())
.product_id(UUID)
.bill_id(*bill.bill_id())
.old_line_item(line_item.clone())
.price_per_unit(Price::default())
.build()
.unwrap()
.validate()
.unwrap();
cqrs.execute(
&cmd.line_item_id().to_string(),
BillingCommand::UpdateLineItem(update_line_item_cmd.clone()),
)
.await
.unwrap();
let line_item = line_item_query
.load(&(*cmd.line_item_id()).to_string())
.await
.unwrap()
.unwrap();
let line_item: LineItem = line_item.into();
assert_eq!(
line_item.line_item_id(),
update_line_item_cmd.old_line_item().line_item_id()
);
assert_eq!(
line_item.product_name(),
update_line_item_cmd.product_name()
);
assert_eq!(line_item.product_id(), update_line_item_cmd.product_id());
assert_eq!(line_item.quantity(), update_line_item_cmd.quantity());
assert!(!line_item.deleted());
// delete
let delete_line_item_cmd = DeleteLineItemCommandBuilder::default()
.line_item(line_item.clone())
.adding_by(UUID)
.build()
.unwrap();
cqrs.execute(
&cmd.line_item_id().to_string(),
BillingCommand::DeleteLineItem(delete_line_item_cmd.clone()),
)
.await
.unwrap();
let deleted_line_item = line_item_query
.load(&(*cmd.line_item_id()).to_string())
.await
.unwrap()
.unwrap();
let deleted_line_item: LineItem = deleted_line_item.into();
assert_eq!(
deleted_line_item.line_item_id(),
delete_line_item_cmd.line_item().line_item_id()
);
assert_eq!(
deleted_line_item.product_name(),
delete_line_item_cmd.line_item().product_name()
);
assert_eq!(
deleted_line_item.product_id(),
delete_line_item_cmd.line_item().product_id()
);
assert_eq!(
deleted_line_item.quantity(),
delete_line_item_cmd.line_item().quantity()
);
assert!(deleted_line_item.deleted());
settings.drop_db().await;
}
}

View file

@ -0,0 +1,34 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use std::sync::Arc;
use sqlx::postgres::PgPool;
use crate::db::{migrate::RunMigrations, sqlx_postgres::Postgres};
mod bill_id_exists;
mod bill_view;
mod errors;
mod get_line_items_for_bill_id;
mod line_item_id_exists;
mod line_item_view;
mod next_token_id;
mod store_id_exists;
mod store_name_exists;
mod store_view;
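// Postgres adapter backing the billing module's output DB ports and cqrs-es
// view repositories. Clone is cheap: PgPool is a reference-counted handle to
// the underlying connection pool.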
#[derive(Clone)]
pub struct BillingDBPostgresAdapter {
pool: PgPool,
}
impl BillingDBPostgresAdapter {
pub fn new(pool: PgPool) -> Self {
Self { pool }
}
pub fn migratable(&self) -> Arc<dyn RunMigrations> {
Arc::new(Postgres::new(self.pool.clone()))
}
}

View file

@ -0,0 +1,111 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use uuid::Uuid;
use super::BillingDBPostgresAdapter;
use crate::billing::application::port::output::db::{errors::*, next_token_id::*};
struct TokenNumber {
token_number: i32,
}
#[async_trait::async_trait]
impl NextTokenIDDBPort for BillingDBPostgresAdapter {
/// Delete the record for store_id so that the next call to next_token_id
/// recreates it and returns 1.
async fn reset(&self, store_id: &Uuid) -> BillingDBResult<()> {
sqlx::query!(
"DELETE FROM cqrs_billing_bill_next_token_id WHERE store_id = $1",
store_id
)
.execute(&self.pool)
.await?;
Ok(())
}
/// Increment the token ID for the store and return the new (incremented) value.
async fn next_token_id(&self, store_id: &Uuid) -> BillingDBResult<usize> {
let res = sqlx::query!(
"SELECT EXISTS (
SELECT 1
FROM cqrs_billing_bill_next_token_id
WHERE
store_id = $1
);",
store_id
)
.fetch_one(&self.pool)
.await?;
if let Some(true) = res.exists {
let res = sqlx::query_as!(
TokenNumber,
"SELECT
token_number
FROM
cqrs_billing_bill_next_token_id
WHERE
store_id = $1;",
store_id
)
.fetch_one(&self.pool)
.await?;
sqlx::query!(
"UPDATE cqrs_billing_bill_next_token_id
SET
token_number = $1
WHERE
store_id = $2",
res.token_number + 1,
store_id
)
.execute(&self.pool)
.await?;
Ok(res.token_number as usize + 1)
} else {
sqlx::query!(
"INSERT INTO cqrs_billing_bill_next_token_id
(token_number, store_id)
VALUES
($1, $2);",
1,
store_id
)
.execute(&self.pool)
.await?;
Ok(1)
}
}
}
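The branch above reads the current token_number and then writes the increment in a second statement. As a minimal, hypothetical sketch only (not part of this changeset), the same increment-and-return could be collapsed into a single upsert, assuming the table has a UNIQUE constraint on store_id so that ON CONFLICT can target it:
// Hypothetical single-statement variant of next_token_id; the function name,
// signature, and the UNIQUE(store_id) constraint are assumptions, not code
// from this repository.
async fn next_token_id_atomic(
    pool: &sqlx::PgPool,
    store_id: &uuid::Uuid,
) -> Result<usize, sqlx::Error> {
    let token: i32 = sqlx::query_scalar(
        "INSERT INTO cqrs_billing_bill_next_token_id (token_number, store_id)
        VALUES (1, $1)
        ON CONFLICT (store_id) DO UPDATE
        SET token_number = cqrs_billing_bill_next_token_id.token_number + 1
        RETURNING token_number;",
    )
    .bind(store_id)
    .fetch_one(pool)
    .await?;
    Ok(token as usize)
}
Under concurrent bill creation, a single statement like this would avoid the window between the SELECT and the UPDATE in the implementation above.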
#[cfg(test)]
pub mod tests {
use super::*;
// use crate::billing::domain::add_product_command::tests::get_customizations;
use crate::{billing::domain::bill_aggregate::*, utils::uuid::tests::UUID};
#[actix_rt::test]
async fn test_postgres_next_token_id() {
let settings = crate::settings::tests::get_settings().await;
settings.create_db().await;
let db = super::BillingDBPostgresAdapter::new(
sqlx::postgres::PgPool::connect(&settings.database.url)
.await
.unwrap(),
);
let store_id = UUID;
assert_eq!(db.next_token_id(&store_id).await.unwrap(), 1);
assert_eq!(db.next_token_id(&store_id).await.unwrap(), 2);
assert!(db.reset(&store_id).await.is_ok());
assert_eq!(db.next_token_id(&store_id).await.unwrap(), 1);
settings.drop_db().await;
}
}

View file

@ -0,0 +1,87 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use uuid::Uuid;
use super::BillingDBPostgresAdapter;
use crate::billing::application::port::output::db::{errors::*, store_id_exists::*};
use crate::billing::domain::store_aggregate::*;
#[async_trait::async_trait]
impl StoreIDExistsDBPort for BillingDBPostgresAdapter {
async fn store_id_exists(&self, store_id: &Uuid) -> BillingDBResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (
SELECT 1
FROM cqrs_billing_store_query
WHERE
store_id = $1
);",
store_id
)
.fetch_one(&self.pool)
.await?;
if let Some(x) = res.exists {
Ok(x)
} else {
Ok(false)
}
}
}
#[cfg(test)]
pub mod tests {
use uuid::Uuid;
use crate::utils::uuid::tests::UUID;
use super::*;
pub async fn create_dummy_store_record(s: &Store, db: &BillingDBPostgresAdapter) {
sqlx::query!(
"INSERT INTO cqrs_billing_store_query
(version, name, address, store_id, owner, deleted)
VALUES ($1, $2, $3, $4, $5, $6);",
1,
s.name(),
s.address().as_ref().map(|s| s.as_str()),
s.store_id(),
s.owner(),
false
)
.execute(&db.pool)
.await
.unwrap();
}
#[actix_rt::test]
async fn test_postgres_store_exists() {
let store_id = Uuid::new_v4();
let settings = crate::settings::tests::get_settings().await;
settings.create_db().await;
let db = super::BillingDBPostgresAdapter::new(
sqlx::postgres::PgPool::connect(&settings.database.url)
.await
.unwrap(),
);
let store = StoreBuilder::default()
.name("store_name".into())
.owner(UUID)
.address(Some("store_address".into()))
.store_id(store_id)
.build()
.unwrap();
// state doesn't exist
assert!(!db.store_id_exists(store.store_id()).await.unwrap());
create_dummy_store_record(&store, &db).await;
// state exists
assert!(db.store_id_exists(store.store_id()).await.unwrap());
settings.drop_db().await;
}
}

View file

@ -0,0 +1,81 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use super::BillingDBPostgresAdapter;
use crate::billing::application::port::output::db::{errors::*, store_name_exists::*};
use crate::billing::domain::store_aggregate::*;
#[async_trait::async_trait]
impl StoreNameExistsDBPort for BillingDBPostgresAdapter {
async fn store_name_exists(&self, s: &Store) -> BillingDBResult<bool> {
let res = sqlx::query!(
"SELECT EXISTS (
SELECT 1
FROM cqrs_billing_store_query
WHERE
name = $1
AND
deleted = false
);",
s.name(),
)
.fetch_one(&self.pool)
.await?;
if let Some(x) = res.exists {
Ok(x)
} else {
Ok(false)
}
}
}
#[cfg(test)]
mod tests {
use uuid::Uuid;
use crate::utils::uuid::tests::UUID;
use super::*;
use crate::billing::adapters::output::db::postgres::store_id_exists::tests::create_dummy_store_record;
#[actix_rt::test]
async fn test_postgres_store_exists() {
let store_id = Uuid::new_v4();
let settings = crate::settings::tests::get_settings().await;
settings.create_db().await;
let db = super::BillingDBPostgresAdapter::new(
sqlx::postgres::PgPool::connect(&settings.database.url)
.await
.unwrap(),
);
let store = StoreBuilder::default()
.name("store_name".into())
.owner(UUID)
.address(Some("store_address".into()))
.store_id(store_id)
.build()
.unwrap();
// state doesn't exist
assert!(!db.store_name_exists(&store).await.unwrap());
create_dummy_store_record(&store, &db).await;
// state exists
assert!(db.store_name_exists(&store).await.unwrap());
// Set store.deleted = true; now db.store_name_exists must return false
sqlx::query!(
"UPDATE cqrs_billing_store_query SET deleted = true WHERE store_id = $1;",
store.store_id()
)
.execute(&db.pool)
.await
.unwrap();
assert!(!db.store_name_exists(&store).await.unwrap());
settings.drop_db().await;
}
}

View file

@ -0,0 +1,353 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use async_trait::async_trait;
use cqrs_es::persist::{PersistenceError, ViewContext, ViewRepository};
use cqrs_es::{EventEnvelope, Query, View};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use super::errors::*;
use super::BillingDBPostgresAdapter;
use crate::billing::domain::events::BillingEvent;
use crate::billing::domain::store_aggregate::*;
use crate::utils::parse_aggregate_id::parse_aggregate_id;
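// Sentinel, non-UUID aggregate ID; parse_aggregate_id in the loaders below
// checks for it instead of parsing it as a UUID.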
pub const NEW_STORE_NON_UUID: &str = "billing_new_store_non_uuid-asdfa";
// The view for a Store query. For a standard HTTP application this should
// be designed to reflect the response DTO that will be returned to the user.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct StoreView {
name: String,
address: Option<String>,
store_id: Uuid,
owner: Uuid,
deleted: bool,
}
impl From<StoreView> for Store {
fn from(value: StoreView) -> Self {
StoreBuilder::default()
.name(value.name)
.address(value.address)
.store_id(value.store_id)
.owner(value.owner)
.deleted(value.deleted)
.build()
.unwrap()
}
}
// This updates the view with events as they are committed.
// The logic here should be minimal: for example, don't calculate an account
// balance in the view; design the events to carry that information instead.
impl View<Store> for StoreView {
fn update(&mut self, event: &EventEnvelope<Store>) {
match &event.payload {
BillingEvent::StoreAdded(val) => {
self.name = val.name().into();
self.address = val.address().clone();
self.store_id = *val.store_id();
self.owner = *val.owner();
self.deleted = false;
}
BillingEvent::StoreUpdated(e) => {
let val = e.new_store();
self.name = val.name().into();
self.address = val.address().clone();
self.store_id = *val.store_id();
self.owner = *val.owner();
}
_ => (),
}
}
}
#[async_trait]
impl ViewRepository<StoreView, Store> for BillingDBPostgresAdapter {
async fn load(&self, store_id: &str) -> Result<Option<StoreView>, PersistenceError> {
let store_id = match parse_aggregate_id(store_id, NEW_STORE_NON_UUID)? {
Some((val, _)) => return Ok(Some(val)),
None => Uuid::parse_str(store_id).unwrap(),
};
let res = sqlx::query_as!(
StoreView,
"SELECT
name, address, store_id, owner, deleted
FROM
cqrs_billing_store_query
WHERE
store_id = $1;",
store_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
Ok(Some(res))
}
async fn load_with_context(
&self,
store_id: &str,
) -> Result<Option<(StoreView, ViewContext)>, PersistenceError> {
let store_id = match parse_aggregate_id(store_id, NEW_STORE_NON_UUID)? {
Some(val) => return Ok(Some(val)),
None => Uuid::parse_str(store_id).unwrap(),
};
let res = sqlx::query_as!(
StoreView,
"SELECT
name, address, store_id, owner, deleted
FROM
cqrs_billing_store_query
WHERE
store_id = $1;",
&store_id,
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
struct Context {
version: i64,
store_id: Uuid,
}
let ctx = sqlx::query_as!(
Context,
"SELECT
store_id, version
FROM
cqrs_billing_store_query
WHERE
store_id = $1;",
store_id
)
.fetch_one(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
let view_context = ViewContext::new(ctx.store_id.to_string(), ctx.version);
Ok(Some((res, view_context)))
}
async fn update_view(
&self,
view: StoreView,
context: ViewContext,
) -> Result<(), PersistenceError> {
match context.version {
0 => {
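// A version of 0 means this view has never been persisted: insert a fresh row at version 1.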
let version = context.version + 1;
sqlx::query!(
"INSERT INTO cqrs_billing_store_query (
version, name, address, store_id, owner, deleted
) VALUES (
$1, $2, $3, $4, $5, $6
);",
version,
view.name,
view.address,
view.store_id,
view.owner,
view.deleted,
)
.execute(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
}
_ => {
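// A non-zero version means the row already exists: bump the version and update this store's row in place.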
let version = context.version + 1;
sqlx::query!(
"UPDATE
cqrs_billing_store_query
SET
version = $1,
name = $2,
address = $3,
owner = $4,
deleted = $5
WHERE
store_id = $6;",
version,
view.name,
view.address,
view.owner,
view.deleted,
view.store_id,
)
.execute(&self.pool)
.await
.map_err(PostgresAggregateError::from)?;
}
}
Ok(())
}
}
pub struct SimpleLoggingQuery {}
// Our simplest query: great for debugging but absolutely useless in production.
// It just pretty-prints the events as they are processed.
#[async_trait]
impl Query<Store> for SimpleLoggingQuery {
async fn dispatch(&self, aggregate_id: &str, events: &[EventEnvelope<Store>]) {
for event in events {
let payload = serde_json::to_string_pretty(&event.payload).unwrap();
println!("{}-{}\n{}", aggregate_id, event.sequence, payload);
}
}
}
#[async_trait]
impl Query<Store> for BillingDBPostgresAdapter {
async fn dispatch(&self, store_id: &str, events: &[EventEnvelope<Store>]) {
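// Load the current view (falling back to a default view at version 0 if none
// exists), fold the new events into it, and persist the result.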
let res = self
.load_with_context(store_id)
.await
.unwrap_or_else(|_| Some((StoreView::default(), ViewContext::new(store_id.into(), 0))));
let (mut view, view_context): (StoreView, ViewContext) = res.unwrap();
for event in events {
view.update(event);
}
self.update_view(view, view_context).await.unwrap();
}
}
#[cfg(test)]
mod tests {
use super::*;
use postgres_es::PostgresCqrs;
use crate::{
billing::{
application::services::{
add_store_service::AddStoreServiceBuilder, update_store_service::*,
MockBillingServicesInterface,
},
domain::add_store_command::*,
domain::commands::BillingCommand,
domain::update_store_command::*,
},
db::migrate::*,
tests::bdd::*,
utils::{random_string::GenerateRandomStringInterface, uuid::tests::UUID},
};
use std::sync::Arc;
#[actix_rt::test]
async fn pg_query_billing_store_view() {
let settings = crate::settings::tests::get_settings().await;
//let settings = crate::settings::Settings::new().unwrap();
settings.create_db().await;
let db = crate::db::sqlx_postgres::Postgres::init(&settings.database.url).await;
db.migrate().await;
let db = BillingDBPostgresAdapter::new(db.pool.clone());
let simple_query = SimpleLoggingQuery {};
let queries: Vec<Box<dyn Query<Store>>> =
vec![Box::new(simple_query), Box::new(db.clone())];
let mut mock_services = MockBillingServicesInterface::new();
let db2 = db.clone();
mock_services
.expect_add_store()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
AddStoreServiceBuilder::default()
.db_store_id_exists(Arc::new(db2.clone()))
.db_store_name_exists(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let db2 = db.clone();
mock_services
.expect_update_store()
.times(IS_CALLED_ONLY_ONCE.unwrap())
.returning(move || {
Arc::new(
UpdateStoreServiceBuilder::default()
.db_store_id_exists(Arc::new(db2.clone()))
.db_store_name_exists(Arc::new(db2.clone()))
.build()
.unwrap(),
)
});
let (cqrs, store_query): (
Arc<PostgresCqrs<Store>>,
Arc<dyn ViewRepository<StoreView, Store>>,
) = (
Arc::new(postgres_es::postgres_cqrs(
db.pool.clone(),
queries,
Arc::new(mock_services),
)),
Arc::new(db.clone()),
);
let rand = crate::utils::random_string::GenerateRandomString {};
let cmd = AddStoreCommandBuilder::default()
.name(rand.get_random(10))
.address(None)
.owner(UUID)
.store_id(UUID)
.build()
.unwrap();
cqrs.execute(
&cmd.store_id().to_string(),
BillingCommand::AddStore(cmd.clone()),
)
.await
.unwrap();
let store = store_query
.load(&(*cmd.store_id()).to_string())
.await
.unwrap()
.unwrap();
let store: Store = store.into();
assert_eq!(store.name(), cmd.name());
assert_eq!(store.address(), cmd.address());
assert_eq!(store.owner(), cmd.owner());
assert_eq!(store.store_id(), cmd.store_id());
assert!(!store.deleted());
let update_store_cmd = UpdateStoreCommand::new(
rand.get_random(10),
Some(rand.get_random(10)),
UUID,
store,
UUID,
)
.unwrap();
cqrs.execute(
&cmd.store_id().to_string(),
BillingCommand::UpdateStore(update_store_cmd.clone()),
)
.await
.unwrap();
let store = store_query
.load(&(*cmd.store_id()).to_string())
.await
.unwrap()
.unwrap();
let store: Store = store.into();
assert_eq!(store.name(), update_store_cmd.name());
assert_eq!(store.address(), update_store_cmd.address());
assert_eq!(store.owner(), update_store_cmd.owner());
assert_eq!(store.store_id(), update_store_cmd.old_store().store_id());
assert!(!store.deleted());
settings.drop_db().await;
}
}

View file

@ -1,3 +1,4 @@
 // SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
 //
 // SPDX-License-Identifier: AGPL-3.0-or-later
+mod db;

View file

@ -2,5 +2,5 @@
 //
 // SPDX-License-Identifier: AGPL-3.0-or-later
-mod port;
-mod services;
+pub mod port;
+pub mod services;

View file

@ -2,5 +2,5 @@
 //
 // SPDX-License-Identifier: AGPL-3.0-or-later
-mod input;
-mod output;
+pub mod input;
+pub mod output;

View file

@ -0,0 +1,53 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use mockall::predicate::*;
use mockall::*;
use uuid::Uuid;
use super::errors::*;
#[cfg(test)]
#[allow(unused_imports)]
pub use tests::*;
#[automock]
#[async_trait::async_trait]
pub trait BillIDExistsDBPort: Send + Sync {
async fn bill_id_exists(&self, c: &Uuid) -> BillingDBResult<bool>;
}
pub type BillIDExistsDBPortObj = std::sync::Arc<dyn BillIDExistsDBPort>;
#[cfg(test)]
pub mod tests {
use super::*;
use std::sync::Arc;
pub fn mock_bill_id_exists_db_port_false(times: Option<usize>) -> BillIDExistsDBPortObj {
let mut m = MockBillIDExistsDBPort::new();
if let Some(times) = times {
m.expect_bill_id_exists()
.times(times)
.returning(|_| Ok(false));
} else {
m.expect_bill_id_exists().returning(|_| Ok(false));
}
Arc::new(m)
}
pub fn mock_bill_id_exists_db_port_true(times: Option<usize>) -> BillIDExistsDBPortObj {
let mut m = MockBillIDExistsDBPort::new();
if let Some(times) = times {
m.expect_bill_id_exists()
.times(times)
.returning(|_| Ok(true));
} else {
m.expect_bill_id_exists().returning(|_| Ok(true));
}
Arc::new(m)
}
}

View file

@ -0,0 +1,19 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use derive_more::Display;
use serde::{Deserialize, Serialize};
pub type BillingDBResult<V> = Result<V, BillingDBError>;
#[derive(Debug, Display, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub enum BillingDBError {
DuplicateBillID,
DuplicateStoreName,
DuplicateStoreID,
StoreIDNotFound,
DuplicateLineItemID,
LineItemIDNotFound,
InternalError,
}

View file

@ -0,0 +1,61 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use mockall::predicate::*;
use mockall::*;
use uuid::Uuid;
use crate::billing::domain::line_item_aggregate::LineItem;
use super::errors::*;
#[cfg(test)]
#[allow(unused_imports)]
pub use tests::*;
#[automock]
#[async_trait::async_trait]
pub trait GetLineItemsForBillIDDBPort: Send + Sync {
async fn get_line_items_for_bill_id(&self, bill_id: Uuid) -> BillingDBResult<Vec<LineItem>>;
}
pub type GetLineItemsForBillIDDBPortObj = std::sync::Arc<dyn GetLineItemsForBillIDDBPort>;
#[cfg(test)]
pub mod tests {
use super::*;
use std::sync::Arc;
pub fn mock_get_line_items_for_bill_id_db_port_no_line_items(
times: Option<usize>,
) -> GetLineItemsForBillIDDBPortObj {
let mut m = MockGetLineItemsForBillIDDBPort::new();
if let Some(times) = times {
m.expect_get_line_items_for_bill_id()
.times(times)
.returning(|_| Ok(Vec::default()));
} else {
m.expect_get_line_items_for_bill_id()
.returning(|_| Ok(Vec::default()));
}
Arc::new(m)
}
pub fn mock_get_line_items_for_bill_id_db_port_true(
times: Option<usize>,
) -> GetLineItemsForBillIDDBPortObj {
let mut m = MockGetLineItemsForBillIDDBPort::new();
if let Some(times) = times {
m.expect_get_line_items_for_bill_id()
.times(times)
.returning(|_| Ok(vec![LineItem::default()]));
} else {
m.expect_get_line_items_for_bill_id()
.returning(|_| Ok(vec![LineItem::default()]));
}
Arc::new(m)
}
}

View file

@ -0,0 +1,57 @@
// SPDX-FileCopyrightText: 2024 Aravinth Manivannan <realaravinth@batsense.net>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
use mockall::predicate::*;
use mockall::*;
use uuid::Uuid;
use super::errors::*;
#[cfg(test)]
#[allow(unused_imports)]
pub use tests::*;
#[automock]
#[async_trait::async_trait]
pub trait LineItemIDExistsDBPort: Send + Sync {
async fn line_item_id_exists(&self, line_item_id: &Uuid) -> BillingDBResult<bool>;
}
pub type LineItemIDExistsDBPortObj = std::sync::Arc<dyn LineItemIDExistsDBPort>;
#[cfg(test)]
pub mod tests {
use super::*;
use std::sync::Arc;
pub fn mock_line_item_id_exists_db_port_false(
times: Option<usize>,
) -> LineItemIDExistsDBPortObj {
let mut m = MockLineItemIDExistsDBPort::new();
if let Some(times) = times {
m.expect_line_item_id_exists()
.times(times)
.returning(|_| Ok(false));
} else {
m.expect_line_item_id_exists().returning(|_| Ok(false));
}
Arc::new(m)
}
pub fn mock_line_item_id_exists_db_port_true(
times: Option<usize>,
) -> LineItemIDExistsDBPortObj {
let mut m = MockLineItemIDExistsDBPort::new();
if let Some(times) = times {
m.expect_line_item_id_exists()
.times(times)
.returning(|_| Ok(true));
} else {
m.expect_line_item_id_exists().returning(|_| Ok(true));
}
Arc::new(m)
}
}

Some files were not shown because too many files have changed in this diff Show more