Mirror of https://github.com/Infisical/infisical.git, synced 2025-04-10 07:25:40 +00:00
Compare commits: daniel/fix...maidul-sds (719 commits)
SHA1 | Author | Date | |
---|---|---|---|
0df80c5b2d | |||
c577f51c19 | |||
24d121ab59 | |||
9ef8812205 | |||
37a204e49e | |||
11927f341a | |||
6fc17a4964 | |||
eb00232db6 | |||
4fd245e493 | |||
d92c57d051 | |||
beaef1feb0 | |||
033fd5e7a4 | |||
f49f3c926c | |||
280d44f1e5 | |||
4eea0dc544 | |||
8a33f1a591 | |||
56ff11d63f | |||
1ecce285f0 | |||
b5c9b6a1bd | |||
e12ac6c07e | |||
ea480c222b | |||
1fb644af4a | |||
a6f4a95821 | |||
8578208f2d | |||
fc4189ba0f | |||
b9ecf42fb6 | |||
008e18638f | |||
ac3b9c25dd | |||
f4997dec12 | |||
fcf405c630 | |||
efc6876260 | |||
8bab6d87bb | |||
39a49f12f5 | |||
cfd841ea08 | |||
4d67c03e3e | |||
8826bc5d60 | |||
03fdce67f1 | |||
72f3f7980e | |||
f1aa2fbd84 | |||
217de6250f | |||
f742bd01d9 | |||
3fe53d5183 | |||
a5f5f803df | |||
c37e3ba635 | |||
55279e5e41 | |||
88fb37e8c6 | |||
6271dcc25d | |||
0f7faa6bfe | |||
4ace339d5b | |||
e8c0d1ece9 | |||
bb1977976c | |||
bb3da75870 | |||
088e888560 | |||
180241fdf0 | |||
93f27a7ee8 | |||
ed3bc8dd27 | |||
8dc4809ec8 | |||
a55d64e430 | |||
02d54da74a | |||
d660168700 | |||
1c75fc84f0 | |||
f63da87c7f | |||
53b9fe2dec | |||
87dc0eed7e | |||
f2dd6f94a4 | |||
ac26ae3893 | |||
4c65e9910a | |||
5150c102e6 | |||
41c29d41e1 | |||
4de33190a9 | |||
7cfecb39e4 | |||
7524b83c29 | |||
7a41cdf51b | |||
17d99cb2cf | |||
bd0da0ff74 | |||
d2a54234f4 | |||
626262461a | |||
93ba29e57f | |||
1581aa088d | |||
ceab951bca | |||
2e3dcc50ae | |||
a79087670e | |||
7b04c08fc7 | |||
70842b8e5e | |||
36e3e4c1b5 | |||
ce9b66ef14 | |||
1384c8e855 | |||
f213c75ede | |||
6ade708e19 | |||
ce3af41ebc | |||
e442f10fa5 | |||
2e8ad18285 | |||
f03ca7f916 | |||
bfa533e9d2 | |||
a8759e7410 | |||
af1905a39e | |||
16182a9d1d | |||
1321aa712f | |||
c1f61f2db4 | |||
5ad00130ea | |||
ea5e8e29e6 | |||
e7f89bdfef | |||
d23a7e41f3 | |||
52a885716d | |||
3fc907f076 | |||
eaf10483c0 | |||
dcd0234fb5 | |||
4dda270e8e | |||
4e6b289e1b | |||
c1cb85b49f | |||
ed71e651f6 | |||
6fab7d9507 | |||
1a11dd954b | |||
5d3574d3f6 | |||
aa42aa05aa | |||
7a36badb23 | |||
9ce6fd3f8e | |||
a549c8b9e3 | |||
1c749c84f2 | |||
1bc1feb843 | |||
80ca115ccd | |||
5a6bb90870 | |||
de7a693a6a | |||
096417281e | |||
763a96faf8 | |||
870eaf9301 | |||
10abf192a1 | |||
508f697bdd | |||
8ea8a6f72e | |||
54e6f4b607 | |||
ea3b3c5cec | |||
a8fd83652d | |||
45f3675337 | |||
87a9a87dcd | |||
0b882ece8c | |||
e005e94165 | |||
0e07eaaa01 | |||
e10e313af3 | |||
e6c0bbb25b | |||
2b39d9e6c4 | |||
cf42279e5b | |||
fbc4b47198 | |||
4baa6b1d3d | |||
74ee77f41e | |||
ee1b12173a | |||
1bfbc7047c | |||
a410d560a7 | |||
99e150cc1d | |||
e7191c2f71 | |||
f6deb0969a | |||
1163e41e64 | |||
a0f93f995e | |||
50fcf97a36 | |||
8e68d21115 | |||
372b6cbaea | |||
26add7bfd1 | |||
364302a691 | |||
c8dc29d59b | |||
f3d207ab5c | |||
e1cd632546 | |||
655ee4f118 | |||
34a2452bf5 | |||
7846a81636 | |||
6bdf3455f5 | |||
556ae168dd | |||
7b19d2aa6a | |||
bda9bb3d61 | |||
4b66a9343c | |||
4930d7fc02 | |||
ad644db512 | |||
3707b75349 | |||
ffaf145317 | |||
17b0d0081d | |||
ecf177fecc | |||
6112bc9356 | |||
6c3156273c | |||
eb7c804bb9 | |||
9d7bfae519 | |||
1292b5bf56 | |||
f09e18a706 | |||
5d9a43a3fd | |||
12154c869f | |||
8d66272ab2 | |||
0e44e630cb | |||
49c4929c9c | |||
da561e37c5 | |||
ebc584d36f | |||
656d979d7d | |||
a29fb613b9 | |||
5382f3de2d | |||
b2b858f7e8 | |||
dbc5b5a3d1 | |||
8f3d328b9a | |||
b7d683ee1b | |||
9bd6ec19c4 | |||
03fd0a1eb9 | |||
97023e7714 | |||
1d23ed0680 | |||
1bd66a614b | |||
802a9cf83c | |||
9e95fdbb58 | |||
803f56cfe5 | |||
b163a6c5ad | |||
ddc119ceb6 | |||
302e068c74 | |||
95b92caff3 | |||
5d894b6d43 | |||
09e621539e | |||
dab3e2efad | |||
5e0b78b104 | |||
27852607d1 | |||
956719f797 | |||
04cbbccd25 | |||
7f48e9d62e | |||
8a0018eff2 | |||
e6a920caa3 | |||
71b8c59050 | |||
11411ca4eb | |||
b7c79fa45b | |||
18951b99de | |||
bd05c440c3 | |||
9ca5013a59 | |||
b65b8bc362 | |||
f494c182ff | |||
2fae822e1f | |||
5df140cbd5 | |||
d93cbb023d | |||
9056d1be0c | |||
5f503949eb | |||
15c5fe4095 | |||
91ebcca0fd | |||
9cf917de07 | |||
0826b40e2a | |||
911b62c63a | |||
5343c7af00 | |||
8c03c160a9 | |||
604b0467f9 | |||
a2b555dd81 | |||
ce7bb82f02 | |||
7cd092c0cf | |||
cbfb9af0b9 | |||
ef236106b4 | |||
773a338397 | |||
afb5820113 | |||
5acc0fc243 | |||
c56469ecdb | |||
c59a53180c | |||
f56d265e62 | |||
cc0ff98d4f | |||
4a14c3efd2 | |||
b2d2297914 | |||
836bb6d835 | |||
177eb2afee | |||
594df18611 | |||
3bcb8bf6fc | |||
23c362f9cd | |||
9120367562 | |||
a74c37c18b | |||
f509464947 | |||
07fd489982 | |||
f6d3831d6d | |||
3ece81d663 | |||
f6d87ebf32 | |||
23483ab7e1 | |||
fe31d44d22 | |||
58bab4d163 | |||
8f48a64fd6 | |||
929dc059c3 | |||
d604ef2480 | |||
45e471b16a | |||
fe096772e0 | |||
7c540b6be8 | |||
7dbe8dd3c9 | |||
0dec602729 | |||
66ded779fc | |||
01d24291f2 | |||
55b36b033e | |||
8f461bf50c | |||
1847491cb3 | |||
541c7b63cd | |||
7e5e177680 | |||
40f552e4f1 | |||
ecb54ee3b3 | |||
35a63b8cc6 | |||
2a4596d415 | |||
35e476d916 | |||
b975996158 | |||
122f789cdf | |||
c9911aa841 | |||
32cd0d8af8 | |||
585f0d9f1b | |||
d0292aa139 | |||
4e9be8ca3c | |||
ad49e9eaf1 | |||
fed60f7c03 | |||
1bc0e3087a | |||
80a4f838a1 | |||
d31ec44f50 | |||
d0caef37ce | |||
2d26febe58 | |||
c23ad8ebf2 | |||
bad068ef19 | |||
53430608a8 | |||
b9071ab2b3 | |||
a556c02df6 | |||
bfab270d68 | |||
8ea6a1f3d5 | |||
3c39bf6a0f | |||
828644799f | |||
411e67ae41 | |||
4914bc4b5a | |||
d7050a1947 | |||
3c59422511 | |||
c81204e6d5 | |||
880f39519f | |||
8646f6c50b | |||
437a9e6ccb | |||
b54139bd37 | |||
8a6a36ac54 | |||
c6eb973da0 | |||
21750a8c20 | |||
a598665b2f | |||
56bbf502a2 | |||
9975f7d83f | |||
7ad366b363 | |||
cca4d68d94 | |||
b82b94db54 | |||
de9cb265e0 | |||
5611b9aba1 | |||
53075d503a | |||
e47cfa262a | |||
0ab7a4e713 | |||
5138d588db | |||
7e2d093e29 | |||
2d780e0566 | |||
7ac4ad3194 | |||
3ab6eb62c8 | |||
8eb234a12f | |||
85590af99e | |||
5c7cec0c81 | |||
68f768749b | |||
2c7e342b18 | |||
632900e516 | |||
5fd975b1d7 | |||
d45ac66064 | |||
47cba8ec3c | |||
d4aab66da2 | |||
0dc4c92c89 | |||
f49c963367 | |||
fe11b8e57e | |||
79680b6a73 | |||
58838c541f | |||
03cc71cfed | |||
02529106c9 | |||
0401f55bc3 | |||
403e0d2d9d | |||
d939ff289d | |||
d1816c3051 | |||
cb350788c0 | |||
cd58768d6f | |||
dcd6f4d55d | |||
3c828614b8 | |||
09e7988596 | |||
f40df19334 | |||
76c9d3488b | |||
0809da33e0 | |||
b528eec4bb | |||
5179103680 | |||
25a9e5f58a | |||
8ddfe7b6e9 | |||
c23f21d57a | |||
1242a43d98 | |||
1655ca27d1 | |||
2bcead03b0 | |||
41ab1972ce | |||
b00fff6922 | |||
97b01ca5f8 | |||
c2bd6f5ef3 | |||
18efc9a6de | |||
436ccb25fb | |||
8f08a352dd | |||
00f86cfd00 | |||
3944aafb11 | |||
a6b852fab9 | |||
2a043afe11 | |||
df8f2cf9ab | |||
a18015b1e5 | |||
8b80622d2f | |||
c0fd0a56f3 | |||
326764dd41 | |||
1f24d02c5e | |||
c130fbddd9 | |||
f560534493 | |||
10a97f4522 | |||
7a2f0214f3 | |||
a2b994ab23 | |||
e73d3f87f3 | |||
c4715124dc | |||
b53607f8e4 | |||
8f79d3210a | |||
67c1cb9bf1 | |||
68b1984a76 | |||
ba45e83880 | |||
28ecc37163 | |||
a6a2e2bae0 | |||
d8bbfacae0 | |||
58549c398f | |||
842ed62bec | |||
06d8800ee0 | |||
2ecfd1bb7e | |||
783d4c7bd6 | |||
fbf3f26abd | |||
1d09693041 | |||
626e37e3d0 | |||
07fd67b328 | |||
3f1f018adc | |||
fe04e6d20c | |||
d7171a1617 | |||
384a0daa31 | |||
c5c949e034 | |||
c2c9edf156 | |||
c8248ef4e9 | |||
9f6a6a7b7c | |||
121b642d50 | |||
59b16f647e | |||
2ab5932693 | |||
8dfcef3900 | |||
8ca70eec44 | |||
60df59c7f0 | |||
e231c531a6 | |||
d48bb910fa | |||
1317266415 | |||
f0938330a7 | |||
e1bb0ac3ad | |||
f54d930de2 | |||
288f47f4bd | |||
b090ebfd41 | |||
67773bff5e | |||
8ef1cfda04 | |||
2a79d5ba36 | |||
0cb95f36ff | |||
4a1dfda41f | |||
c238b7b6ae | |||
288d7e88ae | |||
83d314ba32 | |||
b94a0ffa6c | |||
f88389bf9e | |||
2e88c5e2c5 | |||
73f3b8173e | |||
b60e404243 | |||
aa5b88ff04 | |||
b7caff88cf | |||
10120e1825 | |||
31e66c18e7 | |||
fb06f5a3bc | |||
1515dd8a71 | |||
da18a12648 | |||
49a0d3cec6 | |||
e821a11271 | |||
af4428acec | |||
61370cc6b2 | |||
cf3b2ebbca | |||
e970cc0f47 | |||
bd5cd03aeb | |||
760a1e917a | |||
c46e4d7fc1 | |||
1f3896231a | |||
4323f6fa8f | |||
65db91d491 | |||
ae5b57f69f | |||
b717de4f78 | |||
1216d218c1 | |||
209004ec6d | |||
c865d12849 | |||
c921c28185 | |||
3647943c80 | |||
4bf5381060 | |||
a10c358f83 | |||
d3c63b5699 | |||
c64334462f | |||
c497e19b99 | |||
2aeae616de | |||
e0e21530e2 | |||
2d7ff66246 | |||
179497e830 | |||
4c08c80e5b | |||
7b4b802a9b | |||
95cf3cf6cc | |||
d021b414cf | |||
bed75c36dd | |||
7d6af64904 | |||
16519f9486 | |||
bb27d38a12 | |||
5b26928751 | |||
f425e7e48f | |||
4601f46afb | |||
692bdc060c | |||
3a4f8c2e54 | |||
04cb499f0f | |||
189a610f52 | |||
00039ba0e4 | |||
abdcb95a8f | |||
47ea4ae9a6 | |||
903b2c3dc6 | |||
c795b3b3a0 | |||
0d8ff1828e | |||
30d6af7760 | |||
44b42359da | |||
38373722e3 | |||
7ec68ca9a1 | |||
a49d5b121b | |||
901ff7a605 | |||
ba4aa15c92 | |||
a00103aa1e | |||
0c17cc3577 | |||
51d84a47b9 | |||
d529670a52 | |||
ed0463e3e4 | |||
20db0a255c | |||
6fe1d77375 | |||
f90855e7a5 | |||
97f5c33aea | |||
34c2200269 | |||
69925721cc | |||
0961d2f1c6 | |||
b9bd518aa6 | |||
692c9b5d9c | |||
32046ca880 | |||
590dbbcb04 | |||
27d2af4979 | |||
a1e6c6f7d5 | |||
cc94a3366a | |||
6a6c084b8a | |||
7baa3b4cbe | |||
6cab7504fc | |||
ca3d8c5594 | |||
28a2a6c41a | |||
05efd95472 | |||
fa31f87479 | |||
b176f13392 | |||
f4384bb01e | |||
856c2423be | |||
4570de09ae | |||
4feff5b4ca | |||
6081e2927e | |||
0b42f29916 | |||
b60d0992f4 | |||
146c4284a2 | |||
a8a68f600c | |||
742f5f6621 | |||
f993e4aa5c | |||
bb6416acb7 | |||
5ae33b9f3b | |||
1f38b92ec6 | |||
f3cd7efe0e | |||
2b16c19b70 | |||
943b540383 | |||
e180021aa6 | |||
f2a49a79f0 | |||
8e08c443ad | |||
dae26daeeb | |||
170f8d9add | |||
8d41ef198a | |||
69d60a227a | |||
c8eefcfbf9 | |||
53cec754cc | |||
5db3e177eb | |||
3fcc3ccff4 | |||
df07d7b6d7 | |||
28a655bef1 | |||
5f2cd04f46 | |||
897ce1f267 | |||
6afc17b84b | |||
9017a5e838 | |||
cb8e4d884e | |||
16807c3dd6 | |||
61791e385c | |||
bbd7bfb0f5 | |||
4de8c48b2c | |||
a4bbe2c612 | |||
541a2e7d05 | |||
3ddb4cd27a | |||
a5555c3816 | |||
ea4e51d826 | |||
3bc920c593 | |||
f4244c6d4d | |||
e1b9965f01 | |||
705b4f7513 | |||
df38c761ad | |||
32a84471f2 | |||
fc4a20caf2 | |||
ea14df2cbd | |||
6bd6cac366 | |||
45294253aa | |||
635fbdc80b | |||
d20c48b7cf | |||
1fc18fe23b | |||
99403e122b | |||
5176e70437 | |||
82b2b0af97 | |||
e313c866a2 | |||
2d81606049 | |||
718f4ef129 | |||
a42f3b3763 | |||
f7d882a6fc | |||
385afdfcf8 | |||
281d703cc3 | |||
6f56ed5474 | |||
809e4eeba1 | |||
254446c895 | |||
8479c406a5 | |||
8e0b4254b1 | |||
bb52e2beb4 | |||
2739b08e59 | |||
ba5e877a3b | |||
d2752216f6 | |||
d91fb0db02 | |||
556e4d62c4 | |||
4892eea009 | |||
09c6fcb73b | |||
79181a1e3d | |||
bb934ef7b1 | |||
cd9316537d | |||
942e5f2f65 | |||
353d231a4e | |||
069651bdb4 | |||
9061ec2dff | |||
68e05b7198 | |||
b0a5023723 | |||
4f998e3940 | |||
1248840dc8 | |||
64c8125e4b | |||
1690a9429c | |||
69fe5bf71d | |||
f12d4d80c6 | |||
56f2a3afa4 | |||
c109fbab3e | |||
15fb01089b | |||
6f4be3e25a | |||
406da1b5f0 | |||
da45e132a3 | |||
8d33647739 | |||
d1c142e5b1 | |||
fb719a9383 | |||
3c64359597 | |||
bb1cad0c5b | |||
2a1cfe15b4 | |||
e420973dd2 | |||
881d70bc64 | |||
14c1b4f07b | |||
3028bdd424 | |||
15cc157c5f | |||
902a0b0ed4 | |||
ad89ffe94d | |||
ba92192537 | |||
4de1713a18 | |||
26ed8df73c | |||
1917e0fdb7 | |||
4b07234997 | |||
c1decab912 | |||
216c073290 | |||
8626bce632 | |||
c5a2b0321f | |||
6a402950c3 | |||
63333159ca | |||
ce4ba24ef2 | |||
f606e31b98 | |||
ecdbb3eb53 | |||
0321ec32fb | |||
5af53d3398 | |||
8da8c6a66c | |||
88a4390ea0 | |||
c70d0a577c | |||
1070954bdd | |||
587a4a1120 | |||
cc689d3178 | |||
e6848828f2 | |||
c8b93e4467 | |||
0bca24bb00 | |||
c563ada50f | |||
26d1616e22 | |||
5fd071d1de | |||
a6ac78356b | |||
e4a2137991 | |||
9721d7a15e | |||
93db5c4555 | |||
ad4393fdef | |||
cd06e4e7f3 | |||
711a4179ce | |||
b4a2a477d3 | |||
8e53a1b171 | |||
0f23b7e1d3 | |||
33193a47ae | |||
1ad286ca87 | |||
be7c11a3f5 | |||
b97bbe5beb | |||
cf5260b383 | |||
13e0dd8e0f | |||
55a6740714 | |||
891cb06de0 | |||
02e8f20cbf | |||
dbe771dba0 | |||
273fd6c98f | |||
d5f4ce4376 | |||
18aac6508b | |||
85653a90d5 | |||
879ef2c178 | |||
8777cfe680 | |||
2b630f75aa | |||
91cee20cc8 | |||
4249ec6030 | |||
e7a95e6af2 | |||
a9f04a3c1f | |||
3d380710ee | |||
2177ec6bcc | |||
070eb2aacd | |||
e619cfa313 | |||
c3038e3ca1 | |||
ff0e7feeee |
@@ -70,3 +70,8 @@ NEXT_PUBLIC_CAPTCHA_SITE_KEY=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

WORKFLOW_SLACK_CLIENT_ID=
WORKFLOW_SLACK_CLIENT_SECRET=
@@ -6,9 +6,15 @@ permissions:
  contents: read

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build backend image
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
2 .github/workflows/run-cli-tests.yml vendored
@@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

run: go test -v -count=1 ./test
1 .gitignore vendored
@@ -63,6 +63,7 @@ yarn-error.log*

# Editor specific
.vscode/*
.idea/*

frontend-build
13 Makefile
@@ -15,3 +15,16 @@ up-prod:

down:
    docker compose -f docker-compose.dev.yml down

reviewable-ui:
    cd frontend && \
    npm run lint:fix && \
    npm run type:check

reviewable-api:
    cd backend && \
    npm run lint:fix && \
    npm run type:check

reviewable: reviewable-ui reviewable-api
35 backend/e2e-test/routes/v1/secret-approval-policy.spec.ts Normal file
@@ -0,0 +1,35 @@
import { seedData1 } from "@app/db/seed-data";

const createPolicy = async (dto: { name: string; secretPath: string; approvers: string[]; approvals: number }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [seedData1.id],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
@@ -1,73 +1,61 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
const createSecretImport = async (importPath: string, importEnv: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "POST",
|
||||
url: `/api/v1/secret-imports`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/",
|
||||
import: {
|
||||
environment: importEnv,
|
||||
path: importPath
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
const deleteSecretImport = async (id: string) => {
|
||||
const res = await testServer.inject({
|
||||
method: "DELETE",
|
||||
url: `/api/v1/secret-imports/${id}`,
|
||||
headers: {
|
||||
authorization: `Bearer ${jwtAuthToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.project.id,
|
||||
environment: seedData1.environment.slug,
|
||||
path: "/"
|
||||
}
|
||||
});
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const payload = JSON.parse(res.payload);
|
||||
expect(payload).toHaveProperty("secretImport");
|
||||
return payload.secretImport;
|
||||
};
|
||||
|
||||
describe("Secret Import Router", async () => {
|
||||
test.each([
|
||||
{ importEnv: "prod", importPath: "/" }, // one in root
|
||||
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
|
||||
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
|
||||
// check for default environments
|
||||
const payload = await createSecretImport(importPath, importEnv);
|
||||
const payload = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath,
|
||||
importEnv
|
||||
});
|
||||
expect(payload).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importPath,
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
slug: importEnv,
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
);
|
||||
await deleteSecretImport(payload.id);
|
||||
|
||||
await deleteSecretImport({
|
||||
id: payload.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Get secret imports", async () => {
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const res = await testServer.inject({
|
||||
method: "GET",
|
||||
url: `/api/v1/secret-imports`,
|
||||
@@ -89,25 +77,60 @@ describe("Secret Import Router", async () => {
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: "staging",
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
])
|
||||
);
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Update secret import position", async () => {
|
||||
const prodImportDetails = { path: "/", envSlug: "prod" };
|
||||
const stagingImportDetails = { path: "/", envSlug: "staging" };
|
||||
|
||||
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
|
||||
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: prodImportDetails.path,
|
||||
importEnv: prodImportDetails.envSlug
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: stagingImportDetails.path,
|
||||
importEnv: stagingImportDetails.envSlug
|
||||
});
|
||||
|
||||
const updateImportRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
@@ -161,22 +184,55 @@ describe("Secret Import Router", async () => {
|
||||
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
|
||||
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
|
||||
|
||||
await deleteSecretImport(createdImport1.id);
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
|
||||
test("Delete secret import position", async () => {
|
||||
const createdImport1 = await createSecretImport("/", "prod");
|
||||
const createdImport2 = await createSecretImport("/", "staging");
|
||||
const deletedImport = await deleteSecretImport(createdImport1.id);
|
||||
const createdImport1 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "prod"
|
||||
});
|
||||
const createdImport2 = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.project.id,
|
||||
importPath: "/",
|
||||
importEnv: "staging"
|
||||
});
|
||||
const deletedImport = await deleteSecretImport({
|
||||
id: createdImport1.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
// check for default environments
|
||||
expect(deletedImport).toEqual(
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
importPath: expect.any(String),
|
||||
importPath: "/",
|
||||
importEnv: expect.objectContaining({
|
||||
name: expect.any(String),
|
||||
slug: expect.any(String),
|
||||
slug: "prod",
|
||||
id: expect.any(String)
|
||||
})
|
||||
})
|
||||
@@ -201,6 +257,552 @@ describe("Secret Import Router", async () => {
|
||||
expect(secretImportList.secretImports.length).toEqual(1);
|
||||
expect(secretImportList.secretImports[0].position).toEqual(1);
|
||||
|
||||
await deleteSecretImport(createdImport2.id);
|
||||
await deleteSecretImport({
|
||||
id: createdImport2.id,
|
||||
workspaceId: seedData1.project.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import waterfall pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import multiple destination to one source pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging"
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
// dev -> stage, prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret import one source to multiple destination pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const stageImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
const prodImportFromDev = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: seedData1.environment.slug
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: prodImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromDev.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
const stagingSecret = await getSecretByNameV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
|
||||
expect(stagingSecret.secretValue).toBe("stage-value");
|
||||
|
||||
const prodSecret = await getSecretByNameV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(prodSecret.secretKey).toBe("PROD_KEY");
|
||||
expect(prodSecret.secretValue).toBe("prod-value");
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
|
406 backend/e2e-test/routes/v1/secret-replication.spec.ts Normal file
@@ -0,0 +1,406 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
// dev <- stage <- prod
|
||||
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
|
||||
"Secret replication waterfall pattern testing - %secretPath",
|
||||
({ secretPath: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const stageImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: stageImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging",
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check one level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
});
|
||||
|
||||
test("Check two level imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("PROD_KEY");
|
||||
expect(secret.secretValue).toBe("prod-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
||||
|
||||
// dev <- stage, dev <- prod
|
||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
||||
"Secret replication 1-N pattern testing - %path",
|
||||
({ path: testSuitePath }) => {
|
||||
beforeAll(async () => {
|
||||
let prodFolder: { id: string };
|
||||
let stagingFolder: { id: string };
|
||||
let devFolder: { id: string };
|
||||
|
||||
if (testSuitePath !== "/") {
|
||||
prodFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
stagingFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
devFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
}
|
||||
|
||||
const devImportFromStage = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "staging",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
const devImportFromProd = await createSecretImport({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: testSuitePath,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
importPath: testSuitePath,
|
||||
importEnv: "prod",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteSecretImport({
|
||||
id: devImportFromProd.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
await deleteSecretImport({
|
||||
id: devImportFromStage.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
|
||||
if (prodFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
}
|
||||
|
||||
if (stagingFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: stagingFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: "staging"
|
||||
});
|
||||
}
|
||||
|
||||
if (devFolder) {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: devFolder.id,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environmentSlug: seedData1.environment.slug
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
test("Check imported secret exist", async () => {
|
||||
await createSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY",
|
||||
value: "stage-value"
|
||||
});
|
||||
|
||||
await createSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY",
|
||||
value: "prod-value"
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const secret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
|
||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
||||
expect(secret.secretValue).toBe("stage-value");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "STAGING_KEY",
|
||||
secretValue: "stage-value"
|
||||
})
|
||||
])
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "PROD_KEY",
|
||||
secretValue: "prod-value"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "staging",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "STAGING_KEY"
|
||||
});
|
||||
await deleteSecretV2({
|
||||
environmentSlug: "prod",
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
secretPath: testSuitePath,
|
||||
authToken: jwtAuthToken,
|
||||
key: "PROD_KEY"
|
||||
});
|
||||
});
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
330 backend/e2e-test/routes/v3/secret-reference.spec.ts Normal file
@@ -0,0 +1,330 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Secret expansion", () => {
|
||||
const projectId = seedData1.projectV3.id;
|
||||
|
||||
beforeAll(async () => {
|
||||
const prodRootFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
name: "nested"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodRootFolder.id,
|
||||
workspaceId: projectId,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
test("Local secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "HELLO",
|
||||
value: "world"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${HELLO}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello world");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "TEST",
|
||||
secretValue: "hello world"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
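The assertion above pins down local expansion: "hello ${HELLO}" resolves to "hello world" using a sibling secret in the same path. As an illustrative sketch only (not Infisical's implementation), that behaviour amounts to substituting ${NAME} placeholders from a map of sibling key/value pairs:

// Hypothetical helper for illustration; real expansion also handles
// cross-environment references like ${prod.deep.DEEP_KEY_1}.
const expandLocalReferences = (value: string, siblings: Record<string, string>): string =>
  value.replace(/\$\{([A-Z0-9_]+)\}/g, (match, key: string) =>
    key in siblings ? siblings[key] : match
  );

// expandLocalReferences("hello ${HELLO}", { HELLO: "world" }) === "hello world"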
|
||||
|
||||
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
test(
|
||||
"Replicated secret import secret expansion on local reference and nested reference",
|
||||
async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
|
||||
environment: seedData1.environment.slug,
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
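Taken together, these tests document the reference syntax: ${KEY} resolves against the same environment and path, while ${env.folder.subfolder.KEY} walks another environment's folder chain. A minimal sketch of that behaviour using the same helpers and fixtures; the DB_HOST/DB_URL keys are illustrative and not part of the suite:

// hypothetical keys, reusing createSecretV2/getSecretByNameV2 from the test utils below
await createSecretV2({ environmentSlug: "prod", workspaceId: projectId, secretPath: "/deep", authToken: jwtAuthToken, key: "DB_HOST", value: "db.internal" });
// eslint-disable-next-line
await createSecretV2({ environmentSlug: seedData1.environment.slug, workspaceId: projectId, secretPath: "/", authToken: jwtAuthToken, key: "DB_URL", value: "postgres://${prod.deep.DB_HOST}:5432" });
const expanded = await getSecretByNameV2({ environmentSlug: seedData1.environment.slug, workspaceId: projectId, secretPath: "/", authToken: jwtAuthToken, key: "DB_URL" });
expect(expanded.secretValue).toBe("postgres://db.internal:5432"); // expansion is requested by the helper via expandSecretReferences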
@@ -8,6 +8,7 @@ type TRawSecret = {
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,

backend/e2e-test/testUtils/folders.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
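A short usage sketch for these folder helpers, assuming the seedData1 project and jwtAuthToken fixtures from the surrounding e2e setup:

const folder = await createFolder({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", name: "deep", authToken: jwtAuthToken });
const folders = await listFolders({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", authToken: jwtAuthToken });
expect(folders.map((f) => f.name)).toContain("deep"); // the created folder should be listed at its parent path
await deleteFolder({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", id: folder.id, authToken: jwtAuthToken });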
backend/e2e-test/testUtils/secret-imports.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
type TSecretImport = {
  id: string;
  importEnv: {
    name: string;
    slug: string;
    id: string;
  };
  importPath: string;
};

export const createSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  isReplication?: boolean;
  secretPath: string;
  importPath: string;
  importEnv: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      isReplication: dto.isReplication,
      path: dto.secretPath,
      import: {
        environment: dto.importEnv,
        path: dto.importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const deleteSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  id: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const listSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImports");
  return payload.secretImports as TSecretImport[];
};
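As the import tests above use them, an import is created from a source environment and path into the current one; a sketch under the same fixtures:

const secretImport = await createSecretImport({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", importEnv: "prod", importPath: "/deep/nested", authToken: jwtAuthToken });
const imports = await listSecretImport({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", authToken: jwtAuthToken });
expect(imports.map((i) => i.importPath)).toContain("/deep/nested");
await deleteSecretImport({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", id: secretImport.id, authToken: jwtAuthToken });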
backend/e2e-test/testUtils/secrets.ts (new file, 128 lines)
@@ -0,0 +1,128 @@
import { SecretType } from "@app/db/schemas";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

export const createSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  value: string;
  comment?: string;
  authToken: string;
  type?: SecretType;
}) => {
  const createSecretReqBody = {
    workspaceId: dto.workspaceId,
    environment: dto.environmentSlug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.secretPath,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

export const deleteSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

export const getSecretByNameV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const response = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(response.statusCode).toBe(200);
  const payload = JSON.parse(response.payload);
  expect(payload).toHaveProperty("secret");
  return payload.secret as TRawSecret;
};

export const getSecretsV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
  expect(getSecretsPayload).toHaveProperty("secrets");
  expect(getSecretsPayload).toHaveProperty("imports");
  return getSecretsPayload as {
    secrets: TRawSecret[];
    imports: {
      secretPath: string;
      environment: string;
      folderId: string;
      secrets: TRawSecret[];
    }[];
  };
};
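Both read helpers ask the raw secrets endpoint to expand references and include imports, so callers can assert on plain values; for example:

const { secrets, imports } = await getSecretsV2({ workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: "/", authToken: jwtAuthToken });
// secrets holds the already-expanded values at the path; each entry of imports carries the imported secretPath, environment, folderId and its own secrets array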
@@ -11,10 +11,11 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -28,19 +29,31 @@ export default {
      dbRootCert: cfg.DB_ROOT_CERT
    });

    const redis = new Redis(cfg.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );
      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      const smtp = mockSmtpServer();
      const queue = mockQueue();
      const keyStore = mockKeyStore();
      const queue = queueServiceFactory(cfg.REDIS_URL);
      const keyStore = keyStoreFactory(cfg.REDIS_URL);
      const server = await main({ db, smtp, logger, queue, keyStore });
      // @ts-expect-error type
      globalThis.testServer = server;
@@ -58,10 +71,12 @@ export default {
        { expiresIn: cfg.JWT_AUTH_LIFETIME }
      );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
@@ -80,6 +95,9 @@ export default {
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.destroy();
      }
    };

backend/package-lock.json (generated, 5480 changed lines): file diff suppressed because it is too large.
@ -34,9 +34,9 @@
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
|
||||
"dev:docker": "nodemon",
|
||||
"build": "tsup",
|
||||
"build": "tsup --sourcemap",
|
||||
"build:frontend": "npm run build --prefix ../frontend",
|
||||
"start": "node dist/main.mjs",
|
||||
"start": "node --enable-source-maps dist/main.mjs",
|
||||
"type:check": "tsc --noEmit",
|
||||
"lint:fix": "eslint --fix --ext js,ts ./src",
|
||||
"lint": "eslint 'src/**/*.ts'",
|
||||
@ -50,6 +50,7 @@
|
||||
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:status": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
|
||||
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
@ -78,6 +79,7 @@
|
||||
"@types/picomatch": "^2.3.3",
|
||||
"@types/prompt-sync": "^4.2.3",
|
||||
"@types/resolve": "^1.20.6",
|
||||
"@types/safe-regex": "^1.1.6",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.20.0",
|
||||
"@typescript-eslint/parser": "^6.20.0",
|
||||
@ -101,15 +103,16 @@
|
||||
"tsup": "^8.0.1",
|
||||
"tsx": "^4.4.0",
|
||||
"typescript": "^5.3.2",
|
||||
"vite-tsconfig-paths": "^4.2.2",
|
||||
"vitest": "^1.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-elasticache": "^3.637.0",
|
||||
"@aws-sdk/client-iam": "^3.525.0",
|
||||
"@aws-sdk/client-kms": "^3.609.0",
|
||||
"@aws-sdk/client-secrets-manager": "^3.504.0",
|
||||
"@aws-sdk/client-sts": "^3.600.0",
|
||||
"@casl/ability": "^6.5.0",
|
||||
"@elastic/elasticsearch": "^8.15.0",
|
||||
"@fastify/cookie": "^9.3.1",
|
||||
"@fastify/cors": "^8.5.0",
|
||||
"@fastify/etag": "^5.1.0",
|
||||
@ -121,12 +124,15 @@
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@peculiar/asn1-schema": "^2.3.8",
|
||||
"@peculiar/x509": "^1.10.0",
|
||||
"@peculiar/x509": "^1.12.1",
|
||||
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
@ -154,6 +160,7 @@
|
||||
"ldapjs": "^3.0.7",
|
||||
"libsodium-wrappers": "^0.7.13",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"mongodb": "^6.8.1",
|
||||
"ms": "^2.1.3",
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
@ -169,8 +176,12 @@
|
||||
"pg-query-stream": "^4.5.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
"pkijs": "^3.2.4",
|
||||
"posthog-node": "^3.6.2",
|
||||
"probot": "^13.0.0",
|
||||
"safe-regex": "^2.1.1",
|
||||
"scim-patch": "^0.8.3",
|
||||
"scim2-parse-filter": "^0.2.10",
|
||||
"smee-client": "^2.0.0",
|
||||
"tedious": "^18.2.1",
|
||||
"tweetnacl": "^1.0.3",
|
||||
|
@ -7,14 +7,33 @@ const prompt = promptSync({
|
||||
sigint: true
|
||||
});
|
||||
|
||||
type ComponentType = 1 | 2 | 3;
|
||||
|
||||
console.log(`
|
||||
Component List
|
||||
--------------
|
||||
0. Exit
|
||||
1. Service component
|
||||
2. DAL component
|
||||
3. Router component
|
||||
`);
|
||||
const componentType = parseInt(prompt("Select a component: "), 10);
|
||||
|
||||
function getComponentType(): ComponentType {
|
||||
while (true) {
|
||||
const input = prompt("Select a component (0-3): ");
|
||||
const componentType = parseInt(input, 10);
|
||||
|
||||
if (componentType === 0) {
|
||||
console.log("Exiting the program. Goodbye!");
|
||||
process.exit(0);
|
||||
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
|
||||
return componentType;
|
||||
} else {
|
||||
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
|
||||
}
|
||||
}
|
||||
}
|
||||
const componentType = getComponentType();
|
||||
|
||||
if (componentType === 1) {
|
||||
const componentName = prompt("Enter service name: ");
|
||||
|
backend/src/@types/fastify.d.ts (vendored, 16 changed lines)
@@ -7,6 +7,7 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
|
||||
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
@ -18,6 +19,7 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
@ -35,6 +37,7 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
||||
@ -50,6 +53,9 @@ import { TIntegrationServiceFactory } from "@app/services/integration/integratio
|
||||
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
||||
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
||||
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
||||
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
||||
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
||||
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
||||
@ -64,12 +70,14 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati
|
||||
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
||||
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
||||
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
||||
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
|
||||
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
||||
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { TUserServiceFactory } from "@app/services/user/user-service";
|
||||
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyRequest {
|
||||
@ -88,6 +96,7 @@ declare module "fastify" {
|
||||
id: string;
|
||||
orgId: string;
|
||||
};
|
||||
rateLimits: RateLimitConfiguration;
|
||||
// passport data
|
||||
passportUser: {
|
||||
isUserCompleted: string;
|
||||
@ -113,6 +122,7 @@ declare module "fastify" {
|
||||
group: TGroupServiceFactory;
|
||||
groupProject: TGroupProjectServiceFactory;
|
||||
apiKey: TApiKeyServiceFactory;
|
||||
pkiAlert: TPkiAlertServiceFactory;
|
||||
project: TProjectServiceFactory;
|
||||
projectMembership: TProjectMembershipServiceFactory;
|
||||
projectEnv: TProjectEnvServiceFactory;
|
||||
@ -150,8 +160,11 @@ declare module "fastify" {
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
auditLogStream: TAuditLogStreamServiceFactory;
|
||||
certificate: TCertificateServiceFactory;
|
||||
certificateTemplate: TCertificateTemplateServiceFactory;
|
||||
certificateAuthority: TCertificateAuthorityServiceFactory;
|
||||
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
|
||||
certificateEst: TCertificateEstServiceFactory;
|
||||
pkiCollection: TPkiCollectionServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
trustedIp: TTrustedIpServiceFactory;
|
||||
@ -165,6 +178,9 @@ declare module "fastify" {
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
userEngagement: TUserEngagementServiceFactory;
|
||||
externalKms: TExternalKmsServiceFactory;
|
||||
orgAdmin: TOrgAdminServiceFactory;
|
||||
slack: TSlackServiceFactory;
|
||||
workflowIntegration: TWorkflowIntegrationServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
backend/src/@types/knex.d.ts (vendored, 62 changed lines)
@@ -53,6 +53,12 @@ import {
|
||||
TCertificateSecretsUpdate,
|
||||
TCertificatesInsert,
|
||||
TCertificatesUpdate,
|
||||
TCertificateTemplateEstConfigs,
|
||||
TCertificateTemplateEstConfigsInsert,
|
||||
TCertificateTemplateEstConfigsUpdate,
|
||||
TCertificateTemplates,
|
||||
TCertificateTemplatesInsert,
|
||||
TCertificateTemplatesUpdate,
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate,
|
||||
@ -161,6 +167,15 @@ import {
|
||||
TOrgRoles,
|
||||
TOrgRolesInsert,
|
||||
TOrgRolesUpdate,
|
||||
TPkiAlerts,
|
||||
TPkiAlertsInsert,
|
||||
TPkiAlertsUpdate,
|
||||
TPkiCollectionItems,
|
||||
TPkiCollectionItemsInsert,
|
||||
TPkiCollectionItemsUpdate,
|
||||
TPkiCollections,
|
||||
TPkiCollectionsInsert,
|
||||
TPkiCollectionsUpdate,
|
||||
TProjectBots,
|
||||
TProjectBotsInsert,
|
||||
TProjectBotsUpdate,
|
||||
@ -178,6 +193,9 @@ import {
|
||||
TProjectRolesUpdate,
|
||||
TProjects,
|
||||
TProjectsInsert,
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate,
|
||||
TProjectsUpdate,
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
@ -284,6 +302,9 @@ import {
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate,
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate,
|
||||
TSuperAdmin,
|
||||
TSuperAdminInsert,
|
||||
TSuperAdminUpdate,
|
||||
@ -307,7 +328,10 @@ import {
|
||||
TUsersUpdate,
|
||||
TWebhooks,
|
||||
TWebhooksInsert,
|
||||
TWebhooksUpdate
|
||||
TWebhooksUpdate,
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
} from "@app/db/schemas";
|
||||
import {
|
||||
TSecretV2TagJunction,
|
||||
@ -355,6 +379,16 @@ declare module "knex/types/tables" {
|
||||
TCertificateAuthorityCrlUpdate
|
||||
>;
|
||||
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
|
||||
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
|
||||
TCertificateTemplates,
|
||||
TCertificateTemplatesInsert,
|
||||
TCertificateTemplatesUpdate
|
||||
>;
|
||||
[TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
|
||||
TCertificateTemplateEstConfigs,
|
||||
TCertificateTemplateEstConfigsInsert,
|
||||
TCertificateTemplateEstConfigsUpdate
|
||||
>;
|
||||
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
@ -365,6 +399,17 @@ declare module "knex/types/tables" {
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate
|
||||
>;
|
||||
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
|
||||
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
|
||||
TPkiCollections,
|
||||
TPkiCollectionsInsert,
|
||||
TPkiCollectionsUpdate
|
||||
>;
|
||||
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
|
||||
TPkiCollectionItems,
|
||||
TPkiCollectionItemsInsert,
|
||||
TPkiCollectionItemsUpdate
|
||||
>;
|
||||
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
@ -740,5 +785,20 @@ declare module "knex/types/tables" {
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate
|
||||
>;
|
||||
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
@ -115,7 +115,14 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||
@ -147,13 +154,27 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
committerId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
statusChangeBy: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
|
||||
@ -177,8 +198,20 @@ export async function down(knex: Knex): Promise<void> {
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
member: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.join(
|
||||
TableName.SecretApprovalRequest,
|
||||
`${TableName.SecretApprovalRequest}.id`,
|
||||
`${TableName.SecretApprovalRequestReviewer}.requestId`
|
||||
)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicy,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
`${TableName.SecretApprovalRequest}.policyId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
|
||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
|
||||
tb.uuid("member").notNullable().alter();
|
||||
|
@ -0,0 +1,294 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
|
||||
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
|
||||
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
|
||||
|
||||
if (!hasApproverUserId) {
|
||||
// add the new fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
// if (hasApproverId) tb.setNullable("approverId");
|
||||
tb.uuid("approverUserId");
|
||||
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
// convert project membership id => user id
|
||||
await knex(TableName.AccessApprovalPolicyApprover).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverId`]))
|
||||
});
|
||||
// drop the old field
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
if (hasApproverId) tb.dropColumn("approverId");
|
||||
tb.uuid("approverUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST ------------
|
||||
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
|
||||
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
|
||||
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
|
||||
|
||||
if (hasAccessApprovalRequestTable) {
|
||||
// new fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (!hasRequestedByUserId) {
|
||||
tb.uuid("requestedByUserId");
|
||||
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
}
|
||||
});
|
||||
|
||||
// copy the assigned project membership => user id to new fields
|
||||
await knex(TableName.AccessApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
requestedByUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedBy`]))
|
||||
});
|
||||
// drop old fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (hasRequestedBy) {
|
||||
// DROP AT A LATER TIME
|
||||
// tb.dropColumn("requestedBy");
|
||||
|
||||
// ADD ALLOW NULLABLE FOR NOW
|
||||
tb.uuid("requestedBy").nullable().alter();
|
||||
}
|
||||
tb.uuid("requestedByUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
|
||||
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
|
||||
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
|
||||
if (!hasReviewerUserId) {
|
||||
// new fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
// if (hasMemberId) tb.setNullable("member");
|
||||
tb.uuid("reviewerUserId");
|
||||
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
});
|
||||
// copy project membership => user id to new fields
|
||||
await knex(TableName.AccessApprovalRequestReviewer).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
reviewerUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.member`]))
|
||||
});
|
||||
// drop table
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
if (hasMemberId) {
|
||||
// DROP AT A LATER TIME
|
||||
// tb.dropColumn("member");
|
||||
|
||||
// ADD ALLOW NULLABLE FOR NOW
|
||||
tb.uuid("member").nullable().alter();
|
||||
}
|
||||
tb.uuid("reviewerUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
|
||||
const projectUserAdditionalPrivilegeHasProjectMembershipId = await knex.schema.hasColumn(
|
||||
TableName.ProjectUserAdditionalPrivilege,
|
||||
"projectMembershipId"
|
||||
);
|
||||
|
||||
const projectUserAdditionalPrivilegeHasUserId = await knex.schema.hasColumn(
|
||||
TableName.ProjectUserAdditionalPrivilege,
|
||||
"userId"
|
||||
);
|
||||
|
||||
if (!projectUserAdditionalPrivilegeHasUserId) {
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
tb.uuid("userId");
|
||||
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
|
||||
tb.string("projectId");
|
||||
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
await knex(TableName.ProjectUserAdditionalPrivilege)
|
||||
.update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
userId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])),
|
||||
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
projectId: knex(TableName.ProjectMembership)
|
||||
.select("projectId")
|
||||
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`]))
|
||||
})
|
||||
.whereNotNull("projectMembershipId");
|
||||
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
tb.uuid("userId").notNullable().alter();
|
||||
tb.string("projectId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
if (projectUserAdditionalPrivilegeHasProjectMembershipId) {
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
// DROP AT A LATER TIME
|
||||
// tb.dropColumn("projectMembershipId");
|
||||
|
||||
// ADD ALLOW NULLABLE FOR NOW
|
||||
tb.uuid("projectMembershipId").nullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
  // We remove project user additional privileges first, because it may delete records in the database where the project membership is not found.
  // The project membership won't be found on records created by group members. In those cases we just delete the record and continue.
  // When the additional privilege record is deleted, it will cascade delete the access request created by the group member.
|
||||
|
||||
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
|
||||
const hasUserId = await knex.schema.hasColumn(TableName.ProjectUserAdditionalPrivilege, "userId");
|
||||
const hasProjectMembershipId = await knex.schema.hasColumn(
|
||||
TableName.ProjectUserAdditionalPrivilege,
|
||||
"projectMembershipId"
|
||||
);
|
||||
|
||||
// If it doesn't have the userId field, then the up migration has not run
|
||||
if (!hasUserId) {
|
||||
return;
|
||||
}
|
||||
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
if (!hasProjectMembershipId) {
|
||||
tb.uuid("projectMembershipId");
|
||||
tb.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasProjectMembershipId) {
|
||||
// First, update records where a matching project membership exists
|
||||
await knex(TableName.ProjectUserAdditionalPrivilege).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
projectMembershipId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.userId`]))
|
||||
});
|
||||
|
||||
await knex(TableName.AccessApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
projectMembershipId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.userId`]))
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
tb.dropColumn("userId");
|
||||
tb.dropColumn("projectId");
|
||||
|
||||
tb.uuid("projectMembershipId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// Then, delete records where no matching project membership was found
|
||||
await knex(TableName.ProjectUserAdditionalPrivilege).whereNull("projectMembershipId").delete();
|
||||
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
|
||||
|
||||
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
|
||||
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
|
||||
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
|
||||
|
||||
if (hasApproverUserId) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
if (!hasApproverId) {
|
||||
tb.uuid("approverId");
|
||||
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasApproverId) {
|
||||
await knex(TableName.AccessApprovalPolicyApprover).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
tb.dropColumn("approverUserId");
|
||||
|
||||
tb.uuid("approverId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST ------------
|
||||
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
|
||||
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
|
||||
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
|
||||
|
||||
if (hasAccessApprovalRequestTable) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (!hasRequestedBy) {
|
||||
tb.uuid("requestedBy");
|
||||
tb.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
// Try to find a project membership based on the AccessApprovalRequest.requestedByUserId and AccessApprovalRequest.policyId(reference to AccessApprovalRequestPolicy).envId(reference to Environment).projectId(reference to Project)
|
||||
// If a project membership is found, set the AccessApprovalRequest.requestedBy to the project membership id
|
||||
// If a project membership is not found, remove the AccessApprovalRequest record
|
||||
|
||||
await knex(TableName.AccessApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
requestedBy: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`]))
|
||||
});
|
||||
|
||||
// Then, delete records where no matching project membership was found
|
||||
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
|
||||
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (hasRequestedByUserId) {
|
||||
tb.dropColumn("requestedByUserId");
|
||||
}
|
||||
if (hasRequestedBy) tb.uuid("requestedBy").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
|
||||
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
|
||||
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
|
||||
|
||||
if (hasReviewerUserId) {
|
||||
if (!hasMemberId) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
tb.uuid("member");
|
||||
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
await knex(TableName.AccessApprovalRequestReviewer).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
member: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
tb.dropColumn("reviewerUserId");
|
||||
|
||||
tb.uuid("member").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
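The recurring step in this migration is a correlated update: each row's new user or membership column is filled from a subquery on the ProjectMembership table, and knex.raw("??", [...]) binds the outer table's column as an identifier rather than as a string value. Reduced to one representative statement, with the table and column names exactly as used above:

// backfill userId on each additional-privilege row from its old projectMembershipId
await knex(TableName.ProjectUserAdditionalPrivilege).update({
  // eslint-disable-next-line
  // @ts-ignore because generate schema happens after this
  userId: knex(TableName.ProjectMembership)
    .select("userId")
    .where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`]))
});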
|
backend/src/db/migrations/20240802181855_ca-cert-version.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId");
|
||||
if (!hasActiveCaCertIdColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.uuid("activeCaCertId").nullable();
|
||||
t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert);
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.CertificateAuthority}" ca
|
||||
SET "activeCaCertId" = cac.id
|
||||
FROM "${TableName.CertificateAuthorityCert}" cac
|
||||
WHERE ca.id = cac."caId"
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
|
||||
const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version");
|
||||
if (!hasVersionColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.integer("version").nullable();
|
||||
t.dropUnique(["caId"]);
|
||||
});
|
||||
|
||||
await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version");
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.integer("version").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId");
|
||||
if (!hasCaSecretIdColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.uuid("caSecretId").nullable();
|
||||
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.CertificateAuthorityCert}" cert
|
||||
SET "caSecretId" = (
|
||||
SELECT sec.id
|
||||
FROM "${TableName.CertificateAuthoritySecret}" sec
|
||||
WHERE sec."caId" = cert."caId"
|
||||
)
|
||||
`);
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.uuid("caSecretId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => {
|
||||
t.dropUnique(["caId"]);
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Certificate)) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.uuid("caCertId").nullable();
|
||||
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.Certificate}" cert
|
||||
SET "caCertId" = (
|
||||
SELECT caCert.id
|
||||
FROM "${TableName.CertificateAuthorityCert}" caCert
|
||||
WHERE caCert."caId" = cert."caId"
|
||||
)
|
||||
`);
|
||||
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.uuid("caCertId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.dropColumn("activeCaCertId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
|
||||
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.dropColumn("version");
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.dropColumn("caSecretId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Certificate)) {
|
||||
if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.dropColumn("caCertId");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,25 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
|
||||
if (!doesPasswordExist) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
t.string("password").nullable();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
|
||||
if (doesPasswordExist) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
t.dropColumn("password");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
|
||||
await knex.schema.alterTable(TableName.RateLimit, (t) => {
|
||||
if (hasCreationLimitCol) {
|
||||
t.dropColumn("creationLimit");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
|
||||
await knex.schema.alterTable(TableName.RateLimit, (t) => {
|
||||
if (!hasCreationLimitCol) {
|
||||
t.integer("creationLimit").defaultTo(30).notNullable();
|
||||
}
|
||||
});
|
||||
}
|
backend/src/db/migrations/20240806185442_drop-tag-name.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
|
||||
if (hasNameField) {
|
||||
await knex.schema.alterTable(TableName.SecretTag, (t) => {
|
||||
t.dropColumn("name");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
|
||||
if (!hasNameField) {
|
||||
await knex.schema.alterTable(TableName.SecretTag, (t) => {
|
||||
t.string("name");
|
||||
});
|
||||
}
|
||||
}
|
backend/src/db/migrations/20240818024923_cert-alerting.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.PkiCollection))) {
|
||||
await knex.schema.createTable(TableName.PkiCollection, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.string("name").notNullable();
|
||||
t.string("description").notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.PkiCollection);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.PkiCollectionItem))) {
|
||||
await knex.schema.createTable(TableName.PkiCollectionItem, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("pkiCollectionId").notNullable();
|
||||
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
|
||||
t.uuid("caId").nullable();
|
||||
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.uuid("certId").nullable();
|
||||
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.PkiCollectionItem);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.PkiAlert))) {
|
||||
await knex.schema.createTable(TableName.PkiAlert, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.uuid("pkiCollectionId").notNullable();
|
||||
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
|
||||
t.string("name").notNullable();
|
||||
t.integer("alertBeforeDays").notNullable();
|
||||
t.string("recipientEmails").notNullable();
|
||||
t.unique(["name", "projectId"]);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.PkiAlert);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.PkiAlert);
|
||||
await dropOnUpdateTrigger(knex, TableName.PkiAlert);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.PkiCollectionItem);
|
||||
await dropOnUpdateTrigger(knex, TableName.PkiCollectionItem);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.PkiCollection);
|
||||
await dropOnUpdateTrigger(knex, TableName.PkiCollection);
|
||||
}
|
@ -0,0 +1,55 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
|
||||
if (!hasCertificateTemplateTable) {
|
||||
await knex.schema.createTable(TableName.CertificateTemplate, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.uuid("caId").notNullable();
|
||||
tb.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
tb.uuid("pkiCollectionId");
|
||||
tb.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("SET NULL");
|
||||
tb.string("name").notNullable();
|
||||
tb.string("commonName").notNullable();
|
||||
tb.string("subjectAlternativeName").notNullable();
|
||||
tb.string("ttl").notNullable();
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateTemplate);
|
||||
}
|
||||
|
||||
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
|
||||
TableName.Certificate,
|
||||
"certificateTemplateId"
|
||||
);
|
||||
|
||||
if (!doesCertificateTableHaveTemplateId) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (tb) => {
|
||||
tb.uuid("certificateTemplateId");
|
||||
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("SET NULL");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
|
||||
TableName.Certificate,
|
||||
"certificateTemplateId"
|
||||
);
|
||||
|
||||
if (doesCertificateTableHaveTemplateId) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.dropColumn("certificateTemplateId");
|
||||
});
|
||||
}
|
||||
|
||||
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
|
||||
if (hasCertificateTemplateTable) {
|
||||
await knex.schema.dropTable(TableName.CertificateTemplate);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateTemplate);
|
||||
}
|
||||
}
|
@ -0,0 +1,26 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasEstConfigTable = await knex.schema.hasTable(TableName.CertificateTemplateEstConfig);
|
||||
if (!hasEstConfigTable) {
|
||||
await knex.schema.createTable(TableName.CertificateTemplateEstConfig, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.uuid("certificateTemplateId").notNullable().unique();
|
||||
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("CASCADE");
|
||||
tb.binary("encryptedCaChain").notNullable();
|
||||
tb.string("hashedPassphrase").notNullable();
|
||||
tb.boolean("isEnabled").notNullable();
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.CertificateTemplateEstConfig);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
|
||||
}
|
@ -0,0 +1,36 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
|
||||
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCrl, "caSecretId");
|
||||
if (!hasCaSecretIdColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
|
||||
t.uuid("caSecretId").nullable();
|
||||
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.CertificateAuthorityCrl}" crl
|
||||
SET "caSecretId" = (
|
||||
SELECT sec.id
|
||||
FROM "${TableName.CertificateAuthoritySecret}" sec
|
||||
WHERE sec."caId" = crl."caId"
|
||||
)
|
||||
`);
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
|
||||
t.uuid("caSecretId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
|
||||
t.dropColumn("caSecretId");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,96 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
|
||||
await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.string("integration").notNullable();
|
||||
tb.string("slug").notNullable();
|
||||
tb.uuid("orgId").notNullable();
|
||||
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
tb.string("description");
|
||||
tb.unique(["orgId", "slug"]);
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
|
||||
await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).notNullable();
|
||||
tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
|
||||
tb.string("teamId").notNullable();
|
||||
tb.string("teamName").notNullable();
|
||||
tb.string("slackUserId").notNullable();
|
||||
tb.string("slackAppId").notNullable();
|
||||
tb.binary("encryptedBotAccessToken").notNullable();
|
||||
tb.string("slackBotId").notNullable();
|
||||
tb.string("slackBotUserId").notNullable();
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
|
||||
await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.string("projectId").notNullable().unique();
|
||||
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
tb.uuid("slackIntegrationId").notNullable();
|
||||
tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
|
||||
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
|
||||
tb.string("accessRequestChannels").notNullable().defaultTo("");
|
||||
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
|
||||
tb.string("secretRequestChannels").notNullable().defaultTo("");
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
|
||||
}
|
||||
|
||||
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
|
||||
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
|
||||
TableName.SuperAdmin,
|
||||
"encryptedSlackClientSecret"
|
||||
);
|
||||
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
|
||||
if (!doesSuperAdminHaveSlackClientId) {
|
||||
tb.binary("encryptedSlackClientId");
|
||||
}
|
||||
if (!doesSuperAdminHaveSlackClientSecret) {
|
||||
tb.binary("encryptedSlackClientSecret");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
|
||||
await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
|
||||
await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
|
||||
|
||||
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
|
||||
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
|
||||
TableName.SuperAdmin,
|
||||
"encryptedSlackClientSecret"
|
||||
);
|
||||
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
|
||||
if (doesSuperAdminHaveSlackClientId) {
|
||||
tb.dropColumn("encryptedSlackClientId");
|
||||
}
|
||||
if (doesSuperAdminHaveSlackClientSecret) {
|
||||
tb.dropColumn("encryptedSlackClientSecret");
|
||||
}
|
||||
});
|
||||
}
|
@ -0,0 +1,25 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
|
||||
TableName.CertificateAuthority,
|
||||
"requireTemplateForIssuance"
|
||||
);
|
||||
if (!hasRequireTemplateForIssuanceColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.dropColumn("requireTemplateForIssuance");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,85 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// Certificate template
|
||||
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
|
||||
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
|
||||
if (!hasKeyUsagesCol) {
|
||||
tb.specificType("keyUsages", "text[]");
|
||||
}
|
||||
|
||||
if (!hasExtendedKeyUsagesCol) {
|
||||
tb.specificType("extendedKeyUsages", "text[]");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasKeyUsagesCol) {
|
||||
await knex(TableName.CertificateTemplate).update({
|
||||
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
|
||||
});
|
||||
}
|
||||
|
||||
if (!hasExtendedKeyUsagesCol) {
|
||||
await knex(TableName.CertificateTemplate).update({
|
||||
extendedKeyUsages: []
|
||||
});
|
||||
}
|
||||
|
||||
// Certificate
|
||||
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
|
||||
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
|
||||
await knex.schema.alterTable(TableName.Certificate, (tb) => {
|
||||
if (!doesCertTableHaveKeyUsages) {
|
||||
tb.specificType("keyUsages", "text[]");
|
||||
}
|
||||
|
||||
if (!doesCertTableHaveExtendedKeyUsages) {
|
||||
tb.specificType("extendedKeyUsages", "text[]");
|
||||
}
|
||||
});
|
||||
|
||||
if (!doesCertTableHaveKeyUsages) {
|
||||
await knex(TableName.Certificate).update({
|
||||
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
|
||||
});
|
||||
}
|
||||
|
||||
if (!doesCertTableHaveExtendedKeyUsages) {
|
||||
await knex(TableName.Certificate).update({
|
||||
extendedKeyUsages: []
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
// Certificate Template
|
||||
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
|
||||
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
|
||||
if (hasKeyUsagesCol) {
|
||||
t.dropColumn("keyUsages");
|
||||
}
|
||||
if (hasExtendedKeyUsagesCol) {
|
||||
t.dropColumn("extendedKeyUsages");
|
||||
}
|
||||
});
|
||||
|
||||
// Certificate
|
||||
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
|
||||
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
if (doesCertTableHaveKeyUsages) {
|
||||
t.dropColumn("keyUsages");
|
||||
}
|
||||
if (doesCertTableHaveExtendedKeyUsages) {
|
||||
t.dropColumn("extendedKeyUsages");
|
||||
}
|
||||
});
|
||||
}
|
@ -9,10 +9,10 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const AccessApprovalPoliciesApproversSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
approverId: z.string().uuid(),
|
||||
policyId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
approverUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
|
||||
|
@ -9,11 +9,12 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const AccessApprovalRequestsReviewersSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
member: z.string().uuid(),
|
||||
member: z.string().uuid().nullable().optional(),
|
||||
status: z.string(),
|
||||
requestId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
reviewerUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;
|
||||
|
@ -11,12 +11,13 @@ export const AccessApprovalRequestsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
policyId: z.string().uuid(),
|
||||
privilegeId: z.string().uuid().nullable().optional(),
|
||||
requestedBy: z.string().uuid(),
|
||||
requestedBy: z.string().uuid().nullable().optional(),
|
||||
isTemporary: z.boolean(),
|
||||
temporaryRange: z.string().nullable().optional(),
|
||||
permissions: z.unknown(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
requestedByUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
|
||||
|
@ -27,7 +27,9 @@ export const CertificateAuthoritiesSchema = z.object({
|
||||
maxPathLength: z.number().nullable().optional(),
|
||||
keyAlgorithm: z.string(),
|
||||
notBefore: z.date().nullable().optional(),
|
||||
notAfter: z.date().nullable().optional()
|
||||
notAfter: z.date().nullable().optional(),
|
||||
activeCaCertId: z.string().uuid().nullable().optional(),
|
||||
requireTemplateForIssuance: z.boolean().default(false)
|
||||
});
|
||||
|
||||
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
|
||||
|
@ -15,7 +15,9 @@ export const CertificateAuthorityCertsSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
caId: z.string().uuid(),
|
||||
encryptedCertificate: zodBuffer,
|
||||
encryptedCertificateChain: zodBuffer
|
||||
encryptedCertificateChain: zodBuffer,
|
||||
version: z.number(),
|
||||
caSecretId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;
|
||||
|
@ -14,7 +14,8 @@ export const CertificateAuthorityCrlSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
caId: z.string().uuid(),
|
||||
encryptedCrl: zodBuffer
|
||||
encryptedCrl: zodBuffer,
|
||||
caSecretId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;
|
||||
|
29
backend/src/db/schemas/certificate-template-est-configs.ts
Normal file
29
backend/src/db/schemas/certificate-template-est-configs.ts
Normal file
@ -0,0 +1,29 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateTemplateEstConfigsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
certificateTemplateId: z.string().uuid(),
|
||||
encryptedCaChain: zodBuffer,
|
||||
hashedPassphrase: z.string(),
|
||||
isEnabled: z.boolean(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;
|
||||
export type TCertificateTemplateEstConfigsInsert = Omit<
|
||||
z.input<typeof CertificateTemplateEstConfigsSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TCertificateTemplateEstConfigsUpdate = Partial<
|
||||
Omit<z.input<typeof CertificateTemplateEstConfigsSchema>, TImmutableDBKeys>
|
||||
>;
|
26
backend/src/db/schemas/certificate-templates.ts
Normal file
26
backend/src/db/schemas/certificate-templates.ts
Normal file
@ -0,0 +1,26 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateTemplatesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
caId: z.string().uuid(),
|
||||
pkiCollectionId: z.string().uuid().nullable().optional(),
|
||||
name: z.string(),
|
||||
commonName: z.string(),
|
||||
subjectAlternativeName: z.string(),
|
||||
ttl: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
keyUsages: z.string().array().nullable().optional(),
|
||||
extendedKeyUsages: z.string().array().nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;
|
||||
export type TCertificateTemplatesInsert = Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>;
|
||||
export type TCertificateTemplatesUpdate = Partial<Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>>;
|
@ -20,7 +20,11 @@ export const CertificatesSchema = z.object({
|
||||
notAfter: z.date(),
|
||||
revokedAt: z.date().nullable().optional(),
|
||||
revocationReason: z.number().nullable().optional(),
|
||||
altNames: z.string().default("").nullable().optional()
|
||||
altNames: z.string().default("").nullable().optional(),
|
||||
caCertId: z.string().uuid(),
|
||||
certificateTemplateId: z.string().uuid().nullable().optional(),
|
||||
keyUsages: z.string().array().nullable().optional(),
|
||||
extendedKeyUsages: z.string().array().nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificates = z.infer<typeof CertificatesSchema>;
|
||||
|
@ -14,6 +14,8 @@ export * from "./certificate-authority-crl";
|
||||
export * from "./certificate-authority-secret";
|
||||
export * from "./certificate-bodies";
|
||||
export * from "./certificate-secrets";
|
||||
export * from "./certificate-template-est-configs";
|
||||
export * from "./certificate-templates";
|
||||
export * from "./certificates";
|
||||
export * from "./dynamic-secret-leases";
|
||||
export * from "./dynamic-secrets";
|
||||
@ -52,11 +54,15 @@ export * from "./org-bots";
|
||||
export * from "./org-memberships";
|
||||
export * from "./org-roles";
|
||||
export * from "./organizations";
|
||||
export * from "./pki-alerts";
|
||||
export * from "./pki-collection-items";
|
||||
export * from "./pki-collections";
|
||||
export * from "./project-bots";
|
||||
export * from "./project-environments";
|
||||
export * from "./project-keys";
|
||||
export * from "./project-memberships";
|
||||
export * from "./project-roles";
|
||||
export * from "./project-slack-configs";
|
||||
export * from "./project-user-additional-privilege";
|
||||
export * from "./project-user-membership-roles";
|
||||
export * from "./projects";
|
||||
@ -96,6 +102,7 @@ export * from "./secret-versions-v2";
|
||||
export * from "./secrets";
|
||||
export * from "./secrets-v2";
|
||||
export * from "./service-tokens";
|
||||
export * from "./slack-integrations";
|
||||
export * from "./super-admin";
|
||||
export * from "./trusted-ips";
|
||||
export * from "./user-actions";
|
||||
@ -104,3 +111,4 @@ export * from "./user-encryption-keys";
|
||||
export * from "./user-group-membership";
|
||||
export * from "./users";
|
||||
export * from "./webhooks";
|
||||
export * from "./workflow-integrations";
|
||||
|
@ -3,12 +3,17 @@ import { z } from "zod";
|
||||
export enum TableName {
|
||||
Users = "users",
|
||||
CertificateAuthority = "certificate_authorities",
|
||||
CertificateTemplateEstConfig = "certificate_template_est_configs",
|
||||
CertificateAuthorityCert = "certificate_authority_certs",
|
||||
CertificateAuthoritySecret = "certificate_authority_secret",
|
||||
CertificateAuthorityCrl = "certificate_authority_crl",
|
||||
Certificate = "certificates",
|
||||
CertificateBody = "certificate_bodies",
|
||||
CertificateSecret = "certificate_secrets",
|
||||
CertificateTemplate = "certificate_templates",
|
||||
PkiAlert = "pki_alerts",
|
||||
PkiCollection = "pki_collections",
|
||||
PkiCollectionItem = "pki_collection_items",
|
||||
Groups = "groups",
|
||||
GroupProjectMembership = "group_project_memberships",
|
||||
GroupProjectMembershipRole = "group_project_membership_roles",
|
||||
@ -109,7 +114,10 @@ export enum TableName {
|
||||
InternalKms = "internal_kms",
|
||||
InternalKmsKeyVersion = "internal_kms_key_version",
|
||||
// @depreciated
|
||||
KmsKeyVersion = "kms_key_versions"
|
||||
KmsKeyVersion = "kms_key_versions",
|
||||
WorkflowIntegrations = "workflow_integrations",
|
||||
SlackIntegrations = "slack_integrations",
|
||||
ProjectSlackConfigs = "project_slack_configs"
|
||||
}
|
||||
|
||||
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
||||
|
23
backend/src/db/schemas/pki-alerts.ts
Normal file
23
backend/src/db/schemas/pki-alerts.ts
Normal file
@ -0,0 +1,23 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const PkiAlertsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
projectId: z.string(),
|
||||
pkiCollectionId: z.string().uuid(),
|
||||
name: z.string(),
|
||||
alertBeforeDays: z.number(),
|
||||
recipientEmails: z.string()
|
||||
});
|
||||
|
||||
export type TPkiAlerts = z.infer<typeof PkiAlertsSchema>;
|
||||
export type TPkiAlertsInsert = Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>;
|
||||
export type TPkiAlertsUpdate = Partial<Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>>;
|
21
backend/src/db/schemas/pki-collection-items.ts
Normal file
21
backend/src/db/schemas/pki-collection-items.ts
Normal file
@ -0,0 +1,21 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const PkiCollectionItemsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
pkiCollectionId: z.string().uuid(),
|
||||
caId: z.string().uuid().nullable().optional(),
|
||||
certId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TPkiCollectionItems = z.infer<typeof PkiCollectionItemsSchema>;
|
||||
export type TPkiCollectionItemsInsert = Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>;
|
||||
export type TPkiCollectionItemsUpdate = Partial<Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>>;
|
21
backend/src/db/schemas/pki-collections.ts
Normal file
21
backend/src/db/schemas/pki-collections.ts
Normal file
@ -0,0 +1,21 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const PkiCollectionsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
projectId: z.string(),
|
||||
name: z.string(),
|
||||
description: z.string()
|
||||
});
|
||||
|
||||
export type TPkiCollections = z.infer<typeof PkiCollectionsSchema>;
|
||||
export type TPkiCollectionsInsert = Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>;
|
||||
export type TPkiCollectionsUpdate = Partial<Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>>;
|
24
backend/src/db/schemas/project-slack-configs.ts
Normal file
24
backend/src/db/schemas/project-slack-configs.ts
Normal file
@ -0,0 +1,24 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ProjectSlackConfigsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
projectId: z.string(),
|
||||
slackIntegrationId: z.string().uuid(),
|
||||
isAccessRequestNotificationEnabled: z.boolean().default(false),
|
||||
accessRequestChannels: z.string().default(""),
|
||||
isSecretRequestNotificationEnabled: z.boolean().default(false),
|
||||
secretRequestChannels: z.string().default(""),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
|
||||
export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
|
||||
export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;
|
@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
|
||||
export const ProjectUserAdditionalPrivilegeSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
slug: z.string(),
|
||||
projectMembershipId: z.string().uuid(),
|
||||
projectMembershipId: z.string().uuid().nullable().optional(),
|
||||
isTemporary: z.boolean().default(false),
|
||||
temporaryMode: z.string().nullable().optional(),
|
||||
temporaryRange: z.string().nullable().optional(),
|
||||
@ -18,7 +18,9 @@ export const ProjectUserAdditionalPrivilegeSchema = z.object({
|
||||
temporaryAccessEndTime: z.date().nullable().optional(),
|
||||
permissions: z.unknown(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
userId: z.string().uuid(),
|
||||
projectId: z.string()
|
||||
});
|
||||
|
||||
export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;
|
||||
|
@ -15,7 +15,6 @@ export const RateLimitSchema = z.object({
|
||||
authRateLimit: z.number().default(60),
|
||||
inviteUserRateLimit: z.number().default(30),
|
||||
mfaRateLimit: z.number().default(20),
|
||||
creationLimit: z.number().default(30),
|
||||
publicEndpointLimit: z.number().default(30),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
|
@ -21,7 +21,8 @@ export const SecretSharingSchema = z.object({
|
||||
expiresAfterViews: z.number().nullable().optional(),
|
||||
accessType: z.string().default("anyone"),
|
||||
name: z.string().nullable().optional(),
|
||||
lastViewedAt: z.date().nullable().optional()
|
||||
lastViewedAt: z.date().nullable().optional(),
|
||||
password: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
|
||||
|
@ -9,7 +9,6 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretTagsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
color: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
|
27
backend/src/db/schemas/slack-integrations.ts
Normal file
27
backend/src/db/schemas/slack-integrations.ts
Normal file
@ -0,0 +1,27 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SlackIntegrationsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
teamId: z.string(),
|
||||
teamName: z.string(),
|
||||
slackUserId: z.string(),
|
||||
slackAppId: z.string(),
|
||||
encryptedBotAccessToken: zodBuffer,
|
||||
slackBotId: z.string(),
|
||||
slackBotUserId: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TSlackIntegrations = z.infer<typeof SlackIntegrationsSchema>;
|
||||
export type TSlackIntegrationsInsert = Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>;
|
||||
export type TSlackIntegrationsUpdate = Partial<Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>>;
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SuperAdminSchema = z.object({
|
||||
@ -19,7 +21,9 @@ export const SuperAdminSchema = z.object({
|
||||
trustLdapEmails: z.boolean().default(false).nullable().optional(),
|
||||
trustOidcEmails: z.boolean().default(false).nullable().optional(),
|
||||
defaultAuthOrgId: z.string().uuid().nullable().optional(),
|
||||
enabledLoginMethods: z.string().array().nullable().optional()
|
||||
enabledLoginMethods: z.string().array().nullable().optional(),
|
||||
encryptedSlackClientId: zodBuffer.nullable().optional(),
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
|
||||
|
22
backend/src/db/schemas/workflow-integrations.ts
Normal file
22
backend/src/db/schemas/workflow-integrations.ts
Normal file
@ -0,0 +1,22 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const WorkflowIntegrationsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
integration: z.string(),
|
||||
slug: z.string(),
|
||||
orgId: z.string().uuid(),
|
||||
description: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TWorkflowIntegrations = z.infer<typeof WorkflowIntegrationsSchema>;
|
||||
export type TWorkflowIntegrationsInsert = Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>;
|
||||
export type TWorkflowIntegrationsUpdate = Partial<Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>>;
|
173
backend/src/ee/routes/est/certificate-est-router.ts
Normal file
173
backend/src/ee/routes/est/certificate-est-router.ts
Normal file
@ -0,0 +1,173 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
|
||||
export const registerCertificateEstRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
// add support for CSR bodies
|
||||
server.addContentTypeParser("application/pkcs10", { parseAs: "string" }, (_, body, done) => {
|
||||
try {
|
||||
let csrBody = body as string;
|
||||
// some EST clients send CSRs in PEM format and some in base64 format
|
||||
// for CSRs sent in PEM, we leave them as is
|
||||
// for CSRs sent in base64, we preprocess them to remove new lines and spaces
|
||||
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
|
||||
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
|
||||
}
|
||||
|
||||
done(null, csrBody);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
done(error, undefined);
|
||||
}
|
||||
});
|
||||
|
||||
// Authenticate EST client using Passphrase
|
||||
server.addHook("onRequest", async (req, res) => {
|
||||
const { authorization } = req.headers;
|
||||
const urlFragments = req.url.split("/");
|
||||
|
||||
// cacerts endpoint should not have any authentication
|
||||
if (urlFragments[urlFragments.length - 1] === "cacerts") {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!authorization) {
|
||||
const wwwAuthenticateHeader = "WWW-Authenticate";
|
||||
const errAuthRequired = "Authentication required";
|
||||
|
||||
await res.hijack();
|
||||
|
||||
// definitive connection timeout to clean-up open connections and prevent memory leak
|
||||
res.raw.setTimeout(10 * 1000, () => {
|
||||
res.raw.end();
|
||||
});
|
||||
|
||||
res.raw.setHeader(wwwAuthenticateHeader, `Basic realm="infisical"`);
|
||||
res.raw.setHeader("Content-Length", 0);
|
||||
res.raw.statusCode = 401;
|
||||
|
||||
// Write the error message to the response without ending the connection
|
||||
res.raw.write(errAuthRequired);
|
||||
|
||||
// flush headers
|
||||
res.raw.flushHeaders();
|
||||
return;
|
||||
}
|
||||
|
||||
const certificateTemplateId = urlFragments.slice(-2)[0];
|
||||
const estConfig = await server.services.certificateTemplate.getEstConfiguration({
|
||||
isInternal: true,
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
});
|
||||
}
|
||||
|
||||
const rawCredential = authorization?.split(" ").pop();
|
||||
if (!rawCredential) {
|
||||
throw new UnauthorizedError({ message: "Missing HTTP credentials" });
|
||||
}
|
||||
|
||||
// expected format is user:password
|
||||
const basicCredential = atob(rawCredential);
|
||||
const password = basicCredential.split(":").pop();
|
||||
if (!password) {
|
||||
throw new BadRequestError({
|
||||
message: "No password provided"
|
||||
});
|
||||
}
|
||||
|
||||
const isPasswordValid = await bcrypt.compare(password, estConfig.hashedPassphrase);
|
||||
if (!isPasswordValid) {
|
||||
throw new UnauthorizedError({
|
||||
message: "Invalid credentials"
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:certificateTemplateId/simpleenroll",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.string().min(1),
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.string()
|
||||
}
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only");
|
||||
void res.header("Content-Transfer-Encoding", "base64");
|
||||
|
||||
return server.services.certificateEst.simpleEnroll({
|
||||
csr: req.body,
|
||||
certificateTemplateId: req.params.certificateTemplateId,
|
||||
sslClientCert: req.headers[appCfg.SSL_CLIENT_CERTIFICATE_HEADER_KEY] as string
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:certificateTemplateId/simplereenroll",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.string().min(1),
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.string()
|
||||
}
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only");
|
||||
void res.header("Content-Transfer-Encoding", "base64");
|
||||
|
||||
return server.services.certificateEst.simpleReenroll({
|
||||
csr: req.body,
|
||||
certificateTemplateId: req.params.certificateTemplateId,
|
||||
sslClientCert: req.headers[appCfg.SSL_CLIENT_CERTIFICATE_HEADER_KEY] as string
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:certificateTemplateId/cacerts",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.string()
|
||||
}
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only");
|
||||
void res.header("Content-Transfer-Encoding", "base64");
|
||||
|
||||
return server.services.certificateEst.getCaCerts({
|
||||
certificateTemplateId: req.params.certificateTemplateId
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
@ -56,7 +56,16 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approvals: sapPubSchema.extend({ approvers: z.string().array(), secretPath: z.string().optional() }).array()
|
||||
approvals: sapPubSchema
|
||||
.extend({
|
||||
userApprovers: z
|
||||
.object({
|
||||
userId: z.string()
|
||||
})
|
||||
.array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
})
|
||||
.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -69,6 +78,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectSlug: req.query.projectSlug
|
||||
});
|
||||
|
||||
return { approvals };
|
||||
}
|
||||
});
|
||||
|
@ -1,10 +1,19 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema } from "@app/db/schemas";
|
||||
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
|
||||
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const approvalRequestUser = z.object({ userId: z.string() }).merge(
|
||||
UsersSchema.pick({
|
||||
email: true,
|
||||
firstName: true,
|
||||
lastName: true,
|
||||
username: true
|
||||
})
|
||||
);
|
||||
|
||||
export const registerAccessApprovalRequestRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/",
|
||||
@ -104,10 +113,11 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
}),
|
||||
reviewers: z
|
||||
.object({
|
||||
member: z.string(),
|
||||
userId: z.string(),
|
||||
status: z.string()
|
||||
})
|
||||
.array()
|
||||
.array(),
|
||||
requestedByUser: approvalRequestUser
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
|
@ -1,86 +1,55 @@
|
||||
/* eslint-disable @typescript-eslint/no-floating-promises */
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { CA_CRLS } from "@app/lib/api-docs";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/crl",
|
||||
url: "/:crlId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get CRL of the CA",
|
||||
description: "Get CRL in DER format (deprecated)",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRL.caId)
|
||||
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRL.crl)
|
||||
})
|
||||
200: z.instanceof(Buffer)
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { crl, ca } = await server.services.certificateAuthorityCrl.getCaCrl({
|
||||
caId: req.params.caId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
handler: async (req, res) => {
|
||||
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CA_CRL,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn
|
||||
}
|
||||
}
|
||||
});
|
||||
res.header("Content-Type", "application/pkix-crl");
|
||||
|
||||
return {
|
||||
crl
|
||||
};
|
||||
return Buffer.from(crl);
|
||||
}
|
||||
});
|
||||
|
||||
// server.route({
|
||||
// method: "GET",
|
||||
// url: "/:caId/crl/rotate",
|
||||
// config: {
|
||||
// rateLimit: writeLimit
|
||||
// },
|
||||
// onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
// schema: {
|
||||
// description: "Rotate CRL of the CA",
|
||||
// params: z.object({
|
||||
// caId: z.string().trim()
|
||||
// }),
|
||||
// response: {
|
||||
// 200: z.object({
|
||||
// message: z.string()
|
||||
// })
|
||||
// }
|
||||
// },
|
||||
// handler: async (req) => {
|
||||
// await server.services.certificateAuthority.rotateCaCrl({
|
||||
// caId: req.params.caId,
|
||||
// actor: req.permission.type,
|
||||
// actorId: req.permission.id,
|
||||
// actorAuthMethod: req.permission.authMethod,
|
||||
// actorOrgId: req.permission.orgId
|
||||
// });
|
||||
// return {
|
||||
// message: "Successfully rotated CA CRL"
|
||||
// };
|
||||
// }
|
||||
// });
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:crlId/der",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get CRL in DER format",
|
||||
params: z.object({
|
||||
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
|
||||
}),
|
||||
response: {
|
||||
200: z.instanceof(Buffer)
|
||||
}
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
|
||||
|
||||
res.header("Content-Type", "application/pkix-crl");
|
||||
|
||||
return Buffer.from(crl);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -131,7 +131,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(DYNAMIC_SECRET_LEASES.RENEW.path),
|
||||
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.ttl)
|
||||
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.environmentSlug)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -61,7 +61,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
|
||||
await server.register(
|
||||
async (pkiRouter) => {
|
||||
await pkiRouter.register(registerCaCrlRouter, { prefix: "/ca" });
|
||||
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
|
||||
},
|
||||
{ prefix: "/pki" }
|
||||
);
|
||||
|
@ -1,6 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
// TODO(akhilmhdh): Fix this when licence service gets it type
|
||||
// TODO(akhilmhdh): Fix this when license service gets it type
|
||||
import { z } from "zod";
|
||||
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
|
@ -101,6 +101,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -122,6 +122,10 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
project: z.object({
|
||||
name: z.string(),
|
||||
slug: z.string()
|
||||
}),
|
||||
event: z.object({
|
||||
type: z.string(),
|
||||
metadata: z.any()
|
||||
@ -138,7 +142,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const auditLogs = await server.services.auditLog.listProjectAuditLogs({
|
||||
const auditLogs = await server.services.auditLog.listAuditLogs({
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
|
@ -58,7 +58,6 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
|
||||
authRateLimit: z.number(),
|
||||
inviteUserRateLimit: z.number(),
|
||||
mfaRateLimit: z.number(),
|
||||
creationLimit: z.number(),
|
||||
publicEndpointLimit: z.number()
|
||||
}),
|
||||
response: {
|
||||
|
@ -100,17 +100,34 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
async (req, profile, cb) => {
|
||||
try {
|
||||
if (!profile) throw new BadRequestError({ message: "Missing profile" });
|
||||
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved
|
||||
const email =
|
||||
profile?.email ??
|
||||
// entra sends data in this format
|
||||
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
|
||||
(profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\
|
||||
|
||||
if (!email || !profile.firstName) {
|
||||
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
|
||||
const firstName = (profile.firstName ??
|
||||
// entra sends data in this format
|
||||
profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string;
|
||||
|
||||
const lastName =
|
||||
profile.lastName ?? profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/lastName"];
|
||||
|
||||
if (!email || !firstName) {
|
||||
logger.info(
|
||||
{
|
||||
err: new Error("Invalid saml request. Missing email or first name"),
|
||||
profile
|
||||
},
|
||||
`email: ${email} firstName: ${profile.firstName as string}`
|
||||
);
|
||||
}
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
|
||||
externalId: profile.nameID,
|
||||
email,
|
||||
firstName: profile.firstName as string,
|
||||
lastName: profile.lastName as string,
|
||||
firstName,
|
||||
lastName: lastName as string,
|
||||
relayState: (req.body as { RelayState?: string }).RelayState,
|
||||
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
|
||||
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string
|
||||
@ -118,7 +135,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
cb(null, { isUserCompleted, providerAuthToken });
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
cb(null, {});
|
||||
cb(error as Error);
|
||||
}
|
||||
},
|
||||
() => {}
|
||||
|
@ -5,19 +5,47 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const ScimUserSchema = z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
});
|
||||
|
||||
const ScimGroupSchema = z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string().optional()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
});
|
||||
|
||||
export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
|
||||
try {
|
||||
const strBody = body instanceof Buffer ? body.toString() : body;
|
||||
|
||||
const json: unknown = JSON.parse(strBody);
|
||||
done(null, json);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
done(error, undefined);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/scim-tokens",
|
||||
method: "POST",
|
||||
@ -124,25 +152,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
Resources: z.array(
|
||||
z.object({
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
),
|
||||
Resources: z.array(ScimUserSchema),
|
||||
itemsPerPage: z.number(),
|
||||
schemas: z.array(z.string()),
|
||||
startIndex: z.number(),
|
||||
@ -170,30 +180,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
201: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean(),
|
||||
groups: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim()
|
||||
})
|
||||
)
|
||||
})
|
||||
200: ScimUserSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -213,10 +200,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
@ -226,28 +215,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
// displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
active: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
200: ScimUserSchema
|
||||
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -257,8 +228,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const user = await req.server.services.scim.createScimUser({
externalId: req.body.userName,
email: primaryEmail,
firstName: req.body.name.givenName,
lastName: req.body.name.familyName,
firstName: req.body?.name?.givenName,
lastName: req.body?.name?.familyName,
orgId: req.permission.orgId
});

@@ -288,6 +259,116 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}
});

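For context, the create-user route above now tolerates IdPs that omit `name` entirely and leave out `active` (which defaults to `true`). Below is a minimal sketch of a provisioning request against this endpoint; the base URL, token, and all values are illustrative assumptions, not taken from the repository.

```ts
// Illustrative SCIM provisioning call; base URL, token, and payload values are
// placeholders. `userName` feeds externalId and the primary email feeds `email`
// in createScimUser above; `name` is optional and `active` defaults to true.
const SCIM_BASE_URL = "https://app.infisical.com/api/v1/scim"; // assumed
const SCIM_TOKEN = process.env.SCIM_TOKEN ?? "";

async function provisionUser() {
  const res = await fetch(`${SCIM_BASE_URL}/Users`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${SCIM_TOKEN}`,
      "Content-Type": "application/scim+json"
    },
    body: JSON.stringify({
      schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
      userName: "jane.doe@example.com",
      emails: [{ primary: true, value: "jane.doe@example.com", type: "work" }]
      // name omitted on purpose; active falls back to its schema default of true
    })
  });
  return res.json();
}
```
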
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PUT",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z
|
||||
.object({
|
||||
familyName: z.string().trim().optional(),
|
||||
givenName: z.string().trim().optional()
|
||||
})
|
||||
.optional(),
|
||||
displayName: z.string().trim(),
|
||||
emails: z
|
||||
.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
const user = await req.server.services.scim.replaceScimUser({
orgMembershipId: req.params.orgMembershipId,
orgId: req.permission.orgId,
firstName: req.body?.name?.givenName,
lastName: req.body?.name?.familyName,
active: req.body?.active,
email: primaryEmail,
externalId: req.body.userName
});
return user;
}
});

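Unlike the PATCH route that follows, PUT is a full-resource replacement: the handler above forwards whatever the IdP sent to `replaceScimUser`, so optional fields that are omitted simply arrive as `undefined`. A sketch of a replacement payload that deactivates a membership; every value is a placeholder.

```ts
// Illustrative full-replacement body for PUT /Users/:orgMembershipId.
// All ids and personal details below are placeholders.
const replaceUserBody = {
  schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
  id: "example-org-membership-id",
  userName: "jane.doe@example.com",
  displayName: "Jane Doe",
  emails: [{ primary: true, value: "jane.doe@example.com", type: "work" }],
  active: false // replacement with active=false deactivates the membership
};
```
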
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PATCH",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
Operations: z.array(
|
||||
z.union([
|
||||
z.object({
|
||||
op: z.union([z.literal("remove"), z.literal("Remove")]),
|
||||
path: z.string().trim(),
|
||||
value: z
|
||||
.object({
|
||||
value: z.string()
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
|
||||
path: z.string().trim().optional(),
|
||||
value: z.any().optional()
|
||||
})
|
||||
])
|
||||
)
|
||||
}),
|
||||
response: {
200: ScimUserSchema
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.updateScimUser({
orgMembershipId: req.params.orgMembershipId,
orgId: req.permission.orgId,
operations: req.body.Operations
});

return user;
}
});
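The `Operations` schema above accepts both lowercase and capitalized op names and a free-form `value` for add/replace, because IdPs differ in how they encode changes such as deactivation. A minimal sketch (not the actual `updateScimUser` implementation) of folding such operations into a partial update:

```ts
// Sketch only: normalise SCIM PATCH operations into a partial update.
// Real handling lives in the scim service; only the `active` path is shown.
type ScimPatchOp =
  | { op: "remove" | "Remove"; path: string; value?: { value: string }[] }
  | { op: "add" | "Add" | "replace" | "Replace"; path?: string; value?: unknown };

function applyUserPatch(ops: ScimPatchOp[]): { active?: boolean } {
  const update: { active?: boolean } = {};
  for (const operation of ops) {
    const op = operation.op.toLowerCase();
    if (op !== "replace" && op !== "add") continue;
    const value = operation.value as { active?: boolean | string } | undefined;
    if (operation.path === "active") {
      // e.g. { op: "Replace", path: "active", value: "False" }
      update.active = String(operation.value).toLowerCase() === "true";
    } else if (value && value.active !== undefined) {
      // e.g. { op: "Replace", value: { active: false } }
      update.active = String(value.active).toLowerCase() === "true";
    }
  }
  return update;
}
```
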
server.route({
|
||||
url: "/Groups",
|
||||
method: "POST",
|
||||
@ -302,25 +383,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional() // okta-specific
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z
|
||||
.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -341,26 +407,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
querystring: z.object({
startIndex: z.coerce.number().default(1),
count: z.coerce.number().default(20),
filter: z.string().trim().optional()
filter: z.string().trim().optional(),
excludedAttributes: z.string().trim().optional()
}),
response: {
|
||||
200: z.object({
|
||||
Resources: z.array(
|
||||
z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
),
|
||||
Resources: z.array(ScimGroupSchema),
|
||||
itemsPerPage: z.number(),
|
||||
schemas: z.array(z.string()),
|
||||
startIndex: z.number(),
|
||||
@@ -374,7 +426,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
orgId: req.permission.orgId,
startIndex: req.query.startIndex,
filter: req.query.filter,
limit: req.query.count
limit: req.query.count,
isMembersExcluded: req.query.excludedAttributes === "members"
});

return groups;
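`excludedAttributes` mirrors the SCIM mechanism for trimming expensive sub-attributes; the handler above only recognises the value `members` and turns it into an `isMembersExcluded` flag for the group listing service call. An illustrative client request (base URL and token are assumptions):

```ts
// Ask the Groups listing to skip member hydration. Base URL/token are assumed.
const SCIM_BASE_URL = "https://app.infisical.com/api/v1/scim"; // assumed
const SCIM_TOKEN = process.env.SCIM_TOKEN ?? "";

async function listGroupsWithoutMembers() {
  const url = new URL(`${SCIM_BASE_URL}/Groups`);
  url.searchParams.set("startIndex", "1");
  url.searchParams.set("count", "20");
  url.searchParams.set("excludedAttributes", "members");

  const res = await fetch(url, { headers: { Authorization: `Bearer ${SCIM_TOKEN}` } });
  return res.json();
}
```
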
@ -389,20 +442,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
groupId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
@ -411,6 +451,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
@ -434,25 +475,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.updateScimGroupNamePut({
|
||||
const group = await req.server.services.scim.replaceScimGroup({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
...req.body
|
||||
@ -474,54 +502,34 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
Operations: z.array(
|
||||
z.union([
|
||||
z.object({
|
||||
op: z.literal("replace"),
|
||||
value: z.object({
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim()
|
||||
})
|
||||
}),
|
||||
z.object({
|
||||
op: z.literal("remove"),
|
||||
path: z.string().trim()
|
||||
}),
|
||||
z.object({
|
||||
op: z.literal("add"),
|
||||
op: z.union([z.literal("remove"), z.literal("Remove")]),
|
||||
path: z.string().trim(),
|
||||
value: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim().optional()
|
||||
value: z
|
||||
.object({
|
||||
value: z.string()
|
||||
})
|
||||
)
|
||||
.array()
|
||||
.optional()
|
||||
}),
|
||||
z.object({
|
||||
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
|
||||
path: z.string().trim().optional(),
|
||||
value: z.any()
|
||||
})
|
||||
])
|
||||
)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
displayName: z.string().trim(),
|
||||
members: z.array(
|
||||
z.object({
|
||||
value: z.string(),
|
||||
display: z.string()
|
||||
})
|
||||
),
|
||||
meta: z.object({
|
||||
resourceType: z.string().trim()
|
||||
})
|
||||
})
|
||||
200: ScimGroupSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const group = await req.server.services.scim.updateScimGroupNamePatch({
|
||||
const group = await req.server.services.scim.updateScimGroup({
|
||||
groupId: req.params.groupId,
|
||||
orgId: req.permission.orgId,
|
||||
operations: req.body.Operations
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
@ -547,60 +555,4 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
|
||||
return group;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/Users/:orgMembershipId",
|
||||
method: "PUT",
|
||||
schema: {
|
||||
params: z.object({
|
||||
orgMembershipId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
schemas: z.array(z.string()),
|
||||
id: z.string().trim(),
|
||||
userName: z.string().trim(),
|
||||
name: z.object({
|
||||
familyName: z.string().trim(),
|
||||
givenName: z.string().trim()
|
||||
}),
|
||||
emails: z.array(
|
||||
z.object({
|
||||
primary: z.boolean(),
|
||||
value: z.string().email(),
|
||||
type: z.string().trim()
|
||||
})
|
||||
),
|
||||
displayName: z.string().trim(),
|
||||
active: z.boolean(),
|
||||
groups: z.array(
|
||||
z.object({
|
||||
value: z.string().trim(),
|
||||
display: z.string().trim()
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const user = await req.server.services.scim.replaceScimUser({
|
||||
orgMembershipId: req.params.orgMembershipId,
|
||||
orgId: req.permission.orgId,
|
||||
active: req.body.active
|
||||
});
|
||||
return user;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -1,9 +1,9 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TAccessApprovalPolicies } from "@app/db/schemas";
|
||||
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
|
||||
|
||||
export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;
|
||||
|
||||
@ -15,12 +15,12 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
// eslint-disable-next-line
|
||||
.where(buildFindFilter(filter))
|
||||
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
.join(
|
||||
.leftJoin(
|
||||
TableName.AccessApprovalPolicyApprover,
|
||||
`${TableName.AccessApprovalPolicy}.id`,
|
||||
`${TableName.AccessApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
.select(tx.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
|
||||
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
|
||||
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
|
||||
@ -35,18 +35,30 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(
doc,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }
const formattedDoc = sqlNestRelationships({
data: doc,
key: "id",
parentMapper: (data) => ({
environment: {
id: data.envId,
name: data.envName,
slug: data.envSlug
},
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
}),
({ approverId }) => approverId,
"approvers"
);
return formatedDoc?.[0];
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});

return formattedDoc?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "FindById" });
}
@ -55,18 +67,32 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
|
||||
const formatedDoc = mergeOneToManyRelation(
|
||||
docs,
|
||||
"id",
|
||||
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
|
||||
...el,
|
||||
envId,
|
||||
environment: { id: envId, name, slug }
|
||||
|
||||
const formattedDocs = sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (data) => ({
|
||||
environment: {
|
||||
id: data.envId,
|
||||
name: data.envName,
|
||||
slug: data.envSlug
|
||||
},
|
||||
projectId: data.projectId,
|
||||
...AccessApprovalPoliciesSchema.parse(data)
|
||||
// secretPath: data.secretPath || undefined,
|
||||
}),
|
||||
({ approverId }) => approverId,
|
||||
"approvers"
|
||||
);
|
||||
return formatedDoc.map((policy) => ({ ...policy, secretPath: policy.secretPath || undefined }));
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "userApprovers" as const,
|
||||
mapper: ({ approverUserId }) => ({
|
||||
userId: approverUserId
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return formattedDocs;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find" });
|
||||
}
|
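Both `findById` and `find` in this DAL move from `mergeOneToManyRelation` to `sqlNestRelationships`, which groups the flat joined rows by a parent key and hangs mapped children (here `userApprovers`) off each parent. A toy model of that pattern follows; it is not the real helper from `@app/lib/knex`, whose signature is richer and which also deduplicates children.

```ts
// Toy model of the sqlNestRelationships pattern used above (deduplication of
// children omitted for brevity). Treat this as a mental model, not the helper.
type NestArgs<Row, Parent, Child> = {
  data: Row[];
  key: keyof Row;
  parentMapper: (row: Row) => Parent;
  childrenMapper: {
    key: keyof Row;
    label: string;
    mapper: (row: Row) => Child | undefined;
  }[];
};

function nestRelationships<Row, Parent extends object, Child>({
  data,
  key,
  parentMapper,
  childrenMapper
}: NestArgs<Row, Parent, Child>) {
  const parents = new Map<unknown, Parent & Record<string, Child[]>>();
  for (const row of data) {
    const id = row[key];
    if (!parents.has(id)) {
      const base = parentMapper(row) as Parent & Record<string, Child[]>;
      childrenMapper.forEach(({ label }) => {
        base[label] = [];
      });
      parents.set(id, base);
    }
    const parent = parents.get(id)!;
    for (const { key: childKey, label, mapper } of childrenMapper) {
      if (row[childKey] !== null && row[childKey] !== undefined) {
        const child = mapper(row);
        if (child !== undefined) parent[label].push(child);
      }
    }
  }
  return [...parents.values()];
}
```
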
@ -34,8 +34,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
accessApprovalPolicyApproverDAL,
|
||||
permissionService,
|
||||
projectEnvDAL,
|
||||
projectDAL,
|
||||
projectMembershipDAL
|
||||
projectDAL
|
||||
}: TSecretApprovalPolicyServiceFactoryDep) => {
|
||||
const createAccessApprovalPolicy = async ({
|
||||
name,
|
||||
@ -70,15 +69,6 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
|
||||
if (!env) throw new BadRequestError({ message: "Environment not found" });
|
||||
|
||||
const secretApprovers = await projectMembershipDAL.find({
|
||||
projectId: project.id,
|
||||
$in: { id: approvers }
|
||||
});
|
||||
|
||||
if (secretApprovers.length !== approvers.length) {
|
||||
throw new BadRequestError({ message: "Approver not found in project" });
|
||||
}
|
||||
|
||||
await verifyApprovers({
|
||||
projectId: project.id,
|
||||
orgId: actorOrgId,
|
||||
@ -86,7 +76,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
secretPath,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: secretApprovers.map((approver) => approver.userId)
|
||||
userIds: approvers
|
||||
});
|
||||
|
||||
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
|
||||
@ -101,8 +91,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
secretApprovers.map(({ id }) => ({
|
||||
approverId: id,
|
||||
approvers.map((userId) => ({
|
||||
approverUserId: userId,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
@ -172,15 +162,6 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
if (approvers) {
|
||||
// Find the workspace project memberships of the users passed in the approvers array
|
||||
const secretApprovers = await projectMembershipDAL.find(
|
||||
{
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
$in: { id: approvers }
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
await verifyApprovers({
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
orgId: actorOrgId,
|
||||
@ -188,15 +169,13 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
secretPath: doc.secretPath!,
|
||||
actorAuthMethod,
|
||||
permissionService,
|
||||
userIds: secretApprovers.map((approver) => approver.userId)
|
||||
userIds: approvers
|
||||
});
|
||||
|
||||
if (secretApprovers.length !== approvers.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approvers.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
tx
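With this change the policy service stores approvers by user id directly: `verifyApprovers` receives `userIds: approvers`, and the rows written by `insertMany` carry `approverUserId` instead of a project-membership id. A small sketch of that mapping (the row type name is approximate, not the generated schema type):

```ts
// Sketch of the new approver persistence shape; the real insert goes through
// accessApprovalPolicyApproverDAL.insertMany inside the transaction above.
type ApproverRow = { approverUserId: string; policyId: string };

const buildApproverRows = (approverUserIds: string[], policyId: string): ApproverRow[] =>
  approverUserIds.map((userId) => ({ approverUserId: userId, policyId }));

// e.g. buildApproverRows(["user-1", "user-2"], "policy-123") produces the rows
// that replace the previous membership-based approver records.
```
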
@ -1,7 +1,7 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests } from "@app/db/schemas";
|
||||
import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests, TUsers } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
|
||||
|
||||
@ -40,6 +40,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.AccessApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("requestedByUser"),
|
||||
`${TableName.AccessApprovalRequest}.requestedByUserId`,
|
||||
`requestedByUser.id`
|
||||
)
|
||||
|
||||
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
|
||||
.select(selectAllTableCols(TableName.AccessApprovalRequest))
|
||||
@ -52,7 +58,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
|
||||
)
|
||||
|
||||
.select(db.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
|
||||
.select(
|
||||
db.ref("projectId").withSchema(TableName.Environment),
|
||||
@ -61,15 +67,20 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
)
|
||||
|
||||
.select(
|
||||
db.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"),
|
||||
db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"),
|
||||
db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")
|
||||
)
|
||||
|
||||
// TODO: ADD SUPPORT FOR GROUPS!!!!
|
||||
.select(
|
||||
db
|
||||
.ref("projectMembershipId")
|
||||
.withSchema(TableName.ProjectUserAdditionalPrivilege)
|
||||
.as("privilegeMembershipId"),
|
||||
db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
|
||||
db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
|
||||
db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
|
||||
db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
|
||||
|
||||
db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"),
|
||||
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"),
|
||||
|
||||
db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"),
|
||||
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"),
|
||||
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"),
|
||||
@ -102,9 +113,18 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
enforcementLevel: doc.policyEnforcementLevel,
|
||||
envId: doc.policyEnvId
|
||||
},
|
||||
requestedByUser: {
|
||||
userId: doc.requestedByUserId,
|
||||
email: doc.requestedByUserEmail,
|
||||
firstName: doc.requestedByUserFirstName,
|
||||
lastName: doc.requestedByUserLastName,
|
||||
username: doc.requestedByUserUsername
|
||||
},
|
||||
privilege: doc.privilegeId
|
||||
? {
|
||||
membershipId: doc.privilegeMembershipId,
|
||||
userId: doc.privilegeUserId,
|
||||
projectId: doc.projectId,
|
||||
isTemporary: doc.privilegeIsTemporary,
|
||||
temporaryMode: doc.privilegeTemporaryMode,
|
||||
temporaryRange: doc.privilegeTemporaryRange,
|
||||
@ -118,11 +138,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "reviewerMemberId",
|
||||
key: "reviewerUserId",
|
||||
label: "reviewers" as const,
|
||||
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
|
||||
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
|
||||
},
|
||||
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
|
||||
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId }
|
||||
]
|
||||
});
|
||||
|
||||
@ -146,30 +166,65 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
`${TableName.AccessApprovalPolicy}.id`
|
||||
)
|
||||
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("requestedByUser"),
|
||||
`${TableName.AccessApprovalRequest}.requestedByUserId`,
|
||||
`requestedByUser.id`
|
||||
)
|
||||
|
||||
.join(
|
||||
TableName.AccessApprovalPolicyApprover,
|
||||
`${TableName.AccessApprovalPolicy}.id`,
|
||||
`${TableName.AccessApprovalPolicyApprover}.policyId`
|
||||
)
|
||||
|
||||
.join<TUsers>(
|
||||
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
|
||||
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
|
||||
"accessApprovalPolicyApproverUser.id"
|
||||
)
|
||||
|
||||
.leftJoin(
|
||||
TableName.AccessApprovalRequestReviewer,
|
||||
`${TableName.AccessApprovalRequest}.id`,
|
||||
`${TableName.AccessApprovalRequestReviewer}.requestId`
|
||||
)
|
||||
|
||||
.leftJoin<TUsers>(
|
||||
db(TableName.Users).as("accessApprovalReviewerUser"),
|
||||
`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`,
|
||||
`accessApprovalReviewerUser.id`
|
||||
)
|
||||
|
||||
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
.select(selectAllTableCols(TableName.AccessApprovalRequest))
|
||||
.select(
|
||||
tx.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"),
|
||||
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
|
||||
tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
|
||||
tx.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"),
|
||||
tx.ref("firstName").withSchema("accessApprovalPolicyApproverUser").as("approverFirstName"),
|
||||
tx.ref("lastName").withSchema("accessApprovalPolicyApproverUser").as("approverLastName"),
|
||||
tx.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
|
||||
tx.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
|
||||
tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
|
||||
tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
|
||||
|
||||
tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer),
|
||||
|
||||
tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"),
|
||||
|
||||
tx.ref("email").withSchema("accessApprovalReviewerUser").as("reviewerEmail"),
|
||||
tx.ref("username").withSchema("accessApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("accessApprovalReviewerUser").as("reviewerFirstName"),
|
||||
tx.ref("lastName").withSchema("accessApprovalReviewerUser").as("reviewerLastName"),
|
||||
|
||||
tx.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"),
|
||||
tx.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"),
|
||||
tx.ref("projectId").withSchema(TableName.Environment),
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
|
||||
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover)
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
|
||||
);
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
@ -189,15 +244,45 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel
|
||||
},
|
||||
requestedByUser: {
|
||||
userId: el.requestedByUserId,
|
||||
email: el.requestedByUserEmail,
|
||||
firstName: el.requestedByUserFirstName,
|
||||
lastName: el.requestedByUserLastName,
|
||||
username: el.requestedByUserUsername
|
||||
}
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "reviewerMemberId",
|
||||
key: "reviewerUserId",
|
||||
label: "reviewers" as const,
|
||||
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
|
||||
mapper: ({
|
||||
reviewerUserId: userId,
|
||||
reviewerStatus: status,
|
||||
reviewerEmail: email,
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName
|
||||
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
|
||||
},
|
||||
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
|
||||
{
|
||||
key: "approverUserId",
|
||||
label: "approvers" as const,
|
||||
mapper: ({
|
||||
approverUserId,
|
||||
approverEmail: email,
|
||||
approverUsername: username,
|
||||
approverLastName: lastName,
|
||||
approverFirstName: firstName
|
||||
}) => ({
|
||||
userId: approverUserId,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
username
|
||||
})
|
||||
}
|
||||
]
|
||||
});
if (!formatedDoc?.[0]) return;
@@ -235,7 +320,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
.where(`${TableName.Environment}.projectId`, projectId)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"));
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));

const formattedRequests = sqlNestRelationships({
data: accessRequests,
@@ -245,9 +330,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
mapper: ({ reviewerUserId: reviewer, reviewerStatus: status }) =>
reviewer ? { reviewer, status } : undefined
}
]
});
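The request DAL above joins the `Users` table several times under different aliases (`requestedByUser`, `accessApprovalPolicyApproverUser`, `accessApprovalReviewerUser`) and re-labels each alias's columns so requester, approver, and reviewer details all survive the flattened result. A simplified sketch of that aliased-join pattern with Knex; table and column names here are illustrative, not the repository's schema:

```ts
// Simplified illustration of joining the same table twice under aliases and
// prefixing the selected columns, as done in the DAL above.
import { knex } from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

async function findRequestsWithPeople() {
  return db("access_approval_requests as request")
    .join("users as requestedByUser", "request.requestedByUserId", "requestedByUser.id")
    .leftJoin("users as reviewerUser", "request.reviewerUserId", "reviewerUser.id")
    .select(
      "request.*",
      db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
      db.ref("email").withSchema("reviewerUser").as("reviewerEmail")
    );
}
```
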
@ -5,9 +5,13 @@ import { ProjectMembershipRole } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal";
|
||||
import { triggerSlackNotification } from "@app/services/slack/slack-fns";
|
||||
import { SlackTriggerFeature } from "@app/services/slack/slack-types";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
@ -33,7 +37,10 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
accessApprovalPolicyApproverDAL: Pick<TAccessApprovalPolicyApproverDALFactory, "find">;
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
|
||||
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus" | "findProjectBySlug">;
|
||||
projectDAL: Pick<
|
||||
TProjectDALFactory,
|
||||
"checkProjectUpgradeStatus" | "findProjectBySlug" | "findProjectWithOrg" | "findById"
|
||||
>;
|
||||
accessApprovalRequestDAL: Pick<
|
||||
TAccessApprovalRequestDALFactory,
|
||||
| "create"
|
||||
@ -52,7 +59,12 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
>;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
userDAL: Pick<TUserDALFactory, "findUserByProjectMembershipId" | "findUsersByProjectMembershipIds">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"findUserByProjectMembershipId" | "findUsersByProjectMembershipIds" | "find" | "findById"
|
||||
>;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
projectSlackConfigDAL: Pick<TProjectSlackConfigDALFactory, "getIntegrationDetailsByProject">;
|
||||
};
|
||||
|
||||
export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;
|
||||
@ -68,7 +80,9 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
accessApprovalPolicyApproverDAL,
|
||||
additionalPrivilegeDAL,
|
||||
smtpService,
|
||||
userDAL
|
||||
userDAL,
|
||||
kmsService,
|
||||
projectSlackConfigDAL
|
||||
}: TSecretApprovalRequestServiceFactoryDep) => {
|
||||
const createAccessApprovalRequest = async ({
|
||||
isTemporary,
|
||||
@ -94,7 +108,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
);
|
||||
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });
|
||||
|
||||
const requestedByUser = await userDAL.findUserByProjectMembershipId(membership.id);
|
||||
const requestedByUser = await userDAL.findById(actorId);
|
||||
if (!requestedByUser) throw new UnauthorizedError({ message: "User not found" });
|
||||
|
||||
await projectDAL.checkProjectUpgradeStatus(project.id);
|
||||
@ -114,13 +128,15 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
policyId: policy.id
|
||||
});
|
||||
|
||||
const approverUsers = await userDAL.findUsersByProjectMembershipIds(
|
||||
approvers.map((approver) => approver.approverId)
|
||||
);
|
||||
const approverUsers = await userDAL.find({
|
||||
$in: {
|
||||
id: approvers.map((approver) => approver.approverUserId)
|
||||
}
|
||||
});
|
||||
|
||||
const duplicateRequests = await accessApprovalRequestDAL.find({
|
||||
policyId: policy.id,
|
||||
requestedBy: membership.id,
|
||||
requestedByUserId: actorId,
|
||||
permissions: JSON.stringify(requestedPermissions),
|
||||
isTemporary
|
||||
});
|
||||
@ -153,7 +169,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
const approvalRequest = await accessApprovalRequestDAL.create(
|
||||
{
|
||||
policyId: policy.id,
|
||||
requestedBy: membership.id,
|
||||
requestedByUserId: actorId,
|
||||
temporaryRange: temporaryRange || null,
|
||||
permissions: JSON.stringify(requestedPermissions),
|
||||
isTemporary
|
||||
@ -161,13 +177,36 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
|
||||
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
|
||||
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
|
||||
|
||||
await triggerSlackNotification({
|
||||
projectId: project.id,
|
||||
projectSlackConfigDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
notification: {
|
||||
type: SlackTriggerFeature.ACCESS_REQUEST,
|
||||
payload: {
|
||||
projectName: project.name,
|
||||
requesterFullName,
|
||||
isTemporary,
|
||||
requesterEmail: requestedByUser.email as string,
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
permissions: accessTypes,
|
||||
approvalUrl
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
recipients: approverUsers.filter((approver) => approver.email).map((approver) => approver.email!),
|
||||
subjectLine: "Access Approval Request",
|
||||
|
||||
substitutions: {
|
||||
projectName: project.name,
|
||||
requesterFullName: `${requestedByUser.firstName} ${requestedByUser.lastName}`,
|
||||
requesterFullName,
|
||||
requesterEmail: requestedByUser.email,
|
||||
isTemporary,
|
||||
...(isTemporary && {
|
||||
@ -176,7 +215,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
permissions: accessTypes,
|
||||
approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval`
|
||||
approvalUrl
|
||||
},
|
||||
template: SmtpTemplates.AccessApprovalRequest
|
||||
});
|
||||
@ -212,7 +251,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id));
|
||||
|
||||
if (authorProjectMembershipId) {
|
||||
requests = requests.filter((request) => request.requestedBy === authorProjectMembershipId);
|
||||
requests = requests.filter((request) => request.requestedByUserId === actorId);
|
||||
}
|
||||
|
||||
if (envSlug) {
|
||||
@ -246,8 +285,8 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
|
||||
if (
|
||||
!hasRole(ProjectMembershipRole.Admin) &&
|
||||
accessApprovalRequest.requestedBy !== membership.id && // The request wasn't made by the current user
|
||||
!policy.approvers.find((approverId) => approverId === membership.id) // The request isn't performed by an assigned approver
|
||||
accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user
|
||||
!policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver
|
||||
) {
|
||||
throw new UnauthorizedError({ message: "You are not authorized to approve this request" });
|
||||
}
|
||||
@ -273,7 +312,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
const review = await accessApprovalRequestReviewerDAL.findOne(
|
||||
{
|
||||
requestId: accessApprovalRequest.id,
|
||||
member: membership.id
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -282,7 +321,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
{
|
||||
status,
|
||||
requestId: accessApprovalRequest.id,
|
||||
member: membership.id
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -303,7 +342,8 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
// Permanent access
|
||||
const privilege = await additionalPrivilegeDAL.create(
|
||||
{
|
||||
projectMembershipId: accessApprovalRequest.requestedBy,
|
||||
userId: accessApprovalRequest.requestedByUserId,
|
||||
projectId: accessApprovalRequest.projectId,
|
||||
slug: `requested-privilege-${slugify(alphaNumericNanoId(12))}`,
|
||||
permissions: JSON.stringify(accessApprovalRequest.permissions)
|
||||
},
|
||||
@ -317,7 +357,8 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
|
||||
const privilege = await additionalPrivilegeDAL.create(
|
||||
{
|
||||
projectMembershipId: accessApprovalRequest.requestedBy,
|
||||
userId: accessApprovalRequest.requestedByUserId,
|
||||
projectId: accessApprovalRequest.projectId,
|
||||
slug: `requested-privilege-${slugify(alphaNumericNanoId(12))}`,
|
||||
permissions: JSON.stringify(accessApprovalRequest.permissions),
|
||||
isTemporary: true,
|
||||
|
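The request service now resolves the requester with `userDAL.findById(actorId)`, loads approver users by `approverUserId`, and fires a Slack notification alongside the approval e-mail. Inferred from the call site above rather than from its definition, the notification handed to `triggerSlackNotification` looks roughly like this:

```ts
// Shape inferred from the call site only; field types such as permissions being
// a string[] are assumptions, and the enum value is a stand-in for
// SlackTriggerFeature.ACCESS_REQUEST.
type AccessRequestSlackNotification = {
  type: "accessRequest";
  payload: {
    projectName: string;
    requesterFullName: string;
    requesterEmail: string;
    isTemporary: boolean;
    secretPath: string;
    environment: string;
    permissions: string[]; // assumed; derived from accessTypes at the call site
    approvalUrl: string;
  };
};
```
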
@ -2,10 +2,11 @@ import { ForbiddenError } from "@casl/ability";
|
||||
import { RawAxiosRequestHeaders } from "axios";
|
||||
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { validateLocalIps } from "@app/lib/validator";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
|
||||
import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
@ -44,6 +45,7 @@ export const auditLogStreamServiceFactory = ({
|
||||
}: TCreateAuditLogStreamDTO) => {
|
||||
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });
|
||||
|
||||
const appCfg = getConfig();
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.auditLogStreams)
|
||||
throw new BadRequestError({
|
||||
@ -59,7 +61,9 @@ export const auditLogStreamServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
|
||||
|
||||
validateLocalIps(url);
if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}

const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@@ -131,7 +135,8 @@ export const auditLogStreamServiceFactory = ({
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

if (url) validateLocalIps(url);
const appCfg = getConfig();
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);

// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
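The old `validateLocalIps` call is replaced by `blockLocalAndPrivateIpAddresses`, and the guard is only enforced on cloud deployments (`appCfg.isCloud`). A hedged sketch of what such a guard typically does; the real implementation lives in `@app/lib/validator` and may differ:

```ts
// Illustrative SSRF guard only: refuse audit-log stream URLs that point at
// loopback, link-local, or RFC 1918 addresses. Not the actual implementation.
import { BadRequestError } from "@app/lib/errors";

const PRIVATE_HOST_PATTERNS = [
  /^localhost$/i,
  /^127\./,
  /^0\.0\.0\.0$/,
  /^10\./,
  /^192\.168\./,
  /^172\.(1[6-9]|2\d|3[0-1])\./,
  /^169\.254\./ // link-local / cloud metadata range
];

export const blockLocalAndPrivateIpAddressesSketch = (url: string) => {
  const { hostname } = new URL(url);
  if (PRIVATE_HOST_PATTERNS.some((pattern) => pattern.test(hostname))) {
    throw new BadRequestError({ message: "Local and private IP addresses are not allowed" });
  }
};
```
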
@ -1,10 +1,11 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { AuditLogsSchema, TableName } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
|
||||
import { ormify, selectAllTableCols, stripUndefinedInWhere } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
|
||||
|
||||
@ -32,23 +33,44 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
.where(
|
||||
stripUndefinedInWhere({
|
||||
projectId,
|
||||
orgId,
|
||||
[`${TableName.AuditLog}.orgId`]: orgId,
|
||||
eventType,
|
||||
actor,
|
||||
userAgentType
|
||||
})
|
||||
)
|
||||
|
||||
.leftJoin(TableName.Project, `${TableName.AuditLog}.projectId`, `${TableName.Project}.id`)
|
||||
|
||||
.select(selectAllTableCols(TableName.AuditLog))
|
||||
|
||||
.select(
|
||||
db.ref("name").withSchema(TableName.Project).as("projectName"),
|
||||
db.ref("slug").withSchema(TableName.Project).as("projectSlug")
|
||||
)
|
||||
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
.orderBy("createdAt", "desc");
|
||||
.orderBy(`${TableName.AuditLog}.createdAt`, "desc");
|
||||
|
||||
if (actor) {
|
||||
void sqlQuery.whereRaw(`"actorMetadata"->>'userId' = ?`, [actor]);
|
||||
}
|
||||
|
||||
if (startDate) {
|
||||
void sqlQuery.where("createdAt", ">=", startDate);
|
||||
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, ">=", startDate);
|
||||
}
|
||||
if (endDate) {
|
||||
void sqlQuery.where("createdAt", "<=", endDate);
|
||||
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, "<=", endDate);
|
||||
}
|
||||
const docs = await sqlQuery;
|
||||
return docs;
|
||||
|
||||
return docs.map((doc) => ({
|
||||
...AuditLogsSchema.parse(doc),
|
||||
project: {
|
||||
name: doc.projectName,
|
||||
slug: doc.projectSlug
|
||||
}
|
||||
}));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error });
|
||||
}
|
||||
@@ -62,7 +84,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;

logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
@@ -75,15 +99,18 @@ export const auditLogDALFactory = (db: TDbClient) => {
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 100); // time to breathe for db
});
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};

return { ...auditLogOrm, pruneAuditLog, find };
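The prune loop's exit condition changes so that a failed batch keeps the loop alive for up to `MAX_RETRY_ON_FAILURE` attempts instead of stopping at the first failed iteration. A standalone sketch of the same shape; the delete callback and the retry limit are placeholders, not the DAL internals:

```ts
// Standalone sketch of the retry-aware batched prune loop above.
const MAX_RETRY_ON_FAILURE = 3; // assumed value

async function pruneInBatches(deleteBatch: () => Promise<string[]>) {
  let deletedIds: string[] = [];
  let retries = 0;
  let isRetrying = false;

  do {
    try {
      deletedIds = await deleteBatch();
      retries = 0; // a successful batch resets the failure counter
    } catch (error) {
      retries += 1;
      console.error("batch delete failed", error);
    } finally {
      // small pause so the database can breathe between batches
      await new Promise((resolve) => {
        setTimeout(resolve, 10);
      });
    }
    isRetrying = retries > 0;
  } while (deletedIds.length > 0 || (isRetrying && retries < MAX_RETRY_ON_FAILURE));
}
```
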
@ -1,7 +1,9 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TAuditLogDALFactory } from "./audit-log-dal";
|
||||
@ -10,7 +12,7 @@ import { EventType, TCreateAuditLogDTO, TListProjectAuditLogDTO } from "./audit-
|
||||
|
||||
type TAuditLogServiceFactoryDep = {
|
||||
auditLogDAL: TAuditLogDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
|
||||
auditLogQueue: TAuditLogQueueServiceFactory;
|
||||
};
|
||||
|
||||
@ -21,7 +23,7 @@ export const auditLogServiceFactory = ({
|
||||
auditLogQueue,
|
||||
permissionService
|
||||
}: TAuditLogServiceFactoryDep) => {
|
||||
const listProjectAuditLogs = async ({
|
||||
const listAuditLogs = async ({
|
||||
userAgentType,
|
||||
eventType,
|
||||
offset,
|
||||
@ -35,14 +37,33 @@ export const auditLogServiceFactory = ({
|
||||
projectId,
|
||||
auditLogActor
|
||||
}: TListProjectAuditLogDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
if (projectId) {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
} else {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
/**
|
||||
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
|
||||
* to the organization level
|
||||
*/
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
|
||||
}
|
||||
|
||||
// If project ID is not provided, then we need to return all the audit logs for the organization itself.
|
||||
|
||||
const auditLogs = await auditLogDAL.find({
|
||||
startDate,
|
||||
endDate,
|
||||
@ -51,8 +72,9 @@ export const auditLogServiceFactory = ({
|
||||
eventType,
|
||||
userAgentType,
|
||||
actor: auditLogActor,
|
||||
projectId
|
||||
...(projectId ? { projectId } : { orgId: actorOrgId })
|
||||
});
|
||||
|
||||
return auditLogs.map(({ eventType: logEventType, actor: eActor, actorMetadata, eventMetadata, ...el }) => ({
|
||||
...el,
|
||||
event: { type: logEventType, metadata: eventMetadata },
|
||||
@ -61,6 +83,10 @@ export const auditLogServiceFactory = ({
|
||||
};
|
||||
|
||||
const createAuditLog = async (data: TCreateAuditLogDTO) => {
const appCfg = getConfig();
if (appCfg.DISABLE_AUDIT_LOG_GENERATION) {
return;
}
// add all cases in which project id or org id cannot be added
if (data.event.type !== EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH) {
if (!data.projectId && !data.orgId) throw new BadRequestError({ message: "Must either project id or org id" });
@@ -71,6 +97,6 @@ export const auditLogServiceFactory = ({

return {
createAuditLog,
listProjectAuditLogs
listAuditLogs
};
};
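`listAuditLogs` now serves both scopes: with a `projectId` it performs the project-level `AuditLogs` read check and filters by project, and without one it performs an org-level check and filters by the caller's organization. The scope selection boils down to the conditional spread shown above; a minimal sketch with placeholder values:

```ts
// Minimal sketch of the scope selection: the same DTO serves project-scoped and
// org-wide listings, and the DAL filter is chosen by whether projectId is set.
const buildAuditLogFilter = (opts: { projectId?: string; actorOrgId: string }) => ({
  ...(opts.projectId ? { projectId: opts.projectId } : { orgId: opts.actorOrgId })
});

buildAuditLogFilter({ projectId: "project-id-placeholder", actorOrgId: "org-id" }); // => { projectId: "project-id-placeholder" }
buildAuditLogFilter({ actorOrgId: "org-id" }); // => { orgId: "org-id" }
```
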
@ -2,17 +2,18 @@ import { TProjectPermission } from "@app/lib/types";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
auditLogActor?: string;
|
||||
projectId: string;
|
||||
projectId?: string;
|
||||
eventType?: string;
|
||||
startDate?: string;
|
||||
endDate?: string;
|
||||
userAgentType?: string;
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
} & TProjectPermission;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TCreateAuditLogDTO = {
|
||||
event: Event;
|
||||
@ -130,24 +131,53 @@ export enum EventType {
|
||||
GET_CA = "get-certificate-authority",
|
||||
UPDATE_CA = "update-certificate-authority",
|
||||
DELETE_CA = "delete-certificate-authority",
|
||||
RENEW_CA = "renew-certificate-authority",
|
||||
GET_CA_CSR = "get-certificate-authority-csr",
|
||||
GET_CA_CERTS = "get-certificate-authority-certs",
|
||||
GET_CA_CERT = "get-certificate-authority-cert",
|
||||
SIGN_INTERMEDIATE = "sign-intermediate",
|
||||
IMPORT_CA_CERT = "import-certificate-authority-cert",
|
||||
GET_CA_CRL = "get-certificate-authority-crl",
|
||||
GET_CA_CRLS = "get-certificate-authority-crls",
|
||||
ISSUE_CERT = "issue-cert",
|
||||
SIGN_CERT = "sign-cert",
|
||||
GET_CA_CERTIFICATE_TEMPLATES = "get-ca-certificate-templates",
|
||||
GET_CERT = "get-cert",
|
||||
DELETE_CERT = "delete-cert",
|
||||
REVOKE_CERT = "revoke-cert",
|
||||
GET_CERT_BODY = "get-cert-body",
|
||||
CREATE_PKI_ALERT = "create-pki-alert",
|
||||
GET_PKI_ALERT = "get-pki-alert",
|
||||
UPDATE_PKI_ALERT = "update-pki-alert",
|
||||
DELETE_PKI_ALERT = "delete-pki-alert",
|
||||
CREATE_PKI_COLLECTION = "create-pki-collection",
|
||||
GET_PKI_COLLECTION = "get-pki-collection",
|
||||
UPDATE_PKI_COLLECTION = "update-pki-collection",
|
||||
DELETE_PKI_COLLECTION = "delete-pki-collection",
|
||||
GET_PKI_COLLECTION_ITEMS = "get-pki-collection-items",
|
||||
ADD_PKI_COLLECTION_ITEM = "add-pki-collection-item",
|
||||
DELETE_PKI_COLLECTION_ITEM = "delete-pki-collection-item",
|
||||
CREATE_KMS = "create-kms",
|
||||
UPDATE_KMS = "update-kms",
|
||||
DELETE_KMS = "delete-kms",
|
||||
GET_KMS = "get-kms",
|
||||
UPDATE_PROJECT_KMS = "update-project-kms",
|
||||
GET_PROJECT_KMS_BACKUP = "get-project-kms-backup",
|
||||
LOAD_PROJECT_KMS_BACKUP = "load-project-kms-backup"
|
||||
LOAD_PROJECT_KMS_BACKUP = "load-project-kms-backup",
|
||||
ORG_ADMIN_ACCESS_PROJECT = "org-admin-accessed-project",
|
||||
CREATE_CERTIFICATE_TEMPLATE = "create-certificate-template",
|
||||
UPDATE_CERTIFICATE_TEMPLATE = "update-certificate-template",
|
||||
DELETE_CERTIFICATE_TEMPLATE = "delete-certificate-template",
|
||||
GET_CERTIFICATE_TEMPLATE = "get-certificate-template",
|
||||
CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "create-certificate-template-est-config",
|
||||
UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "update-certificate-template-est-config",
|
||||
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config",
|
||||
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
|
||||
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
|
||||
GET_SLACK_INTEGRATION = "get-slack-integration",
|
||||
UPDATE_SLACK_INTEGRATION = "update-slack-integration",
|
||||
DELETE_SLACK_INTEGRATION = "delete-slack-integration",
|
||||
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
|
||||
UPDATE_PROJECT_SLACK_CONFIG = "update-project-slack-config"
|
||||
}
|
||||
|
||||
interface UserActorMetadata {
|
||||
@ -337,6 +367,7 @@ interface DeleteIntegrationEvent {
|
||||
targetServiceId?: string;
|
||||
path?: string;
|
||||
region?: string;
|
||||
shouldDeleteIntegrationSecrets?: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
@ -1094,6 +1125,14 @@ interface DeleteCa {
|
||||
};
|
||||
}
|
||||
|
||||
interface RenewCa {
|
||||
type: EventType.RENEW_CA;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCsr {
|
||||
type: EventType.GET_CA_CSR;
|
||||
metadata: {
|
||||
@ -1102,6 +1141,14 @@ interface GetCaCsr {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCerts {
|
||||
type: EventType.GET_CA_CERTS;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCert {
|
||||
type: EventType.GET_CA_CERT;
|
||||
metadata: {
|
||||
@ -1127,8 +1174,8 @@ interface ImportCaCert {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCrl {
|
||||
type: EventType.GET_CA_CRL;
|
||||
interface GetCaCrls {
|
||||
type: EventType.GET_CA_CRLS;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
@ -1153,6 +1200,14 @@ interface SignCert {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCertificateTemplates {
|
||||
type: EventType.GET_CA_CERTIFICATE_TEMPLATES;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCert {
|
||||
type: EventType.GET_CERT;
|
||||
metadata: {
|
||||
@ -1189,6 +1244,95 @@ interface GetCertBody {
|
||||
};
|
||||
}
|
||||
|
||||
interface CreatePkiAlert {
|
||||
type: EventType.CREATE_PKI_ALERT;
|
||||
metadata: {
|
||||
pkiAlertId: string;
|
||||
pkiCollectionId: string;
|
||||
name: string;
|
||||
alertBeforeDays: number;
|
||||
recipientEmails: string;
|
||||
};
|
||||
}
|
||||
interface GetPkiAlert {
|
||||
type: EventType.GET_PKI_ALERT;
|
||||
metadata: {
|
||||
pkiAlertId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdatePkiAlert {
|
||||
type: EventType.UPDATE_PKI_ALERT;
|
||||
metadata: {
|
||||
pkiAlertId: string;
|
||||
pkiCollectionId?: string;
|
||||
name?: string;
|
||||
alertBeforeDays?: number;
|
||||
recipientEmails?: string;
|
||||
};
|
||||
}
|
||||
interface DeletePkiAlert {
|
||||
type: EventType.DELETE_PKI_ALERT;
|
||||
metadata: {
|
||||
pkiAlertId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreatePkiCollection {
|
||||
type: EventType.CREATE_PKI_COLLECTION;
|
||||
metadata: {
|
||||
pkiCollectionId: string;
|
||||
name: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetPkiCollection {
|
||||
type: EventType.GET_PKI_COLLECTION;
|
||||
metadata: {
|
||||
pkiCollectionId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdatePkiCollection {
|
||||
type: EventType.UPDATE_PKI_COLLECTION;
|
||||
metadata: {
|
||||
pkiCollectionId: string;
|
||||
name?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeletePkiCollection {
|
||||
type: EventType.DELETE_PKI_COLLECTION;
|
||||
metadata: {
|
||||
pkiCollectionId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetPkiCollectionItems {
|
||||
type: EventType.GET_PKI_COLLECTION_ITEMS;
|
||||
metadata: {
|
||||
pkiCollectionId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AddPkiCollectionItem {
|
||||
type: EventType.ADD_PKI_COLLECTION_ITEM;
|
||||
metadata: {
|
||||
pkiCollectionItemId: string;
|
||||
pkiCollectionId: string;
|
||||
type: PkiItemType;
|
||||
itemId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeletePkiCollectionItem {
|
||||
type: EventType.DELETE_PKI_COLLECTION_ITEM;
|
||||
metadata: {
|
||||
pkiCollectionItemId: string;
|
||||
pkiCollectionId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateKmsEvent {
|
||||
type: EventType.CREATE_KMS;
|
||||
metadata: {
|
||||
@ -1245,6 +1389,136 @@ interface LoadProjectKmsBackupEvent {
|
||||
metadata: Record<string, string>; // no metadata yet
|
||||
}
|
||||
|
||||
interface CreateCertificateTemplate {
|
||||
type: EventType.CREATE_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
caId: string;
|
||||
pkiCollectionId?: string;
|
||||
name: string;
|
||||
commonName: string;
|
||||
subjectAlternativeName: string;
|
||||
ttl: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCertificateTemplate {
|
||||
type: EventType.GET_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCertificateTemplate {
|
||||
type: EventType.UPDATE_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
caId: string;
|
||||
pkiCollectionId?: string;
|
||||
name: string;
|
||||
commonName: string;
|
||||
subjectAlternativeName: string;
|
||||
ttl: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteCertificateTemplate {
|
||||
type: EventType.DELETE_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface OrgAdminAccessProjectEvent {
|
||||
type: EventType.ORG_ADMIN_ACCESS_PROJECT;
|
||||
metadata: {
|
||||
userId: string;
|
||||
username: string;
|
||||
email: string;
|
||||
projectId: string;
|
||||
}; // no metadata yet
|
||||
}
|
||||
|
||||
interface CreateCertificateTemplateEstConfig {
|
||||
type: EventType.CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
isEnabled: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCertificateTemplateEstConfig {
|
||||
type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
isEnabled: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCertificateTemplateEstConfig {
|
||||
type: EventType.GET_CERTIFICATE_TEMPLATE_EST_CONFIG;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AttemptCreateSlackIntegration {
|
||||
type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
slug: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AttemptReinstallSlackIntegration {
|
||||
type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateSlackIntegration {
|
||||
type: EventType.UPDATE_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
slug: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteSlackIntegration {
|
||||
type: EventType.DELETE_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSlackIntegration {
|
||||
type: EventType.GET_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateProjectSlackConfig {
|
||||
type: EventType.UPDATE_PROJECT_SLACK_CONFIG;
|
||||
metadata: {
|
||||
id: string;
|
||||
slackIntegrationId: string;
|
||||
isAccessRequestNotificationEnabled: boolean;
|
||||
accessRequestChannels: string;
|
||||
isSecretRequestNotificationEnabled: boolean;
|
||||
secretRequestChannels: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetProjectSlackConfig {
|
||||
type: EventType.GET_PROJECT_SLACK_CONFIG;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| GetSecretsEvent
|
||||
| GetSecretEvent
|
||||
@ -1337,21 +1611,50 @@ export type Event =
|
||||
| GetCa
|
||||
| UpdateCa
|
||||
| DeleteCa
|
||||
| RenewCa
|
||||
| GetCaCsr
|
||||
| GetCaCerts
|
||||
| GetCaCert
|
||||
| SignIntermediate
|
||||
| ImportCaCert
|
||||
| GetCaCrl
|
||||
| GetCaCrls
|
||||
| IssueCert
|
||||
| SignCert
|
||||
| GetCaCertificateTemplates
|
||||
| GetCert
|
||||
| DeleteCert
|
||||
| RevokeCert
|
||||
| GetCertBody
|
||||
| CreatePkiAlert
|
||||
| GetPkiAlert
|
||||
| UpdatePkiAlert
|
||||
| DeletePkiAlert
|
||||
| CreatePkiCollection
|
||||
| GetPkiCollection
|
||||
| UpdatePkiCollection
|
||||
| DeletePkiCollection
|
||||
| GetPkiCollectionItems
|
||||
| AddPkiCollectionItem
|
||||
| DeletePkiCollectionItem
|
||||
| CreateKmsEvent
|
||||
| UpdateKmsEvent
|
||||
| DeleteKmsEvent
|
||||
| GetKmsEvent
|
||||
| UpdateProjectKmsEvent
|
||||
| GetProjectKmsBackupEvent
|
||||
| LoadProjectKmsBackupEvent;
|
||||
| LoadProjectKmsBackupEvent
|
||||
| OrgAdminAccessProjectEvent
|
||||
| CreateCertificateTemplate
|
||||
| UpdateCertificateTemplate
|
||||
| GetCertificateTemplate
|
||||
| DeleteCertificateTemplate
|
||||
| CreateCertificateTemplateEstConfig
|
||||
| UpdateCertificateTemplateEstConfig
|
||||
| GetCertificateTemplateEstConfig
|
||||
| AttemptCreateSlackIntegration
|
||||
| AttemptReinstallSlackIntegration
|
||||
| UpdateSlackIntegration
|
||||
| DeleteSlackIntegration
|
||||
| GetSlackIntegration
|
||||
| UpdateProjectSlackConfig
|
||||
| GetProjectSlackConfig;
|
||||
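Purely for illustration, a minimal sketch of an event object that would satisfy one of the interfaces above; the id, slug, and description values are made up:

const exampleAuditEvent: UpdateSlackIntegration = {
  type: EventType.UPDATE_SLACK_INTEGRATION,
  metadata: {
    id: "slack-integration-id", // hypothetical id
    slug: "engineering-workspace", // hypothetical slug
    description: "Notifications for the engineering project"
  }
};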
|
@ -2,24 +2,24 @@ import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
|
||||
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
// import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
|
||||
|
||||
import { TGetCrl } from "./certificate-authority-crl-types";
|
||||
import { TGetCaCrlsDTO, TGetCrlById } from "./certificate-authority-crl-types";
|
||||
|
||||
type TCertificateAuthorityCrlServiceFactoryDep = {
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
|
||||
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
|
||||
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "find" | "findById">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
|
||||
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
// licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
|
||||
@ -29,13 +29,42 @@ export const certificateAuthorityCrlServiceFactory = ({
|
||||
certificateAuthorityCrlDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
permissionService,
|
||||
licenseService
|
||||
permissionService // licenseService
|
||||
}: TCertificateAuthorityCrlServiceFactoryDep) => {
|
||||
/**
|
||||
* Return the Certificate Revocation List (CRL) for CA with id [caId]
|
||||
* Return CRL with id [crlId]
|
||||
*/
|
||||
const getCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCrl) => {
|
||||
const getCrlById = async (crlId: TGetCrlById) => {
|
||||
const caCrl = await certificateAuthorityCrlDAL.findById(crlId);
|
||||
if (!caCrl) throw new NotFoundError({ message: "CRL not found" });
|
||||
|
||||
const ca = await certificateAuthorityDAL.findById(caCrl.caId);
|
||||
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const kmsDecryptor = await kmsService.decryptWithKmsKey({
|
||||
kmsId: keyId
|
||||
});
|
||||
|
||||
const decryptedCrl = await kmsDecryptor({ cipherTextBlob: caCrl.encryptedCrl });
|
||||
|
||||
const crl = new x509.X509Crl(decryptedCrl);
|
||||
|
||||
return {
|
||||
ca,
|
||||
caCrl,
|
||||
crl: crl.rawData
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns a list of CRL ids for CA with id [caId]
|
||||
*/
|
||||
const getCaCrls = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCrlsDTO) => {
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
|
||||
@ -52,15 +81,14 @@ export const certificateAuthorityCrlServiceFactory = ({
|
||||
ProjectPermissionSub.CertificateAuthorities
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.caCrl)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to get CA certificate revocation list (CRL) due to plan restriction. Upgrade plan to get the CA CRL."
|
||||
});
|
||||
// const plan = await licenseService.getPlan(actorOrgId);
|
||||
// if (!plan.caCrl)
|
||||
// throw new BadRequestError({
|
||||
// message:
|
||||
// "Failed to get CA certificate revocation lists (CRLs) due to plan restriction. Upgrade plan to get the CA CRL."
|
||||
// });
|
||||
|
||||
const caCrl = await certificateAuthorityCrlDAL.findOne({ caId: ca.id });
|
||||
if (!caCrl) throw new BadRequestError({ message: "CRL not found" });
|
||||
const caCrls = await certificateAuthorityCrlDAL.find({ caId: ca.id }, { sort: [["createdAt", "desc"]] });
|
||||
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
@ -72,15 +100,23 @@ export const certificateAuthorityCrlServiceFactory = ({
|
||||
kmsId: keyId
|
||||
});
|
||||
|
||||
const decryptedCrl = await kmsDecryptor({ cipherTextBlob: caCrl.encryptedCrl });
|
||||
const crl = new x509.X509Crl(decryptedCrl);
|
||||
const decryptedCrls = await Promise.all(
|
||||
caCrls.map(async (caCrl) => {
|
||||
const decryptedCrl = await kmsDecryptor({ cipherTextBlob: caCrl.encryptedCrl });
|
||||
const crl = new x509.X509Crl(decryptedCrl);
|
||||
|
||||
const base64crl = crl.toString("base64");
|
||||
const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
|
||||
const base64crl = crl.toString("base64");
|
||||
const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
|
||||
return {
|
||||
id: caCrl.id,
|
||||
crl: crlPem
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
crl: crlPem,
|
||||
ca
|
||||
ca,
|
||||
crls: decryptedCrls
|
||||
};
|
||||
};
|
||||
|
||||
@ -166,7 +202,8 @@ export const certificateAuthorityCrlServiceFactory = ({
|
||||
// };
|
||||
|
||||
return {
|
||||
getCaCrl
|
||||
getCrlById,
|
||||
getCaCrls
|
||||
// rotateCaCrl
|
||||
};
|
||||
};
|
||||
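A minimal usage sketch, assuming a certificateAuthorityCrlService instance built from the factory above is in scope; getCrlById returns the raw DER CRL (crl.rawData), while getCaCrls returns PEM-encoded CRLs for a CA, newest first:

// Hypothetical caller, e.g. a route handler that serves the CRL as application/pkix-crl.
const { ca, crl } = await certificateAuthorityCrlService.getCrlById("some-crl-id"); // id is made up
const derBody = Buffer.from(crl); // crl is an ArrayBuffer (the X509Crl rawData)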
|
@ -1,5 +1,7 @@
import { TProjectPermission } from "@app/lib/types";

export type TGetCrl = {
export type TGetCrlById = string;

export type TGetCaCrlsDTO = {
  caId: string;
} & Omit<TProjectPermission, "projectId">;
@ -0,0 +1,24 @@
import { Certificate, ContentInfo, EncapsulatedContentInfo, SignedData } from "pkijs";

export const convertRawCertsToPkcs7 = (rawCertificate: ArrayBuffer[]) => {
  const certs = rawCertificate.map((rawCert) => Certificate.fromBER(rawCert));
  const cmsSigned = new SignedData({
    encapContentInfo: new EncapsulatedContentInfo({
      eContentType: "1.2.840.113549.1.7.1" // not encrypted and not compressed data
    }),
    certificates: certs
  });

  const cmsContent = new ContentInfo({
    contentType: "1.2.840.113549.1.7.2", // SignedData
    // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
    content: cmsSigned.toSchema()
  });

  const derBuffer = cmsContent.toSchema().toBER(false);
  const base64Pkcs7 = Buffer.from(derBuffer)
    .toString("base64")
    .replace(/(.{64})/g, "$1\n"); // we add a linebreak for CURL clients

  return base64Pkcs7;
};
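A short sketch of how the returned base64 body could be wrapped in PEM armor for inspection; certPem is a hypothetical PEM-encoded certificate, and the conversion through @peculiar/x509 mirrors how the EST service obtains rawData below:

const certDer = new x509.X509Certificate(certPem).rawData; // certPem is an assumed input
const base64Body = convertRawCertsToPkcs7([certDer]); // already broken into 64-character lines
const pkcs7Pem = `-----BEGIN PKCS7-----\n${base64Body}\n-----END PKCS7-----`;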
@ -0,0 +1,268 @@
|
||||
import * as x509 from "@peculiar/x509";
|
||||
|
||||
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
|
||||
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { getCaCertChain, getCaCertChains } from "@app/services/certificate-authority/certificate-authority-fns";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { convertRawCertsToPkcs7 } from "./certificate-est-fns";
|
||||
|
||||
type TCertificateEstServiceFactoryDep = {
|
||||
certificateAuthorityService: Pick<TCertificateAuthorityServiceFactory, "signCertFromCa">;
|
||||
certificateTemplateService: Pick<TCertificateTemplateServiceFactory, "getEstConfiguration">;
|
||||
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "findById">;
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
|
||||
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "find" | "findById">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
|
||||
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TCertificateEstServiceFactory = ReturnType<typeof certificateEstServiceFactory>;
|
||||
|
||||
export const certificateEstServiceFactory = ({
|
||||
certificateAuthorityService,
|
||||
certificateTemplateService,
|
||||
certificateTemplateDAL,
|
||||
certificateAuthorityCertDAL,
|
||||
certificateAuthorityDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
licenseService
|
||||
}: TCertificateEstServiceFactoryDep) => {
|
||||
const simpleReenroll = async ({
|
||||
csr,
|
||||
certificateTemplateId,
|
||||
sslClientCert
|
||||
}: {
|
||||
csr: string;
|
||||
certificateTemplateId: string;
|
||||
sslClientCert: string;
|
||||
}) => {
|
||||
const estConfig = await certificateTemplateService.getEstConfiguration({
|
||||
isInternal: true,
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
const plan = await licenseService.getPlan(estConfig.orgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to perform EST operation - simpleReenroll due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
});
|
||||
}
|
||||
|
||||
const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);
|
||||
|
||||
const leafCertificate = decodeURIComponent(sslClientCert).match(
|
||||
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
|
||||
)?.[0];
|
||||
|
||||
if (!leafCertificate) {
|
||||
throw new UnauthorizedError({ message: "Missing client certificate" });
|
||||
}
|
||||
|
||||
const cert = new x509.X509Certificate(leafCertificate);
|
||||
// We have to assert that the client certificate provided can be traced back to the Root CA
|
||||
const caCertChains = await getCaCertChains({
|
||||
caId: certTemplate.caId,
|
||||
certificateAuthorityCertDAL,
|
||||
certificateAuthorityDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const verifiedChains = await Promise.all(
|
||||
caCertChains.map((chain) => {
|
||||
const caCert = new x509.X509Certificate(chain.certificate);
|
||||
const caChain =
|
||||
chain.certificateChain
|
||||
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
|
||||
?.map((c) => new x509.X509Certificate(c)) || [];
|
||||
|
||||
return isCertChainValid([cert, caCert, ...caChain]);
|
||||
})
|
||||
);
|
||||
|
||||
if (!verifiedChains.some(Boolean)) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid client certificate: unable to build a valid certificate chain"
|
||||
});
|
||||
}
|
||||
|
||||
// We ensure that the Subject and SubjectAltNames of the CSR and the existing certificate are exactly the same
|
||||
const csrObj = new x509.Pkcs10CertificateRequest(csr);
|
||||
if (csrObj.subject !== cert.subject) {
|
||||
throw new BadRequestError({
|
||||
message: "Subject mismatch"
|
||||
});
|
||||
}
|
||||
|
||||
let csrSanSet: Set<string> = new Set();
|
||||
const csrSanExtension = csrObj.extensions.find((ext) => ext.type === "2.5.29.17");
|
||||
if (csrSanExtension) {
|
||||
const sanNames = new x509.GeneralNames(csrSanExtension.value);
|
||||
csrSanSet = new Set([...sanNames.items.map((name) => `${name.type}-${name.value}`)]);
|
||||
}
|
||||
|
||||
let certSanSet: Set<string> = new Set();
|
||||
const certSanExtension = cert.extensions.find((ext) => ext.type === "2.5.29.17");
|
||||
if (certSanExtension) {
|
||||
const sanNames = new x509.GeneralNames(certSanExtension.value);
|
||||
certSanSet = new Set([...sanNames.items.map((name) => `${name.type}-${name.value}`)]);
|
||||
}
|
||||
|
||||
if (csrSanSet.size !== certSanSet.size || ![...csrSanSet].every((element) => certSanSet.has(element))) {
|
||||
throw new BadRequestError({
|
||||
message: "Subject alternative names mismatch"
|
||||
});
|
||||
}
|
||||
|
||||
const { certificate } = await certificateAuthorityService.signCertFromCa({
|
||||
isInternal: true,
|
||||
certificateTemplateId,
|
||||
csr
|
||||
});
|
||||
|
||||
return convertRawCertsToPkcs7([certificate.rawData]);
|
||||
};
|
||||
|
||||
const simpleEnroll = async ({
|
||||
csr,
|
||||
certificateTemplateId,
|
||||
sslClientCert
|
||||
}: {
|
||||
csr: string;
|
||||
certificateTemplateId: string;
|
||||
sslClientCert: string;
|
||||
}) => {
|
||||
/* We first have to assert that the client certificate provided can be traced back to the attached
|
||||
CA chain in the EST configuration
|
||||
*/
|
||||
const estConfig = await certificateTemplateService.getEstConfiguration({
|
||||
isInternal: true,
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
const plan = await licenseService.getPlan(estConfig.orgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to perform EST operation - simpleEnroll due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
});
|
||||
}
|
||||
|
||||
const caCerts = estConfig.caChain
|
||||
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
|
||||
?.map((cert) => {
|
||||
return new x509.X509Certificate(cert);
|
||||
});
|
||||
|
||||
if (!caCerts) {
|
||||
throw new BadRequestError({ message: "Failed to parse certificate chain" });
|
||||
}
|
||||
|
||||
const leafCertificate = decodeURIComponent(sslClientCert).match(
|
||||
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
|
||||
)?.[0];
|
||||
|
||||
if (!leafCertificate) {
|
||||
throw new BadRequestError({ message: "Missing client certificate" });
|
||||
}
|
||||
|
||||
const certObj = new x509.X509Certificate(leafCertificate);
|
||||
if (!(await isCertChainValid([certObj, ...caCerts]))) {
|
||||
throw new BadRequestError({ message: "Invalid certificate chain" });
|
||||
}
|
||||
|
||||
const { certificate } = await certificateAuthorityService.signCertFromCa({
|
||||
isInternal: true,
|
||||
certificateTemplateId,
|
||||
csr
|
||||
});
|
||||
|
||||
return convertRawCertsToPkcs7([certificate.rawData]);
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the CA certificate and CA certificate chain for the CA bound to
|
||||
* the certificate template with id [certificateTemplateId] as part of EST protocol
|
||||
*/
|
||||
const getCaCerts = async ({ certificateTemplateId }: { certificateTemplateId: string }) => {
|
||||
const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);
|
||||
if (!certTemplate) {
|
||||
throw new NotFoundError({
|
||||
message: "Certificate template not found"
|
||||
});
|
||||
}
|
||||
|
||||
const estConfig = await certificateTemplateService.getEstConfiguration({
|
||||
isInternal: true,
|
||||
certificateTemplateId
|
||||
});
|
||||
|
||||
const plan = await licenseService.getPlan(estConfig.orgId);
|
||||
if (!plan.pkiEst) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to perform EST operation - caCerts due to plan restriction. Upgrade to the Enterprise plan."
|
||||
});
|
||||
}
|
||||
|
||||
if (!estConfig.isEnabled) {
|
||||
throw new BadRequestError({
|
||||
message: "EST is disabled"
|
||||
});
|
||||
}
|
||||
|
||||
const ca = await certificateAuthorityDAL.findById(certTemplate.caId);
|
||||
if (!ca) {
|
||||
throw new NotFoundError({
|
||||
message: "Certificate Authority not found"
|
||||
});
|
||||
}
|
||||
|
||||
const { caCert, caCertChain } = await getCaCertChain({
|
||||
caCertId: ca.activeCaCertId as string,
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCertDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const certificates = caCertChain
|
||||
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
|
||||
?.map((cert) => new x509.X509Certificate(cert));
|
||||
|
||||
if (!certificates) {
|
||||
throw new BadRequestError({ message: "Failed to parse certificate chain" });
|
||||
}
|
||||
|
||||
const caCertificate = new x509.X509Certificate(caCert);
|
||||
return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
|
||||
};
|
||||
|
||||
return {
|
||||
simpleEnroll,
|
||||
simpleReenroll,
|
||||
getCaCerts
|
||||
};
|
||||
};
|
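A hedged sketch of how an EST endpoint might serve these results; certificateEstService is an assumed instance of the factory above, and the response framing (application/pkcs7-mime with base64 transfer encoding) follows the usual RFC 7030 convention rather than anything shown in this diff:

const handleEstCaCerts = async (certificateTemplateId: string) => {
  const body = await certificateEstService.getCaCerts({ certificateTemplateId });
  return {
    headers: {
      "Content-Type": "application/pkcs7-mime; smime-type=certs-only",
      "Content-Transfer-Encoding": "base64"
    },
    body
  };
};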
@ -98,6 +98,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
|
||||
|
||||
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.create({
|
||||
type: provider.type,
|
||||
version: 1,
|
||||
|
@ -0,0 +1,226 @@
|
||||
import {
|
||||
CreateUserCommand,
|
||||
CreateUserGroupCommand,
|
||||
DeleteUserCommand,
|
||||
DescribeReplicationGroupsCommand,
|
||||
DescribeUserGroupsCommand,
|
||||
ElastiCache,
|
||||
ModifyReplicationGroupCommand,
|
||||
ModifyUserGroupCommand
|
||||
} from "@aws-sdk/client-elasticache";
|
||||
import handlebars from "handlebars";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
|
||||
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const CreateElastiCacheUserSchema = z.object({
|
||||
UserId: z.string().trim().min(1),
|
||||
UserName: z.string().trim().min(1),
|
||||
Engine: z.string().default("redis"),
|
||||
Passwords: z.array(z.string().trim().min(1)).min(1).max(1), // Minimum password length is 16 characters, required by AWS.
|
||||
AccessString: z.string().trim().min(1) // Example: "on ~* +@all"
|
||||
});
|
||||
|
||||
const DeleteElasticCacheUserSchema = z.object({
|
||||
UserId: z.string().trim().min(1)
|
||||
});
|
||||
|
||||
type TElastiCacheRedisUser = { userId: string; password: string };
|
||||
type TBasicAWSCredentials = { accessKeyId: string; secretAccessKey: string };
|
||||
|
||||
type TCreateElastiCacheUserInput = z.infer<typeof CreateElastiCacheUserSchema>;
|
||||
type TDeleteElastiCacheUserInput = z.infer<typeof DeleteElasticCacheUserSchema>;
|
||||
|
||||
const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: string) => {
|
||||
const elastiCache = new ElastiCache({
|
||||
region,
|
||||
credentials
|
||||
});
|
||||
const infisicalGroup = "infisical-managed-group-elasticache";
|
||||
|
||||
const ensureInfisicalGroupExists = async (clusterName: string) => {
|
||||
const replicationGroups = await elastiCache.send(new DescribeUserGroupsCommand());
|
||||
|
||||
const existingGroup = replicationGroups.UserGroups?.find((group) => group.UserGroupId === infisicalGroup);
|
||||
|
||||
let newlyCreatedGroup = false;
|
||||
if (!existingGroup) {
|
||||
const createGroupCommand = new CreateUserGroupCommand({
|
||||
UserGroupId: infisicalGroup,
|
||||
UserIds: ["default"],
|
||||
Engine: "redis"
|
||||
});
|
||||
|
||||
await elastiCache.send(createGroupCommand);
|
||||
newlyCreatedGroup = true;
|
||||
}
|
||||
|
||||
if (existingGroup || newlyCreatedGroup) {
|
||||
const replicationGroup = (
|
||||
await elastiCache.send(
|
||||
new DescribeReplicationGroupsCommand({
|
||||
ReplicationGroupId: clusterName
|
||||
})
|
||||
)
|
||||
).ReplicationGroups?.[0];
|
||||
|
||||
if (!replicationGroup?.UserGroupIds?.includes(infisicalGroup)) {
|
||||
// If the replication group doesn't have the infisical user group, we need to associate it
|
||||
const modifyGroupCommand = new ModifyReplicationGroupCommand({
|
||||
UserGroupIdsToAdd: [infisicalGroup],
|
||||
UserGroupIdsToRemove: [],
|
||||
ApplyImmediately: true,
|
||||
ReplicationGroupId: clusterName
|
||||
});
|
||||
await elastiCache.send(modifyGroupCommand);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const addUserToInfisicalGroup = async (userId: string) => {
|
||||
// Figure out if the default user is already in the group; if it is, then we shouldn't add it again.
|
||||
|
||||
const addUserToGroupCommand = new ModifyUserGroupCommand({
|
||||
UserGroupId: infisicalGroup,
|
||||
UserIdsToAdd: [userId],
|
||||
UserIdsToRemove: []
|
||||
});
|
||||
|
||||
await elastiCache.send(addUserToGroupCommand);
|
||||
};
|
||||
|
||||
const createUser = async (creationInput: TCreateElastiCacheUserInput, clusterName: string) => {
|
||||
await ensureInfisicalGroupExists(clusterName);
|
||||
|
||||
await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
|
||||
await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
|
||||
|
||||
return {
|
||||
userId: creationInput.UserId,
|
||||
password: creationInput.Passwords[0]
|
||||
};
|
||||
};
|
||||
|
||||
const deleteUser = async (
|
||||
deletionInput: TDeleteElastiCacheUserInput
|
||||
): Promise<Pick<TElastiCacheRedisUser, "userId">> => {
|
||||
await elastiCache.send(new DeleteUserCommand(deletionInput));
|
||||
return { userId: deletionInput.UserId };
|
||||
};
|
||||
|
||||
const verifyCredentials = async (clusterName: string) => {
|
||||
await elastiCache.send(
|
||||
new DescribeReplicationGroupsCommand({
|
||||
ReplicationGroupId: clusterName
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
return {
|
||||
createUser,
|
||||
deleteUser,
|
||||
verifyCredentials
|
||||
};
|
||||
};
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
|
||||
return `inf-${customAlphabet(charset, 32)()}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
|
||||
};
|
||||
|
||||
export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = DynamicSecretAwsElastiCacheSchema.parse(inputs);
|
||||
|
||||
// We need to ensure that the creation & revocation statements are valid and can be used to create and revoke users.
|
||||
// We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
|
||||
CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
|
||||
DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).verifyCredentials(providerInputs.clusterName);
|
||||
return true;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
if (!(await validateConnection(providerInputs))) {
|
||||
throw new BadRequestError({ message: "Failed to establish connection" });
|
||||
}
|
||||
|
||||
const leaseUsername = generateUsername();
|
||||
const leasePassword = generatePassword();
|
||||
const leaseExpiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username: leaseUsername,
|
||||
password: leasePassword,
|
||||
expiration: leaseExpiration
|
||||
});
|
||||
|
||||
const parsedStatement = CreateElastiCacheUserSchema.parse(JSON.parse(creationStatement));
|
||||
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).createUser(parsedStatement, providerInputs.clusterName);
|
||||
|
||||
return {
|
||||
entityId: leaseUsername,
|
||||
data: {
|
||||
DB_USERNAME: leaseUsername,
|
||||
DB_PASSWORD: leasePassword
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username: entityId });
|
||||
const parsedStatement = DeleteElasticCacheUserSchema.parse(JSON.parse(revokeStatement));
|
||||
|
||||
await ElastiCacheUserManager(
|
||||
{
|
||||
accessKeyId: providerInputs.accessKeyId,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
},
|
||||
providerInputs.region
|
||||
).deleteUser(parsedStatement);
|
||||
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
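For illustration, a pair of hypothetical handlebars templates that would satisfy CreateElastiCacheUserSchema and DeleteElasticCacheUserSchema once {{username}}, {{password}}, and {{expiration}} are substituted; the access string is only an example, not a recommended policy:

const exampleCreationStatement = JSON.stringify({
  UserId: "{{username}}",
  UserName: "{{username}}",
  Engine: "redis",
  Passwords: ["{{password}}"], // a single password of at least 16 characters, per the AWS requirement noted above
  AccessString: "on ~* +@all" // example: enabled user with access to all keys and commands
});

const exampleRevocationStatement = JSON.stringify({
  UserId: "{{username}}"
});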
@ -0,0 +1,126 @@
|
||||
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
|
||||
|
||||
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
|
||||
if (
|
||||
isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
) {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
|
||||
const connection = new ElasticSearchClient({
|
||||
node: {
|
||||
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
|
||||
...(providerInputs.ca && {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
ca: providerInputs.ca
|
||||
}
|
||||
})
|
||||
},
|
||||
auth: {
|
||||
...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey
|
||||
? {
|
||||
apiKey: {
|
||||
api_key: providerInputs.auth.apiKey,
|
||||
id: providerInputs.auth.apiKeyId
|
||||
}
|
||||
}
|
||||
: {
|
||||
username: providerInputs.auth.username,
|
||||
password: providerInputs.auth.password
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
return connection;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection
|
||||
.info()
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
return infoResponse;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
await connection.security.putUser({
|
||||
username,
|
||||
password,
|
||||
full_name: "Managed by Infisical.com",
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
await connection.security.deleteUser({
|
||||
username: entityId
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
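A minimal lifecycle sketch for the provider above, assuming inputs is an object satisfying DynamicSecretElasticSearchSchema:

const provider = ElasticSearchProvider();
if (await provider.validateConnection(inputs)) {
  const lease = await provider.create(inputs); // { entityId, data: { DB_USERNAME, DB_PASSWORD } }
  // ...hand lease.data to the lease consumer...
  await provider.revoke(inputs, lease.entityId); // deletes the temporary Elasticsearch user
}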
@ -1,10 +1,22 @@
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SqlDatabaseProvider } from "./sql-database";

export const buildDynamicSecretProviders = () => ({
  [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
  [DynamicSecretProviders.Cassandra]: CassandraProvider(),
  [DynamicSecretProviders.AwsIam]: AwsIamProvider()
  [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
  [DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(),
  [DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(),
  [DynamicSecretProviders.MongoDB]: MongoDBProvider(),
  [DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
  [DynamicSecretProviders.RabbitMq]: RabbitMqProvider()
});
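A sketch of how the map built above might be consumed by the dynamic-secret service; dynamicSecretCfg.type, decryptedInputs, and expireAt are assumed to come from the surrounding service code:

const providers = buildDynamicSecretProviders();
const selectedProvider = providers[dynamicSecretCfg.type as DynamicSecretProviders];
const { entityId, data } = await selectedProvider.create(decryptedInputs, expireAt);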
@ -7,6 +7,75 @@ export enum SqlProviders {
|
||||
MsSQL = "mssql"
|
||||
}
|
||||
|
||||
export enum ElasticSearchAuthTypes {
|
||||
User = "user",
|
||||
ApiKey = "api-key"
|
||||
}
|
||||
|
||||
export const DynamicSecretRedisDBSchema = z.object({
|
||||
host: z.string().trim().toLowerCase(),
|
||||
port: z.number(),
|
||||
username: z.string().trim(), // this is often "default".
|
||||
password: z.string().trim().optional(),
|
||||
creationStatement: z.string().trim(),
|
||||
revocationStatement: z.string().trim(),
|
||||
renewStatement: z.string().trim().optional(),
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretAwsElastiCacheSchema = z.object({
|
||||
clusterName: z.string().trim().min(1),
|
||||
accessKeyId: z.string().trim().min(1),
|
||||
secretAccessKey: z.string().trim().min(1),
|
||||
|
||||
region: z.string().trim(),
|
||||
creationStatement: z.string().trim(),
|
||||
revocationStatement: z.string().trim(),
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretElasticSearchSchema = z.object({
|
||||
host: z.string().trim().min(1),
|
||||
port: z.number(),
|
||||
roles: z.array(z.string().trim().min(1)).min(1),
|
||||
|
||||
// two auth types "user, apikey"
|
||||
auth: z.discriminatedUnion("type", [
|
||||
z.object({
|
||||
type: z.literal(ElasticSearchAuthTypes.User),
|
||||
username: z.string().trim(),
|
||||
password: z.string().trim()
|
||||
}),
|
||||
z.object({
|
||||
type: z.literal(ElasticSearchAuthTypes.ApiKey),
|
||||
apiKey: z.string().trim(),
|
||||
apiKeyId: z.string().trim()
|
||||
})
|
||||
]),
|
||||
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
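An illustrative input object for DynamicSecretElasticSearchSchema using the api-key auth variant (all values hypothetical):

const exampleElasticSearchInputs = {
  host: "https://es.internal.example.com", // scheme included, since the provider builds a URL from host:port
  port: 9243,
  roles: ["superuser"],
  auth: {
    type: ElasticSearchAuthTypes.ApiKey,
    apiKey: "<api-key>",
    apiKeyId: "<api-key-id>"
  }
  // ca: optional PEM bundle for clusters with a private CA
};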
||||
export const DynamicSecretRabbitMqSchema = z.object({
|
||||
host: z.string().trim().min(1),
|
||||
port: z.number(),
|
||||
tags: z.array(z.string().trim()).default([]),
|
||||
|
||||
username: z.string().trim().min(1),
|
||||
password: z.string().trim().min(1),
|
||||
|
||||
ca: z.string().optional(),
|
||||
|
||||
virtualHost: z.object({
|
||||
name: z.string().trim().min(1),
|
||||
permissions: z.object({
|
||||
read: z.string().trim().min(1),
|
||||
write: z.string().trim().min(1),
|
||||
configure: z.string().trim().min(1)
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
export const DynamicSecretSqlDBSchema = z.object({
|
||||
client: z.nativeEnum(SqlProviders),
|
||||
host: z.string().trim().toLowerCase(),
|
||||
@ -44,16 +113,81 @@ export const DynamicSecretAwsIamSchema = z.object({
|
||||
policyArns: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretMongoAtlasSchema = z.object({
|
||||
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
|
||||
adminPrivateKey: z.string().trim().min(1).describe("Admin user private api key"),
|
||||
groupId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.describe("Unique 24-hexadecimal digit string that identifies your project. This is same as project id"),
|
||||
roles: z
|
||||
.object({
|
||||
collectionName: z.string().optional().describe("Collection on which this role applies."),
|
||||
databaseName: z.string().min(1).describe("Database to which the user is granted access privileges."),
|
||||
roleName: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe(
|
||||
' Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
|
||||
)
|
||||
})
|
||||
.array()
|
||||
.min(1),
|
||||
scopes: z
|
||||
.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe(
|
||||
"Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access."
|
||||
),
|
||||
type: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe("Category of resource that this database user can access. Enum: CLUSTER, DATA_LAKE, STREAM")
|
||||
})
|
||||
.array()
|
||||
});
|
||||
|
||||
export const DynamicSecretMongoDBSchema = z.object({
|
||||
host: z.string().min(1).trim().toLowerCase(),
|
||||
port: z.number().optional(),
|
||||
username: z.string().min(1).trim(),
|
||||
password: z.string().min(1).trim(),
|
||||
database: z.string().min(1).trim(),
|
||||
ca: z.string().min(1).optional(),
|
||||
roles: z
|
||||
.string()
|
||||
.array()
|
||||
.min(1)
|
||||
.describe(
|
||||
'Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
|
||||
)
|
||||
});
|
||||
|
||||
export enum DynamicSecretProviders {
  SqlDatabase = "sql-database",
  Cassandra = "cassandra",
  AwsIam = "aws-iam"
  AwsIam = "aws-iam",
  Redis = "redis",
  AwsElastiCache = "aws-elasticache",
  MongoAtlas = "mongo-db-atlas",
  ElasticSearch = "elastic-search",
  MongoDB = "mongo-db",
  RabbitMq = "rabbit-mq"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema })
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema })
]);
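Because the union above discriminates on type, a single parse validates both the provider selection and its inputs; a hypothetical MongoDB example:

const parsedProvider = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.MongoDB,
  inputs: {
    host: "mongo.internal.example.com",
    username: "admin-user",
    password: "<admin-password>",
    database: "admin",
    roles: ["readWrite"] // port omitted, so the provider falls back to an SRV connection string
  }
});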
||||
export type TDynamicProviderFns = {
|
||||
|
146  backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts  Normal file
@ -0,0 +1,146 @@
|
||||
import axios, { AxiosError } from "axios";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretMongoAtlasSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const client = axios.create({
|
||||
baseURL: "https://cloud.mongodb.com/api/atlas",
|
||||
headers: {
|
||||
Accept: "application/vnd.atlas.2023-02-01+json",
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
const digestAuth = createDigestAuthRequestInterceptor(
|
||||
client,
|
||||
providerInputs.adminPublicKey,
|
||||
providerInputs.adminPrivateKey
|
||||
);
|
||||
return digestAuth;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
params: { itemsPerPage: 1 }
|
||||
})
|
||||
.then(() => true)
|
||||
.catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
await client({
|
||||
method: "POST",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
data: {
|
||||
roles: providerInputs.roles,
|
||||
scopes: providerInputs.scopes,
|
||||
deleteAfterDate: expiration,
|
||||
username,
|
||||
password,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const isExisting = await client({
|
||||
method: "GET",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((err) => {
|
||||
if ((err as AxiosError).response?.status === 404) return false;
|
||||
throw err;
|
||||
});
|
||||
if (isExisting) {
|
||||
await client({
|
||||
method: "DELETE",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
await client({
|
||||
method: "PATCH",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
|
||||
data: {
|
||||
deleteAfterDate: expiration,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
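An illustrative input object for DynamicSecretMongoAtlasSchema (keys, group id, and cluster name are made up); roles is applied per database and scopes restricts the user to specific clusters or data lakes:

const exampleAtlasInputs = {
  adminPublicKey: "<atlas-public-api-key>",
  adminPrivateKey: "<atlas-private-api-key>",
  groupId: "64f1c0ffee0ddba11ca7e5d1", // 24-hex Atlas project id (hypothetical)
  roles: [{ databaseName: "admin", roleName: "readWriteAnyDatabase" }],
  scopes: [{ name: "Cluster0", type: "CLUSTER" }]
};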
116  backend/src/ee/services/dynamic-secret/providers/mongo-db.ts  Normal file
@ -0,0 +1,116 @@
|
||||
import { MongoClient } from "mongodb";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
|
||||
if (
|
||||
appCfg.isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
)
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
|
||||
const isSrv = !providerInputs.port;
|
||||
const uri = isSrv
|
||||
? `mongodb+srv://${providerInputs.host}`
|
||||
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
|
||||
|
||||
const client = new MongoClient(uri, {
|
||||
auth: {
|
||||
username: providerInputs.username,
|
||||
password: providerInputs.password
|
||||
},
|
||||
directConnection: !isSrv,
|
||||
ca: providerInputs.ca
|
||||
});
|
||||
return client;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client
|
||||
.db(providerInputs.database)
|
||||
.command({ ping: 1 })
|
||||
.then(() => true);
|
||||
|
||||
await client.close();
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
const db = client.db(providerInputs.database);
|
||||
|
||||
await db.command({
|
||||
createUser: username,
|
||||
pwd: password,
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
await client.close();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
|
||||
const db = client.db(providerInputs.database);
|
||||
await db.command({
|
||||
dropUser: username
|
||||
});
|
||||
await client.close();
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
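A small sketch of the connection-string rule used in getClient above: leaving port unset switches to SRV discovery, otherwise a direct connection is made (hostnames are hypothetical):

const buildMongoUri = (host: string, port?: number) =>
  port ? `mongodb://${host}:${port}` : `mongodb+srv://${host}`;

buildMongoUri("cluster0.example.mongodb.net"); // "mongodb+srv://cluster0.example.mongodb.net"
buildMongoUri("10.0.0.12", 27017); // "mongodb://10.0.0.12:27017"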
172  backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts  Normal file
@ -0,0 +1,172 @@
|
||||
import axios, { Axios } from "axios";
|
||||
import https from "https";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
type TCreateRabbitMQUser = {
|
||||
axiosInstance: Axios;
|
||||
createUser: {
|
||||
username: string;
|
||||
password: string;
|
||||
tags: string[];
|
||||
};
|
||||
virtualHost: {
|
||||
name: string;
|
||||
permissions: {
|
||||
read: string;
|
||||
write: string;
|
||||
configure: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
type TDeleteRabbitMqUser = {
|
||||
axiosInstance: Axios;
|
||||
usernameToDelete: string;
|
||||
};
|
||||
|
||||
async function createRabbitMqUser({ axiosInstance, createUser, virtualHost }: TCreateRabbitMQUser): Promise<void> {
|
||||
try {
|
||||
// Create user
|
||||
const userUrl = `/users/${createUser.username}`;
|
||||
const userData = {
|
||||
password: createUser.password,
|
||||
tags: createUser.tags.join(",")
|
||||
};
|
||||
|
||||
await axiosInstance.put(userUrl, userData);
|
||||
|
||||
// Set permissions for the virtual host
|
||||
if (virtualHost) {
|
||||
const permissionData = {
|
||||
configure: virtualHost.permissions.configure,
|
||||
write: virtualHost.permissions.write,
|
||||
read: virtualHost.permissions.read
|
||||
};
|
||||
|
||||
await axiosInstance.put(
|
||||
`/permissions/${encodeURIComponent(virtualHost.name)}/${createUser.username}`,
|
||||
permissionData
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(error, "Error creating RabbitMQ user");
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRabbitMqUser) {
|
||||
await axiosInstance.delete(`users/${usernameToDelete}`);
|
||||
return { username: usernameToDelete };
|
||||
}
|
||||
|
||||
export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
|
||||
|
||||
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
|
||||
if (
|
||||
isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
) {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
|
||||
const axiosInstance = axios.create({
|
||||
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
|
||||
auth: {
|
||||
username: providerInputs.username,
|
||||
password: providerInputs.password
|
||||
},
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
|
||||
...(providerInputs.ca && {
|
||||
httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: false })
|
||||
})
|
||||
});
|
||||
|
||||
return axiosInstance;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
|
||||
return infoResponse;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
await createRabbitMqUser({
|
||||
axiosInstance: connection,
|
||||
virtualHost: providerInputs.virtualHost,
|
||||
createUser: {
|
||||
password,
|
||||
username,
|
||||
tags: [...(providerInputs.tags ?? []), "infisical-user"]
|
||||
}
|
||||
});
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
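An illustrative virtualHost block for DynamicSecretRabbitMqSchema; the permission fields are RabbitMQ permission regular expressions, and ".*" (full access to the vhost) is only an example:

const exampleRabbitMqVirtualHost = {
  name: "/",
  permissions: {
    read: ".*",
    write: ".*",
    configure: ".*"
  }
};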
182  backend/src/ee/services/dynamic-secret/providers/redis.ts  Normal file
@ -0,0 +1,182 @@
|
||||
import handlebars from "handlebars";
import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 64)();
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

const executeTransactions = async (connection: Redis, commands: string[]): Promise<(string | null)[] | null> => {
  // Initiate a transaction
  const pipeline = connection.multi();

  // Add all commands to the pipeline
  for (const command of commands) {
    const args = command
      .split(" ")
      .map((arg) => arg.trim())
      .filter((arg) => arg.length > 0);
    pipeline.call(args[0], ...args.slice(1));
  }

  // Execute the transaction
  const results = await pipeline.exec();

  if (!results) {
    throw new BadRequestError({ message: "Redis transaction failed: No results returned" });
  }

  // Check for errors in the results
  const errors = results.filter(([err]) => err !== null);
  if (errors.length > 0) {
    throw new BadRequestError({ message: "Redis transaction failed with errors" });
  }

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  return results.map(([_, result]) => result as string | null);
};

export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const appCfg = getConfig();
    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
    const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
    if (
      isCloud &&
      // localhost
      // internal ips
      (providerInputs.host === "host.docker.internal" ||
        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
    )
      throw new BadRequestError({ message: "Invalid db host" });
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1" || dbHost === providerInputs.host)
      throw new BadRequestError({ message: "Invalid db host" });
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
    let connection: Redis | null = null;
    try {
      connection = new Redis({
        username: providerInputs.username,
        host: providerInputs.host,
        port: providerInputs.port,
        password: providerInputs.password,
        ...(providerInputs.ca && {
          tls: {
            rejectUnauthorized: false,
            ca: providerInputs.ca
          }
        })
      });

      let result: string;
      if (providerInputs.password) {
        result = await connection.auth(providerInputs.username, providerInputs.password, () => {});
      } else {
        result = await connection.auth(providerInputs.username, () => {});
      }

      if (result !== "OK") {
        throw new BadRequestError({ message: `Invalid credentials, Redis returned ${result} status` });
      }

      return connection;
    } catch (err) {
      if (connection) await connection.quit();

      throw err;
    }
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const pingResponse = await connection
      .ping()
      .then(() => true)
      .catch(() => false);

    return pingResponse;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();

    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password,
      expiration
    });

    const queries = creationStatement.toString().split(";").filter(Boolean);

    await executeTransactions(connection, queries);

    await connection.quit();
    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = entityId;

    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
    const queries = revokeStatement.toString().split(";").filter(Boolean);

    await executeTransactions(connection, queries);

    await connection.quit();
    return { entityId: username };
  };

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();

    const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });

    if (renewStatement) {
      const queries = renewStatement.toString().split(";").filter(Boolean);
      await executeTransactions(connection, queries);
    }

    await connection.quit();
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
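To make the templating in create/revoke/renew concrete: the operator-supplied statements are handlebars templates that render to semicolon-separated Redis commands, which executeTransactions then runs in a MULTI/EXEC pipeline. A plausible template set is sketched below; these exact commands are illustrative assumptions, not values taken from this diff.

// Illustrative Redis ACL statement templates for the provider above.
// {{username}}, {{password}} and {{expiration}} are filled in by handlebars.
const exampleRedisStatements = {
  creationStatement: "ACL SETUSER {{username}} on >{{password}} ~* &* +@all",
  revocationStatement: "ACL DELUSER {{username}}",
  renewStatement: "" // optional; renew() above skips execution when it renders empty
};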
@@ -41,10 +41,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
  };

  // special query
- const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
+ const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string, tx?: Knex) => {
    try {
-     const usernameDocs: string[] = await db
-       .replicaNode()(TableName.UserGroupMembership)
+     const usernameDocs: string[] = await (tx || db.replicaNode())(TableName.UserGroupMembership)
        .join(
          TableName.GroupProjectMembership,
          `${TableName.UserGroupMembership}.groupId`,
@@ -26,8 +26,10 @@ export const getDefaultOnPremFeatures = () => {
    status: null,
    trial_end: null,
    has_used_trial: true,
-   secretApproval: false,
+   secretApproval: true,
+   secretRotation: true,
+   caCrl: false
  };
};

export const setupLicenseRequestWithStore = () => {};
@@ -40,18 +40,24 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  secretRotation: true,
  caCrl: false,
  instanceUserManagement: false,
- externalKms: false
+ externalKms: false,
+ rateLimits: {
+   readLimit: 60,
+   writeLimit: 200,
+   secretsLimit: 40
+ },
+ pkiEst: false
});

- export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
+ export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
  let token: string;
- const licenceReq = axios.create({
+ const licenseReq = axios.create({
    baseURL,
    timeout: 35 * 1000
    // signal: AbortSignal.timeout(60 * 1000)
  });

- const refreshLicence = async () => {
+ const refreshLicense = async () => {
    const appCfg = getConfig();
    const {
      data: { token: authToken }

@@ -69,7 +75,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    return token;
  };

- licenceReq.interceptors.request.use(
+ licenseReq.interceptors.request.use(
    (config) => {
      if (token && config.headers) {
        // eslint-disable-next-line no-param-reassign

@@ -80,7 +86,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
    (err) => Promise.reject(err)
  );

- licenceReq.interceptors.response.use(
+ licenseReq.interceptors.response.use(
    (response) => response,
    async (err) => {
      const originalRequest = (err as AxiosError).config;

@@ -91,15 +97,15 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
      (originalRequest as any)._retry = true; // injected

      // refresh
-     await refreshLicence();
+     await refreshLicense();

-     licenceReq.defaults.headers.common.Authorization = `Bearer ${token}`;
-     return licenceReq(originalRequest!);
+     licenseReq.defaults.headers.common.Authorization = `Bearer ${token}`;
+     return licenseReq(originalRequest!);
    }

    return Promise.reject(err);
  }
  );

- return { request: licenceReq, refreshLicence };
+ return { request: licenseReq, refreshLicense };
};
@@ -16,8 +16,8 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
- import { getDefaultOnPremFeatures, setupLicenceRequestWithStore } from "./licence-fns";
import { TLicenseDALFactory } from "./license-dal";
+ import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
  InstanceType,
  TAddOrgPmtMethodDTO,

@@ -64,13 +64,13 @@ export const licenseServiceFactory = ({
  let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();

  const appCfg = getConfig();
- const licenseServerCloudApi = setupLicenceRequestWithStore(
+ const licenseServerCloudApi = setupLicenseRequestWithStore(
    appCfg.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_CLOUD_LOGIN,
    appCfg.LICENSE_SERVER_KEY || ""
  );

- const licenseServerOnPremApi = setupLicenceRequestWithStore(
+ const licenseServerOnPremApi = setupLicenseRequestWithStore(
    appCfg.LICENSE_SERVER_URL || "",
    LICENSE_SERVER_ON_PREM_LOGIN,
    appCfg.LICENSE_KEY || ""

@@ -79,7 +79,7 @@ export const licenseServiceFactory = ({
  const init = async () => {
    try {
      if (appCfg.LICENSE_SERVER_KEY) {
-       const token = await licenseServerCloudApi.refreshLicence();
+       const token = await licenseServerCloudApi.refreshLicense();
        if (token) instanceType = InstanceType.Cloud;
        logger.info(`Instance type: ${InstanceType.Cloud}`);
        isValidLicense = true;

@@ -87,7 +87,7 @@ export const licenseServiceFactory = ({
      }

      if (appCfg.LICENSE_KEY) {
-       const token = await licenseServerOnPremApi.refreshLicence();
+       const token = await licenseServerOnPremApi.refreshLicense();
        if (token) {
          const {
            data: { currentPlan }
@@ -58,6 +58,12 @@ export type TFeatureSet = {
  caCrl: false;
  instanceUserManagement: false;
  externalKms: false;
  rateLimits: {
    readLimit: number;
    writeLimit: number;
    secretsLimit: number;
  };
  pkiEst: boolean;
};

export type TOrgPlansTableDTO = {
@@ -9,6 +9,10 @@ export enum OrgPermissionActions {
  Delete = "delete"
}

export enum OrgPermissionAdminConsoleAction {
  AccessAllProjects = "access-all-projects"
}

export enum OrgPermissionSubjects {
  Workspace = "workspace",
  Role = "role",

@@ -22,7 +26,8 @@ export enum OrgPermissionSubjects {
  Billing = "billing",
  SecretScanning = "secret-scanning",
  Identity = "identity",
- Kms = "kms"
+ Kms = "kms",
+ AdminConsole = "organization-admin-console"
}

export type OrgPermissionSet =

@@ -39,7 +44,8 @@ export type OrgPermissionSet =
  | [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
  | [OrgPermissionActions, OrgPermissionSubjects.Billing]
  | [OrgPermissionActions, OrgPermissionSubjects.Identity]
- | [OrgPermissionActions, OrgPermissionSubjects.Kms];
+ | [OrgPermissionActions, OrgPermissionSubjects.Kms]
+ | [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole];

const buildAdminPermission = () => {
  const { can, build } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);

@@ -107,6 +113,8 @@ const buildAdminPermission = () => {
  can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
  can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);

  can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);

  return build({ conditionsMatcher });
};
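In practice the new admin-console action is consumed like any other CASL rule. A handler guarding the "access all projects" capability could look roughly like the sketch below; only the enums come from the diff above, the surrounding wiring and function name are assumptions.

// Sketch: gating an admin-console route on the new permission.
// ForbiddenError / MongoAbility are the same @casl/ability primitives used elsewhere in this diff.
import { ForbiddenError, MongoAbility } from "@casl/ability";

import { OrgPermissionAdminConsoleAction, OrgPermissionSubjects } from "./org-permission";

export const assertCanAccessAllProjects = (permission: MongoAbility) => {
  ForbiddenError.from(permission).throwUnlessCan(
    OrgPermissionAdminConsoleAction.AccessAllProjects,
    OrgPermissionSubjects.AdminConsole
  );
};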
@@ -118,7 +126,6 @@ const buildMemberPermission = () => {
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Member);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
@@ -66,6 +66,7 @@ export const permissionDALFactory = (db: TDbClient) => {
          `${TableName.GroupProjectMembershipRole}.projectMembershipId`,
          `${TableName.GroupProjectMembership}.id`
        )

        .leftJoin(
          TableName.ProjectRoles,
          `${TableName.GroupProjectMembershipRole}.customRoleId`,

@@ -73,6 +74,12 @@ export const permissionDALFactory = (db: TDbClient) => {
        )
        .join(TableName.Project, `${TableName.GroupProjectMembership}.projectId`, `${TableName.Project}.id`)
        .join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)

        .leftJoin(
          TableName.ProjectUserAdditionalPrivilege,
          `${TableName.GroupProjectMembership}.projectId`,
          `${TableName.Project}.id`
        )
        .select(selectAllTableCols(TableName.GroupProjectMembershipRole))
        .select(
          db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),

@@ -81,9 +88,30 @@ export const permissionDALFactory = (db: TDbClient) => {
          db.ref("projectId").withSchema(TableName.GroupProjectMembership),
          db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
          db.ref("orgId").withSchema(TableName.Project),
-         db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug")
-       )
-       .select("permissions");
+         db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
+
+         db.ref("permissions").withSchema(TableName.ProjectRoles).as("permissions"),
+         // db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("apPermissions")
+         // Additional Privileges
+         db.ref("id").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApId"),
+         db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApPermissions"),
+         db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryMode"),
+         db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApIsTemporary"),
+         db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryRange"),
+
+         db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApProjectId"),
+         db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApUserId"),
+
+         db
+           .ref("temporaryAccessStartTime")
+           .withSchema(TableName.ProjectUserAdditionalPrivilege)
+           .as("userApTemporaryAccessStartTime"),
+         db
+           .ref("temporaryAccessEndTime")
+           .withSchema(TableName.ProjectUserAdditionalPrivilege)
+           .as("userApTemporaryAccessEndTime")
+       );
+     // .select(`${TableName.ProjectRoles}.permissions`);

      const docs = await db(TableName.ProjectMembership)
        .join(

@@ -98,12 +126,13 @@ export const permissionDALFactory = (db: TDbClient) => {
        )
        .leftJoin(
          TableName.ProjectUserAdditionalPrivilege,
-         `${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`,
-         `${TableName.ProjectMembership}.id`
+         `${TableName.ProjectUserAdditionalPrivilege}.projectId`,
+         `${TableName.ProjectMembership}.projectId`
        )

        .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
        .join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
-       .where("userId", userId)
+       .where(`${TableName.ProjectMembership}.userId`, userId)
        .where(`${TableName.ProjectMembership}.projectId`, projectId)
        .select(selectAllTableCols(TableName.ProjectUserMembershipRole))
        .select(

@@ -120,6 +149,10 @@ export const permissionDALFactory = (db: TDbClient) => {
          db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryMode"),
          db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApIsTemporary"),
          db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryRange"),

          db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApProjectId"),
          db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApUserId"),

          db
            .ref("temporaryAccessStartTime")
            .withSchema(TableName.ProjectUserAdditionalPrivilege)

@@ -198,6 +231,31 @@ export const permissionDALFactory = (db: TDbClient) => {
            permissions: z.unknown(),
            customRoleSlug: z.string().optional().nullable()
          }).parse(data)
        },
        {
          key: "userApId",
          label: "additionalPrivileges" as const,
          mapper: ({
            userApId,
            userApProjectId,
            userApUserId,
            userApPermissions,
            userApIsTemporary,
            userApTemporaryMode,
            userApTemporaryRange,
            userApTemporaryAccessEndTime,
            userApTemporaryAccessStartTime
          }) => ({
            id: userApId,
            userId: userApUserId,
            projectId: userApProjectId,
            permissions: userApPermissions,
            temporaryRange: userApTemporaryRange,
            temporaryMode: userApTemporaryMode,
            temporaryAccessEndTime: userApTemporaryAccessEndTime,
            temporaryAccessStartTime: userApTemporaryAccessStartTime,
            isTemporary: userApIsTemporary
          })
        }
      ]
    })

@@ -218,15 +276,24 @@ export const permissionDALFactory = (db: TDbClient) => {
          !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
      ) ?? [];

-     const activeAdditionalPrivileges = permission?.[0]?.additionalPrivileges?.filter(
-       ({ isTemporary, temporaryAccessEndTime }) =>
-         !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
-     );
+     const activeAdditionalPrivileges =
+       permission?.[0]?.additionalPrivileges?.filter(
+         ({ isTemporary, temporaryAccessEndTime }) =>
+           !isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
+       ) ?? [];
+
+     const activeGroupAdditionalPrivileges =
+       groupPermission?.[0]?.additionalPrivileges?.filter(
+         ({ isTemporary, temporaryAccessEndTime, userId: apUserId, projectId: apProjectId }) =>
+           apProjectId === projectId &&
+           apUserId === userId &&
+           (!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime))
+       ) ?? [];

      return {
        ...(permission[0] || groupPermission[0]),
        roles: [...activeRoles, ...activeGroupRoles],
-       additionalPrivileges: activeAdditionalPrivileges
+       additionalPrivileges: [...activeAdditionalPrivileges, ...activeGroupAdditionalPrivileges]
      };
    } catch (error) {
      throw new DatabaseError({ error, name: "GetProjectPermission" });
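The same "still active" predicate appears above for roles, additional privileges, and group additional privileges. Factored out, the rule is simply the following; this is a small illustrative sketch, not code from the diff.

// Sketch: a role or privilege is active when it is permanent, or when its
// temporary window has not yet ended. Mirrors the filters used above.
type TemporaryWindow = { isTemporary: boolean; temporaryAccessEndTime?: Date | null };

const isStillActive = ({ isTemporary, temporaryAccessEndTime }: TemporaryWindow) =>
  !isTemporary || Boolean(temporaryAccessEndTime && new Date() < temporaryAccessEndTime);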
@@ -1,6 +1,7 @@
import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";

import { conditionsMatcher } from "@app/lib/casl";
import { BadRequestError } from "@app/lib/errors";

export enum ProjectPermissionActions {
  Read = "read",

@@ -30,6 +31,9 @@ export enum ProjectPermissionSub {
  Identity = "identity",
  CertificateAuthorities = "certificate-authorities",
  Certificates = "certificates",
  CertificateTemplates = "certificate-templates",
  PkiAlerts = "pki-alerts",
  PkiCollections = "pki-collections",
  Kms = "kms"
}

@@ -63,108 +67,134 @@ export type ProjectPermissionSet =
  | [ProjectPermissionActions, ProjectPermissionSub.Identity]
  | [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
  | [ProjectPermissionActions, ProjectPermissionSub.Certificates]
  | [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates]
  | [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
  | [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
  | [ProjectPermissionActions.Delete, ProjectPermissionSub.Project]
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
  | [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
  | [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];

export const fullProjectPermissionSet: [ProjectPermissionActions, ProjectPermissionSub][] = [
  [ProjectPermissionActions.Read, ProjectPermissionSub.Secrets],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Secrets],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets],

  [ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval],
  [ProjectPermissionActions.Create, ProjectPermissionSub.SecretApproval],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.SecretApproval],

  [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation],
  [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRotation],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.SecretRotation],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.SecretRotation],

  [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback],
  [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Member],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Member],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Member],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Member],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Groups],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Groups],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Groups],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Groups],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Role],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Role],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Role],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Role],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Integrations],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Integrations],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Webhooks],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Webhooks],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Webhooks],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Identity],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Identity],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Identity],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Identity],

  [ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens],
  [ProjectPermissionActions.Create, ProjectPermissionSub.ServiceTokens],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.ServiceTokens],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.ServiceTokens],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Settings],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Settings],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Settings],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Settings],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Environments],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Environments],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Environments],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Environments],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Tags],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Tags],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Tags],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Tags],

  [ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs],
  [ProjectPermissionActions.Create, ProjectPermissionSub.AuditLogs],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.AuditLogs],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.AuditLogs],

  [ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList],
  [ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.IpAllowList],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.IpAllowList],

  // double check if all CRUD are needed for CA and Certificates
  [ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities],
  [ProjectPermissionActions.Create, ProjectPermissionSub.CertificateAuthorities],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateAuthorities],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateAuthorities],

  [ProjectPermissionActions.Read, ProjectPermissionSub.Certificates],
  [ProjectPermissionActions.Create, ProjectPermissionSub.Certificates],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates],

  [ProjectPermissionActions.Read, ProjectPermissionSub.CertificateTemplates],
  [ProjectPermissionActions.Create, ProjectPermissionSub.CertificateTemplates],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateTemplates],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateTemplates],

  [ProjectPermissionActions.Read, ProjectPermissionSub.PkiAlerts],
  [ProjectPermissionActions.Create, ProjectPermissionSub.PkiAlerts],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.PkiAlerts],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.PkiAlerts],

  [ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections],
  [ProjectPermissionActions.Create, ProjectPermissionSub.PkiCollections],
  [ProjectPermissionActions.Edit, ProjectPermissionSub.PkiCollections],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.PkiCollections],

  [ProjectPermissionActions.Edit, ProjectPermissionSub.Project],
  [ProjectPermissionActions.Delete, ProjectPermissionSub.Project],

  [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
];

const buildAdminPermissionRules = () => {
  const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretApproval);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.SecretApproval);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretRotation);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretRotation);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.SecretRotation);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Member);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Member);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Groups);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Role);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Role);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Role);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Webhooks);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Webhooks);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Webhooks);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Identity);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.ServiceTokens);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.ServiceTokens);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.ServiceTokens);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Settings);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Settings);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Settings);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Environments);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Environments);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Environments);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Tags);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Tags);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.AuditLogs);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.AuditLogs);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.AuditLogs);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.IpAllowList);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.IpAllowList);

  // double check if all CRUD are needed for CA and Certificates
  can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.CertificateAuthorities);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateAuthorities);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateAuthorities);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
  can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);

  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Project);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project);

  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Kms);
  // Admins get full access to everything
  fullProjectPermissionSet.forEach((permission) => {
    const [action, subject] = permission;
    can(action, subject);
  });

  return rules;
};

@@ -237,6 +267,11 @@ const buildMemberPermissionRules = () => {
  can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
  can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateTemplates);

  can(ProjectPermissionActions.Read, ProjectPermissionSub.PkiAlerts);
  can(ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections);

  return rules;
};

@@ -346,4 +381,31 @@ export const isAtLeastAsPrivilegedWorkspace = (
  return set1.size >= set2.size;
};

/*
 * Case: The user requests to create a role with permissions that are not valid and not supposed to be used ever.
 * If we don't check for this, we can run into issues where functions like the `isAtLeastAsPrivileged` will not work as expected, because we compare the size of each permission set.
 * If the permission set contains invalid permissions, the size will be different, and result in incorrect results.
 */
export const validateProjectPermissions = (permissions: unknown) => {
  const parsedPermissions =
    typeof permissions === "string" ? (JSON.parse(permissions) as string[]) : (permissions as string[]);

  const flattenedPermissions = [...parsedPermissions];

  for (const perm of flattenedPermissions) {
    const [action, subject] = perm;

    if (
      !fullProjectPermissionSet.find(
        (currentPermission) => currentPermission[0] === action && currentPermission[1] === subject
      )
    ) {
      throw new BadRequestError({
        message: `Permission action ${action} on subject ${subject} is not valid`,
        name: "Create Role"
      });
    }
  }
};

/* eslint-enable */
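Following the comment above, a caller creating a custom role would run the requested permissions through validateProjectPermissions before persisting them. The sketch below is illustrative only: the import path, the payload shape, and the "secrets" subject string are assumptions, not values confirmed by this diff.

// Sketch: rejecting a custom role whose permissions are not in fullProjectPermissionSet.
import { validateProjectPermissions } from "./project-permission";

const requestedPermissions = [
  ["read", "secrets"], // assumed to match [ProjectPermissionActions.Read, ProjectPermissionSub.Secrets]
  ["edit", "not-a-real-subject"] // this entry makes validation throw a BadRequestError
];

// Throws: "Permission action edit on subject not-a-real-subject is not valid"
validateProjectPermissions(requestedPermissions);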
@@ -18,7 +18,7 @@ import {

type TProjectUserAdditionalPrivilegeServiceFactoryDep = {
  projectUserAdditionalPrivilegeDAL: TProjectUserAdditionalPrivilegeDALFactory;
- projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
+ projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById" | "findOne">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};

@@ -53,12 +53,17 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);

-   const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({ slug, projectMembershipId });
+   const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
+     slug,
+     projectId: projectMembership.projectId,
+     userId: projectMembership.userId
+   });
    if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });

    if (!dto.isTemporary) {
      const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
-       projectMembershipId,
+       userId: projectMembership.userId,
+       projectId: projectMembership.projectId,
        slug,
        permissions: customPermission
      });

@@ -67,7 +72,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({

    const relativeTempAllocatedTimeInMs = ms(dto.temporaryRange);
    const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
-     projectMembershipId,
+     projectId: projectMembership.projectId,
+     userId: projectMembership.userId,
      slug,
      permissions: customPermission,
      isTemporary: true,

@@ -90,7 +96,11 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
    if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" });

-   const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId);
+   const projectMembership = await projectMembershipDAL.findOne({
+     userId: userPrivilege.userId,
+     projectId: userPrivilege.projectId
+   });
+
    if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" });

    const { permission } = await permissionService.getProjectPermission(

@@ -105,7 +115,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    if (dto?.slug) {
      const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
        slug: dto.slug,
-       projectMembershipId: projectMembership.id
+       userId: projectMembership.id,
+       projectId: projectMembership.projectId
      });
      if (existingSlug && existingSlug.id !== userPrivilege.id)
        throw new BadRequestError({ message: "Additional privilege of provided slug exist" });

@@ -138,7 +149,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
    if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" });

-   const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId);
+   const projectMembership = await projectMembershipDAL.findOne({
+     userId: userPrivilege.userId,
+     projectId: userPrivilege.projectId
+   });
    if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" });

    const { permission } = await permissionService.getProjectPermission(

@@ -164,7 +178,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
    if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" });

-   const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId);
+   const projectMembership = await projectMembershipDAL.findOne({
+     userId: userPrivilege.userId,
+     projectId: userPrivilege.projectId
+   });
    if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" });

    const { permission } = await permissionService.getProjectPermission(

@@ -198,7 +215,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);

-   const userPrivileges = await projectUserAdditionalPrivilegeDAL.find({ projectMembershipId });
+   const userPrivileges = await projectUserAdditionalPrivilegeDAL.find({
+     userId: projectMembership.userId,
+     projectId: projectMembership.projectId
+   });
    return userPrivileges;
  };
@@ -4,17 +4,16 @@ import { logger } from "@app/lib/logger";

import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
- import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
+ import { RateLimitConfiguration, TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";

- let rateLimitMaxConfiguration = {
+ let rateLimitMaxConfiguration: RateLimitConfiguration = {
  readLimit: 60,
  publicEndpointLimit: 30,
  writeLimit: 200,
  secretsLimit: 60,
  authRateLimit: 60,
  inviteUserRateLimit: 30,
- mfaRateLimit: 20,
- creationLimit: 30
+ mfaRateLimit: 20
};

Object.freeze(rateLimitMaxConfiguration);

@@ -67,8 +66,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
    secretsLimit: rateLimit.secretsRateLimit,
    authRateLimit: rateLimit.authRateLimit,
    inviteUserRateLimit: rateLimit.inviteUserRateLimit,
-   mfaRateLimit: rateLimit.mfaRateLimit,
-   creationLimit: rateLimit.creationLimit
+   mfaRateLimit: rateLimit.mfaRateLimit
  };

  logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
@@ -5,7 +5,6 @@ export type TRateLimitUpdateDTO = {
  authRateLimit: number;
  inviteUserRateLimit: number;
  mfaRateLimit: number;
- creationLimit: number;
  publicEndpointLimit: number;
};

@@ -14,3 +13,13 @@ export type TRateLimit = {
  createdAt: Date;
  updatedAt: Date;
} & TRateLimitUpdateDTO;

export type RateLimitConfiguration = {
  readLimit: number;
  publicEndpointLimit: number;
  writeLimit: number;
  secretsLimit: number;
  authRateLimit: number;
  inviteUserRateLimit: number;
  mfaRateLimit: number;
};
@@ -44,19 +44,18 @@ export const buildScimUser = ({
  email,
  firstName,
  lastName,
- groups = [],
- active
+ active,
+ createdAt,
+ updatedAt
}: {
  orgMembershipId: string;
  username: string;
  email?: string | null;
- firstName: string;
- lastName: string;
- groups?: {
-   value: string;
-   display: string;
- }[];
+ firstName: string | null | undefined;
+ lastName: string | null | undefined;
  active: boolean;
+ createdAt: Date;
+ updatedAt: Date;
}): TScimUser => {
  const scimUser = {
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],

@@ -64,9 +63,9 @@ export const buildScimUser = ({
    userName: username,
    displayName: `${firstName} ${lastName}`,
    name: {
-     givenName: firstName,
+     givenName: firstName || "",
      middleName: null,
-     familyName: lastName
+     familyName: lastName || ""
    },
    emails: email
      ? [

@@ -78,10 +77,10 @@ export const buildScimUser = ({
        ]
      : [],
    active,
-   groups,
    meta: {
      resourceType: "User",
-     location: null
+     created: createdAt,
+     lastModified: updatedAt
    }
  };

@@ -109,14 +108,18 @@ export const buildScimGroupList = ({
export const buildScimGroup = ({
  groupId,
  name,
- members
+ members,
+ updatedAt,
+ createdAt
}: {
  groupId: string;
  name: string;
  members: {
    value: string;
-   display: string;
+   display?: string;
  }[];
+ createdAt: Date;
+ updatedAt: Date;
}): TScimGroup => {
  const scimGroup = {
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],

@@ -125,7 +128,8 @@ export const buildScimGroup = ({
    members,
    meta: {
      resourceType: "Group",
-     location: null
+     created: createdAt,
+     lastModified: updatedAt
    }
  };
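For reference, after these changes a buildScimUser result would serialize to roughly the shape below. The values are made up and only fields visible in the hunks above are shown; the full TScimUser type may carry additional fields.

// Illustrative TScimUser payload produced by buildScimUser after this change.
const exampleScimUser = {
  schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
  userName: "jane.doe",
  displayName: "Jane Doe",
  name: { givenName: "Jane", middleName: null, familyName: "Doe" },
  emails: [], // populated when an email address is present
  active: true,
  meta: {
    resourceType: "User",
    created: new Date("2024-07-01T00:00:00.000Z"),
    lastModified: new Date("2024-07-15T00:00:00.000Z")
  }
};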
Some files were not shown because too many files have changed in this diff.