Mirror of https://github.com/Infisical/infisical.git, synced 2025-06-29 04:31:59 +00:00

Compare commits: misc/remov ... aws-syncs-
725 commits in this comparison, from 9cdb4dcde9 through 242595fceb. Only the abbreviated commit SHAs were captured; the author, date, and message columns are not available.
.env.example (27 changed lines)

@@ -26,7 +26,8 @@ SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=

@@ -88,3 +89,27 @@ PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

# App Connections

# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
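The new INF_APP_CONNECTION_* entries are ordinary environment variables, so the backend only sees whatever values the operator fills in here. A minimal sketch of how the AWS assume-role pair might be consumed at runtime (the helper name and return shape are illustrative assumptions, not Infisical's actual code):

```ts
// Hypothetical helper: reads the AWS assume-role app-connection credentials
// introduced in .env.example. Returns undefined when they are left unset,
// which is the default state of the example file.
export const readAwsAppConnectionEnv = ():
  | { accessKeyId: string; secretAccessKey: string }
  | undefined => {
  const accessKeyId = process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID;
  const secretAccessKey = process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY;
  if (!accessKeyId || !secretAccessKey) return undefined;
  return { accessKeyId, secretAccessKey };
};
```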
.github/workflows/check-fe-ts-and-lint.yml (8 changed lines, vendored)

@@ -18,18 +18,18 @@ jobs:

steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 16
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "16"
node-version: "20"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Link check
run: npm run lint:fix
run: npm run lint:fix
working-directory: frontend
.github/workflows/deployment-pipeline.yml (212 changed lines, vendored)

@@ -1,212 +0,0 @@

name: Deployment pipeline
on: [workflow_dispatch]

permissions:
id-token: write
contents: read

jobs:
infisical-tests:
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml

infisical-image:
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}

gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true

production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true

production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
@@ -1,4 +1,4 @@

name: Release Helm Charts
name: Release Infisical Core Helm chart

on: [workflow_dispatch]

@@ -17,6 +17,6 @@ jobs:

- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-to-cloudsmith.sh
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -1,4 +1,4 @@

name: Release Docker image for K8 operator
name: Release image + Helm chart K8s Operator
on:
push:
tags:

@@ -35,3 +35,18 @@ jobs:

tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}

- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -7,3 +7,4 @@ docs/self-hosting/configuration/envars.mdx:generic-api-key:106

frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
@@ -8,7 +8,7 @@ FROM node:20-slim AS base

FROM base AS frontend-dependencies
WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
COPY frontend/package.json frontend/package-lock.json ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

@@ -23,17 +23,16 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules

COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

@@ -44,20 +43,10 @@ WORKDIR /app

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

USER non-root-user

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##

@@ -137,6 +126,7 @@ RUN apt-get update && apt-get install -y \

freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
&& rm -rf /var/lib/apt/lists/*

# Configure ODBC in production

@@ -159,14 +149,11 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

@@ -191,4 +178,4 @@ EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]
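The switch from NEXT_PUBLIC_* to VITE_* build arguments above tracks the frontend's move to Vite, which only exposes variables prefixed with VITE_ to client code, inlined through import.meta.env at build time. A minimal sketch of how one of the renamed values is typically read on the Vite side (the file name and fallback host are assumptions for illustration):

```ts
// analytics.ts (illustrative): Vite inlines import.meta.env.VITE_* values during
// `npm run build`, which is why the Dockerfile now sets VITE_-prefixed ARG/ENV pairs.
export const posthogHost: string =
  import.meta.env.VITE_POSTHOG_HOST ?? "https://app.posthog.com"; // assumed fallback
export const posthogApiKey: string | undefined = import.meta.env.VITE_POSTHOG_API_KEY;
```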
@@ -12,7 +12,7 @@ RUN apk add --no-cache libc6-compat

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
COPY frontend/package.json frontend/package-lock.json ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

@@ -27,17 +27,16 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules

COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

@@ -49,20 +48,10 @@ WORKDIR /app

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user

RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

USER non-root-user

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##

@@ -139,7 +128,8 @@ RUN apk --update add \

freetds-dev \
bash \
curl \
git
git \
openssh

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

@@ -158,14 +148,11 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

COPY --from=backend-runner /app /backend

@@ -188,4 +175,4 @@ EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]
Makefile (3 changed lines)

@@ -30,3 +30,6 @@ reviewable-api:

npm run type:check

reviewable: reviewable-ui reviewable-api

up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build
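With this target in place, the SSO-enabled development stack can be brought up with `make up-dev-sso`, which is shorthand for the docker compose invocation shown above.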
README.md (11 changed lines)

@@ -56,7 +56,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus

- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.

### Internal PKI:
### Infisical (Internal) PKI:

- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.

@@ -64,12 +64,17 @@ We're on a mission to make security tooling more accessible to everyone, not jus

- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.

### Key Management (KMS):
### Infisical Key Management System (KMS):

- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.

### Infisical SSH

- **[Signed SSH Certificates](https://infisical.com/docs/documentation/platform/ssh)**: Issue ephemeral SSH credentials for secure, short-lived, and centralized access to infrastructure.

### General Platform:

- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.

@@ -120,7 +125,7 @@ Install pre commit hook to scan each commit before you push to your repository

infisical scan install --pre-commit-hook
```

Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)

## Open-source vs. paid
@@ -7,7 +7,8 @@ WORKDIR /app

RUN apk --update add \
python3 \
make \
g++
g++ \
openssh

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
@@ -17,7 +17,8 @@ RUN apk --update add \

openssl-dev \
python3 \
make \
g++
g++ \
openssh

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
@@ -22,8 +22,10 @@ export const mockQueue = (): TQueueServiceFactory => {

listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopRepeatableJobByJobId: async () => true
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
};
};
@@ -535,6 +535,107 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]

);
});

test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
mode: "upsert",
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");

// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});

test("Bulk upsert secrets in path multiple paths", async () => {
const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[0].path
}));
const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[1].path
}));
const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];

const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
mode: "upsert",
secrets: testSecrets
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");

// bulk ones should exist
const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
expect(firstBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
firstBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
expect(secondBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
secondBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
});

test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
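Outside the test harness, the same bulk endpoint exercised above can be called directly over HTTP. A hedged TypeScript sketch using fetch, where the host, token, workspace id, environment, and secret path are placeholders and only the route, method, and body shape are taken from the test:

```ts
// Illustrative client call mirroring the "Bulk upsert secrets" tests above.
const INFISICAL_URL = "http://localhost:8080"; // placeholder host

async function bulkUpsertSecrets(token: string, workspaceId: string) {
  const res = await fetch(`${INFISICAL_URL}/api/v3/secrets/batch/raw`, {
    method: "PATCH",
    headers: {
      authorization: `Bearer ${token}`,
      "content-type": "application/json"
    },
    body: JSON.stringify({
      workspaceId,
      environment: "dev", // placeholder environment slug
      secretPath: "/", // placeholder path
      mode: "upsert", // create-or-update semantics, as asserted in the tests
      secrets: [{ secretKey: "BULK-KEY-1", secretValue: "update-value", secretComment: "comment" }]
    })
  });
  if (!res.ok) throw new Error(`Bulk upsert failed with status ${res.status}`);
  return res.json() as Promise<{ secrets: unknown[] }>;
}
```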
@@ -23,14 +23,14 @@ export default {

name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
});

const redis = new Redis(cfg.REDIS_URL);
const redis = new Redis(envConfig.REDIS_URL);
await redis.flushdb("SYNC");

try {

@@ -42,6 +42,7 @@ export default {

},
true
);

await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",

@@ -52,14 +53,24 @@ export default {

directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(cfg.REDIS_URL);

const hsmModule = initializeHsmModule();
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);

const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();

const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
redis,
envConfig
});

// @ts-expect-error type
globalThis.testServer = server;

@@ -73,8 +84,8 @@ export default {

organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line

@@ -109,3 +120,4 @@ export default {

};
}
};
backend/package-lock.json (231 changed lines, generated)

@@ -21,11 +21,12 @@

"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/multipart": "^8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",

@@ -47,8 +48,8 @@

"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",

@@ -80,7 +81,7 @@

"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
The remaining lockfile hunks update the resolved node_modules entries to match the dependency bumps above:

- @fastify/busboy 2.1.1 -> 3.1.1
- @fastify/multipart 8.3.0 -> 8.3.1, now depending on @fastify/busboy ^3.0.0, @fastify/deepmerge ^2.0.0, and @fastify/error ^4.0.0 (nested entries @fastify/deepmerge 2.0.1 and @fastify/error 4.0.0 added)
- @fastify/static 6.12.0 -> 7.0.4, replacing glob ^8.0.1 and p-limit with fastq ^1.17.0 and glob ^10.3.4 (nested brace-expansion 2.0.1, glob 10.4.5, jackspeak 3.4.3, minimatch 9.0.5, minipass 7.1.2); @fastify/swagger-ui keeps its own nested @fastify/static 6.12.0
- @lukeed/ms 2.0.1 -> 2.0.2
- @slack/oauth 3.0.1 -> 3.0.2, @slack/types 2.12.0 -> 2.14.0, @slack/web-api 7.3.4 -> 7.8.0 (axios dependency raised to ^1.7.8)
- axios 1.7.4 -> 1.7.9
- express 4.21.1 -> 4.21.2 (path-to-regexp 0.1.10 -> 0.1.12, funding metadata added)
- nanoid 3.3.7 -> 3.3.8
- "license" fields added to otherwise unchanged entries such as @fastify/accept-negotiator, @fastify/send, @slack/logger, @types/retry, eventemitter3, is-electron, is-stream, p-finally, and p-queue

The captured diff ends partway through the p-queue entry.
|
||||
@ -18150,12 +18272,14 @@
|
||||
"node_modules/p-queue/node_modules/eventemitter3": {
|
||||
"version": "4.0.7",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/p-retry": {
|
||||
"version": "4.6.2",
|
||||
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz",
|
||||
"integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/retry": "0.12.0",
|
||||
"retry": "^0.13.1"
|
||||
@ -18179,6 +18303,7 @@
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz",
|
||||
"integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"p-finally": "^1.0.0"
|
||||
},
|
||||
@ -18383,9 +18508,9 @@
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/path-to-regexp": {
|
||||
"version": "0.1.10",
|
||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz",
|
||||
"integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==",
|
||||
"version": "0.1.12",
|
||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
|
||||
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/path-type": {
|
||||
|
@@ -45,24 +45,24 @@
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@@ -129,11 +129,12 @@
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/multipart": "8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
@@ -155,8 +156,8 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -188,7 +189,7 @@
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
backend/src/@types/fastify.d.ts (vendored, 14 changes)
@@ -31,9 +31,12 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@@ -77,6 +80,7 @@ import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
@@ -89,6 +93,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}

declare module "fastify" {
interface Session {
callbackPort: string;
@@ -177,6 +187,8 @@ declare module "fastify" {
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
@@ -204,6 +216,8 @@
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
secretSync: TSecretSyncServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
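The `RequestContextData` augmentation above only declares that a `reqId` string lives in the request context; a hypothetical consumer (not part of this diff) would read it through the accessor exported by `@fastify/request-context`:

// Hypothetical consumer of the RequestContextData augmentation above; the helper
// name is illustrative, only the "reqId" field comes from the diff.
import { requestContext } from "@fastify/request-context";

export const getCurrentRequestId = (): string | undefined => requestContext.get("reqId");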
backend/src/@types/knex.d.ts (vendored, 56 changes)
@@ -218,6 +218,9 @@ import {
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -317,6 +320,21 @@
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate,
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate,
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate,
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate,
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@@ -348,11 +366,13 @@
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
@@ -378,6 +398,31 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate
>;
[TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate
>;
[TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate
>;
[TableName.SshCertificate]: KnexOriginal.CompositeTableType<
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate
>;
[TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@@ -846,5 +891,16 @@
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
}
}
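The `CompositeTableType` registrations above are what give Knex typed access to the new tables. A minimal sketch of what that enables is shown below; `TableName.SecretSync` and its columns come from this diff, while the surrounding function is hypothetical:

// Minimal sketch (not part of this diff): with the module augmentation in place,
// Knex type-checks the column names used in where/select for the new table.
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export const listSecretSyncsForProject = async (db: Knex, projectId: string) =>
  db(TableName.SecretSync).where({ projectId }).select("id", "name", "destination", "syncStatus");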
backend/src/auto-start-migrations.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
  auditLogDb?: Knex;
  applicationDb: Knex;
  logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
  directory: path.join(__dirname, "./db/migrations"),
  loadExtensions: [".mjs", ".ts"],
  tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
  // happens for first time in which the migration table itself is not created yet
  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
  if (err?.message?.includes("does not exist")) {
    return true;
  }
  throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
  try {
    // akhilmhdh(Feb 10 2025): 2 years from now remove this
    if (isProduction) {
      const migrationTable = migrationConfig.tableName;
      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
      if (hasMigrationTable) {
        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
        if (firstFile?.name?.includes(".ts")) {
          await applicationDb(migrationTable).update({
            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
          });
        }
      }
      if (auditLogDb) {
        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
        if (hasMigrationTableInAuditLog) {
          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
          if (firstFile?.name?.includes(".ts")) {
            await auditLogDb(migrationTable).update({
              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
            });
          }
        }
      }
    }

    const shouldRunMigration = Boolean(
      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
    ); // db.length - code.length
    if (!shouldRunMigration) {
      logger.info("No migrations pending: Skipping migration process.");
      return;
    }

    if (auditLogDb) {
      await auditLogDb.transaction(async (tx) => {
        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
        logger.info("Running audit log migrations.");

        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
          .status(migrationConfig)
          .catch(migrationStatusCheckErrorHandler));
        if (didPreviousInstanceRunMigration) {
          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
          return;
        }

        await auditLogDb.migrate.latest(migrationConfig);
        logger.info("Finished audit log migrations.");
      });
    }

    await applicationDb.transaction(async (tx) => {
      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
      logger.info("Running application migrations.");

      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
        .status(migrationConfig)
        .catch(migrationStatusCheckErrorHandler));
      if (didPreviousInstanceRunMigration) {
        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
        return;
      }

      await applicationDb.migrate.latest(migrationConfig);
      logger.info("Finished application migrations.");
    });
  } catch (err) {
    logger.error(err, "Boot up migration failed");
    process.exit(1);
  }
};
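A hedged sketch of how `runMigrations` could be wired into server boot follows; only the `runMigrations` signature shown above is taken from the diff, and the wrapper itself is hypothetical since the repo's actual entry point may differ:

// Hypothetical boot hook: given already-initialized Knex connections and a pino
// logger, apply any pending migrations before the API starts listening.
import { Knex } from "knex";
import { Logger } from "pino";

import { runMigrations } from "./auto-start-migrations";

export const migrateOnBoot = async (applicationDb: Knex, logger: Logger, auditLogDb?: Knex) => {
  // Safe to call from every replica: the pg_advisory_xact_lock taken inside
  // runMigrations serializes concurrent instances, so only one applies the
  // pending migrations and the rest skip.
  await runMigrations({ applicationDb, auditLogDb, logger });
};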
@@ -49,6 +49,9 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});

@@ -64,6 +67,9 @@ export const initDbConnection = ({
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
});
@@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});

@@ -38,7 +38,8 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
}
},
production: {
@@ -62,7 +63,8 @@
max: 10
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
}
}
} as Knex.Config;
backend/src/db/migrations/20241216013357_ssh-mgmt.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
    await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("status").notNullable(); // active / disabled
      t.string("friendlyName").notNullable();
      t.string("keyAlgorithm").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
    await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable().unique();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.binary("encryptedPrivateKey").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
    await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.string("status").notNullable(); // active / disabled
      t.string("name").notNullable();
      t.string("ttl").notNullable();
      t.string("maxTTL").notNullable();
      t.specificType("allowedUsers", "text[]").notNullable();
      t.specificType("allowedHosts", "text[]").notNullable();
      t.boolean("allowUserCertificates").notNullable();
      t.boolean("allowHostCertificates").notNullable();
      t.boolean("allowCustomKeyIds").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
    await knex.schema.createTable(TableName.SshCertificate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
      t.uuid("sshCertificateTemplateId");
      t.foreign("sshCertificateTemplateId")
        .references("id")
        .inTable(TableName.SshCertificateTemplate)
        .onDelete("SET NULL");
      t.string("serialNumber").notNullable().unique();
      t.string("certType").notNullable(); // user or host
      t.specificType("principals", "text[]").notNullable();
      t.string("keyId").notNullable();
      t.datetime("notBefore").notNullable();
      t.datetime("notAfter").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificate);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
    await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCertId").notNullable().unique();
      t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
      t.binary("encryptedCertificate").notNullable();
    });

    await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);

  await knex.schema.dropTableIfExists(TableName.SshCertificate);
  await dropOnUpdateTrigger(knex, TableName.SshCertificate);

  await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);

  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);

  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}
@@ -0,0 +1,40 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
    await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("key").notNullable();
      tb.string("value", 1020).notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.uuid("userId");
      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      tb.uuid("identityId");
      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      tb.uuid("secretId");
      tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
      tb.timestamps(true, true, true);
    });
  }

  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
  if (!hasSecretMetadataField) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
      t.jsonb("secretMetadata");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ResourceMetadata);

  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
  if (hasSecretMetadataField) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
      t.dropColumn("secretMetadata");
    });
  }
}
backend/src/db/migrations/20241218181018_app-connection.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AppConnection))) {
    await knex.schema.createTable(TableName.AppConnection, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name", 32).notNullable();
      t.string("description");
      t.string("app").notNullable();
      t.string("method").notNullable();
      t.binary("encryptedCredentials").notNullable();
      t.integer("version").defaultTo(1).notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.AppConnection);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.AppConnection);
  await dropOnUpdateTrigger(knex, TableName.AppConnection);
}
@@ -0,0 +1,49 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  // find any duplicate group names within organizations
  const duplicates = await knex(TableName.Groups)
    .select("orgId", "name")
    .count("* as count")
    .groupBy("orgId", "name")
    .having(knex.raw("count(*) > 1"));

  // for each set of duplicates, update all but one with a numbered suffix
  for await (const duplicate of duplicates) {
    const groups = await knex(TableName.Groups)
      .select("id", "name")
      .where({
        orgId: duplicate.orgId,
        name: duplicate.name
      })
      .orderBy("createdAt", "asc"); // keep original name for oldest group

    // skip the first (oldest) group, rename others with numbered suffix
    for (let i = 1; i < groups.length; i += 1) {
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.Groups)
        .where("id", groups[i].id)
        .update({
          name: `${groups[i].name} (${i})`,

          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore TS doesn't know about Knex's timestamp types
          updatedAt: new Date()
        });
    }
  }

  // add the unique constraint
  await knex.schema.alterTable(TableName.Groups, (t) => {
    t.unique(["orgId", "name"]);
  });
}

export async function down(knex: Knex): Promise<void> {
  // Remove the unique constraint
  await knex.schema.alterTable(TableName.Groups, (t) => {
    t.dropUnique(["orgId", "name"]);
  });
}
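To make the dedup pass above concrete, here is the outcome for hypothetical data, written as a short TypeScript comment since no code beyond the migration itself is implied by the diff:

// Illustrative outcome (hypothetical data): three groups named "Engineering" in
// one org, ordered by createdAt, end up as:
//   "Engineering"       <- oldest row keeps its original name
//   "Engineering (1)"
//   "Engineering (2)"
// after which the ["orgId", "name"] unique constraint can be added safely.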
@@ -0,0 +1,33 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
  const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");

  await knex.schema.alterTable(TableName.Project, (t) => {
    if (!hasEnforceCapitalizationCol) {
      t.boolean("enforceCapitalization").defaultTo(false).notNullable();
    }

    if (hasAutoCapitalizationCol) {
      t.boolean("autoCapitalization").defaultTo(false).alter();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
  const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");

  await knex.schema.alterTable(TableName.Project, (t) => {
    if (hasEnforceCapitalizationCol) {
      t.dropColumn("enforceCapitalization");
    }

    if (hasAutoCapitalizationCol) {
      t.boolean("autoCapitalization").defaultTo(true).alter();
    }
  });
}
backend/src/db/migrations/20250122055102_secret-sync.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretSync))) {
    await knex.schema.createTable(TableName.SecretSync, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name", 32).notNullable();
      t.string("description");
      t.string("destination").notNullable();
      t.boolean("isAutoSyncEnabled").notNullable().defaultTo(true);
      t.integer("version").defaultTo(1).notNullable();
      t.jsonb("destinationConfig").notNullable();
      t.jsonb("syncOptions").notNullable();
      // we're including projectId in addition to folder ID because we allow folderId to be null (if the folder
      // is deleted), to preserve sync configuration
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("folderId");
      t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("SET NULL");
      t.uuid("connectionId").notNullable();
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.timestamps(true, true, true);
      // sync secrets to destination
      t.string("syncStatus");
      t.string("lastSyncJobId");
      t.string("lastSyncMessage");
      t.datetime("lastSyncedAt");
      // import secrets from destination
      t.string("importStatus");
      t.string("lastImportJobId");
      t.string("lastImportMessage");
      t.datetime("lastImportedAt");
      // remove secrets from destination
      t.string("removeStatus");
      t.string("lastRemoveJobId");
      t.string("lastRemoveMessage");
      t.datetime("lastRemovedAt");
    });

    await createOnUpdateTrigger(knex, TableName.SecretSync);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretSync);
  await dropOnUpdateTrigger(knex, TableName.SecretSync);
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");

  await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
    if (!hasManageGroupMembershipsCol) {
      tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");

  await knex.schema.alterTable(TableName.OidcConfig, (t) => {
    if (hasManageGroupMembershipsCol) {
      t.dropColumn("manageGroupMemberships");
    }
  });
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.unique(["orgId", "name"]);
  });

  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.unique(["projectId", "name"]);
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.dropUnique(["orgId", "name"]);
  });

  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.dropUnique(["projectId", "name"]);
  });
}
@@ -0,0 +1,37 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
  const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
  const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
    TableName.IdentityGcpAuth,
    "allowedServiceAccounts"
  );
  const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
  if (hasTable) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
      if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
      if (hasAllowedZones) t.string("allowedZones", 2500).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
  const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
  const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
    TableName.IdentityGcpAuth,
    "allowedServiceAccounts"
  );
  const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
  if (hasTable) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
      if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
      if (hasAllowedZones) t.string("allowedZones").alter();
    });
  }
}
@@ -0,0 +1,27 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.dropColumn("slug");
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (!hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.string("slug", 32);
      });
    }
  }
}
@@ -0,0 +1,31 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
      if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
      if (hasLastImportMessage) t.string("lastImportMessage").alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
    });
  }
}
backend/src/db/migrations/20250210101840_webhook-to-kms.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
  const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedKey) t.binary("encryptedPassKey");
      if (!hasEncryptedUrl) t.binary("encryptedUrl");
      if (hasUrl) t.string("url").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const webhooks = await knex(TableName.Webhook)
    .where({})
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
    .select(
      "url",
      "encryptedSecretKey",
      "iv",
      "tag",
      "keyEncoding",
      "urlCipherText",
      "urlIV",
      "urlTag",
      knex.ref("id").withSchema(TableName.Webhook),
      "envId"
    )
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedWebhooks = await Promise.all(
    webhooks.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId: el.projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      let encryptedSecretKey = null;
      if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
        const decyptedSecretKey = infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.iv,
          tag: el.tag,
          ciphertext: el.encryptedSecretKey
        });
        encryptedSecretKey = projectKmsService.encryptor({
          plainText: Buffer.from(decyptedSecretKey, "utf8")
        }).cipherTextBlob;
      }

      const decryptedUrl =
        el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.urlIV,
              tag: el.urlTag,
              ciphertext: el.urlCipherText
            })
          : null;

      const encryptedUrl = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedUrl || el.url || "")
      }).cipherTextBlob;
      return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
    })
  );

  for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.Webhook)
      .insert(
        updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
          id: el.id,
          envId: el.envId,
          url: "",
          encryptedUrl: el.encryptedUrl,
          encryptedPassKey: el.encryptedSecretKey
        }))
      )
      .onConflict("id")
      .merge();
  }

  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
      if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
    });
  }
}
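The migration above (and the re-encryption migrations that follow) lean on the `createCircularCache` helper to keep at most a handful of per-project KMS cipher pairs in memory while rows are walked in projectId order. The repo's real implementation lives in ./utils/ring-buffer and is not shown in this diff; the sketch below is only an assumed shape consistent with the `getItem`/`push` usage seen above:

// Assumed shape of createCircularCache (may differ from the actual helper):
// a fixed-size ring keyed by id, overwriting the oldest slot once full.
export const createCircularCacheSketch = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    // Overwrite the oldest slot once the buffer is full.
    bufferItems[bufferIndex] = { id, item };
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => bufferItems.find((i) => i?.id === id)?.item;

  return { push, getItem };
};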
@@ -0,0 +1,111 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
  const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
  const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
  const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput");
      if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
      if (hasInputIVColumn) t.string("inputIV").nullable().alter();
      if (hasInputTagColumn) t.string("inputTag").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
    .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
    .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
    .select(selectAllTableCols(TableName.DynamicSecret))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedDynamicSecrets = await Promise.all(
    dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedInputData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.inputIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.inputTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.inputCiphertext
            })
          : "";

      const encryptedInput = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedInputData)
      }).cipherTextBlob;

      return { ...el, encryptedInput };
    })
  );

  for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.DynamicSecret)
      .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
    });
  }
}
@@ -0,0 +1,103 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const secretRotations = await knex(TableName.SecretRotation)
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
    .select(selectAllTableCols(TableName.SecretRotation))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedRotationData = await Promise.all(
    secretRotations.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedRotationData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.encryptedDataIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.encryptedDataTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.encryptedData
            })
          : "";

      const encryptedRotationData = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedRotationData)
      }).cipherTextBlob;
      return { ...el, encryptedRotationData };
    })
  );

  for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SecretRotation)
      .insert(updatedRotationData.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
    });
  }
}
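Note: the backfill writes above use Knex's upsert form, insert plus onConflict("id").merge(), which Postgres runs as INSERT ... ON CONFLICT ("id") DO UPDATE, so each batch overwrites the rows it just re-encrypted instead of creating duplicates. A small sketch of the same idiom with a hypothetical table and row shape:

// Hypothetical example; "my_table" and the row fields are placeholders.
const rows = [{ id: "some-uuid", encryptedValue: Buffer.from("ciphertext") }];
for (let i = 0; i < rows.length; i += 500) {
  // eslint-disable-next-line no-await-in-loop
  await knex("my_table")
    .insert(rows.slice(i, i + 500))
    .onConflict("id")
    .merge(); // on primary-key conflict, update the inserted columns in place
}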
@@ -0,0 +1,200 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
  const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedTokenReviewerJwt"
  );
  const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtIV"
  );
  const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtTag"
  );

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
      if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
      if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();

      if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityKubernetesAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityKubernetesConfigs = [];

  for await (const {
    encryptedSymmetricKey,
    symmetricKeyKeyEncoding,
    symmetricKeyTag,
    symmetricKeyIV,
    orgId,
    ...el
  } of identityKubernetesConfigs) {
    let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);

    if (!orgKmsService) {
      orgKmsService = await kmsService.createCipherPairWithDataKey(
        {
          type: KmsDataKey.Organization,
          orgId
        },
        knex
      );
      orgEncryptionRingBuffer.push(orgId, orgKmsService);
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const decryptedTokenReviewerJwt =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.tokenReviewerJwtIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.tokenReviewerJwtTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedTokenReviewerJwt
          })
        : "";

    const decryptedCertificate =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedCaCert && el.caCertIV && el.caCertTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.caCertIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.caCertTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedCaCert
          })
        : "";

    const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedTokenReviewerJwt)
    }).cipherTextBlob;
    const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedCertificate)
    }).cipherTextBlob;

    updatedIdentityKubernetesConfigs.push({
      ...el,
      accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
      encryptedKubernetesCaCertificate,
      encryptedKubernetesTokenReviewerJwt
    });
  }

  for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityKubernetesAuth)
      .insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (!hasEncryptedKubernetesTokenReviewerJwt)
        t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityK8sAuth(knex);
}

const dropIdentityK8sColumns = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityK8sColumns(knex);
}
@@ -0,0 +1,141 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");

  if (hasidentityOidcAuthTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();

      if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityOidcAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityOidcAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityOidcConfigs = await Promise.all(
    identityOidcConfig.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCaCert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCaCert
              })
            : "";

        const encryptedCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;

        return {
          ...el,
          accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
          encryptedCaCertificate
        };
      }
    )
  );

  for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityOidcAuth)
      .insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityOidcAuth(knex);
}

const dropIdentityOidcColumns = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  if (hasidentityOidcTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityOidcColumns(knex);
}
@@ -0,0 +1,493 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const samlConfigs = await knex(TableName.SamlConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
    .select(selectAllTableCols(TableName.SamlConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedSamlConfigs = await Promise.all(
    samlConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedEntryPoint =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.entryPointIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.entryPointTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedEntryPoint
              })
            : "";

        const decryptedIssuer =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedIssuer && el.issuerIV && el.issuerTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.issuerIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.issuerTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedIssuer
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCert && el.certIV && el.certTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.certIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.certTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCert
              })
            : "";

        const encryptedSamlIssuer = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedIssuer)
        }).cipherTextBlob;
        const encryptedSamlCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        const encryptedSamlEntryPoint = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedEntryPoint)
        }).cipherTextBlob;
        return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
      }
    )
  );

  for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SamlConfig)
      .insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
    });
  }
};

const reencryptLdapConfig = async (knex: Knex) => {
  const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
  const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
  const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
  const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
  const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
  const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
  const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
      if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
      if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
      if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
      if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
      if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();

      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const ldapConfigs = await knex(TableName.LdapConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
    .select(selectAllTableCols(TableName.LdapConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedLdapConfigs = await Promise.all(
    ldapConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedBindDN =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindDN && el.bindDNIV && el.bindDNTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindDNIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindDNTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindDN
              })
            : "";

        const decryptedBindPass =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindPass && el.bindPassIV && el.bindPassTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindPassIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindPassTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindPass
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCACert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCACert
              })
            : "";

        const encryptedLdapBindDN = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindDN)
        }).cipherTextBlob;
        const encryptedLdapBindPass = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindPass)
        }).cipherTextBlob;
        const encryptedLdapCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
      }
    )
  );

  for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.LdapConfig)
      .insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
    });
  }
};

const reencryptOidcConfig = async (knex: Knex) => {
  const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
    TableName.OidcConfig,
    "encryptedOidcClientSecret"
  );

  const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
  const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
  const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
  const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
  const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
  const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");

  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
      if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
      if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
      if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
      if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
      if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();

      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const oidcConfigs = await knex(TableName.OidcConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
    .select(selectAllTableCols(TableName.OidcConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedOidcConfigs = await Promise.all(
    oidcConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedClientId =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientId && el.clientIdIV && el.clientIdTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientIdIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientIdTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientId
              })
            : "";

        const decryptedClientSecret =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientSecretIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientSecretTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientSecret
              })
            : "";

        const encryptedOidcClientId = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientId)
        }).cipherTextBlob;
        const encryptedOidcClientSecret = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientSecret)
        }).cipherTextBlob;
        return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
      }
    )
  );

  for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.OidcConfig)
      .insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptSamlConfig(knex);
  await reencryptLdapConfig(knex);
  await reencryptOidcConfig(knex);
}

const dropSamlConfigColumns = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
      if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
    });
  }
};

const dropLdapConfigColumns = async (knex: Knex) => {
  const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
      if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
    });
  }
};

const dropOidcConfigColumns = async (knex: Knex) => {
  const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
      if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropSamlConfigColumns(knex);
  await dropLdapConfigColumns(knex);
  await dropOidcConfigColumns(knex);
}
backend/src/db/migrations/utils/env-config.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { z } from "zod";

import { zpStr } from "@app/lib/zod";

const envSchema = z
  .object({
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
    DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
    // TODO(akhilmhdh): will be changed to one
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
  }));

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(
      "Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
    );
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
    process.exit(-1);
  }

  return Object.freeze(parsedEnv.data);
};
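Note: getMigrationEnvConfig is what lets the standalone migration code above reach the database and the instance encryption key without booting the full server config. A hedged usage sketch; the values below are placeholders, not real credentials:

// Placeholders for local use only; either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be set.
process.env.DB_CONNECTION_URI ??= "postgresql://user:pass@localhost:5432/infisical";
process.env.ENCRYPTION_KEY ??= "dev-only-encryption-key";

const envConfig = getMigrationEnvConfig(); // calls process.exit(-1) if validation fails
// isHsmConfigured is true only when HSM_LIB_PATH, HSM_PIN and HSM_KEY_LABEL are all provided
console.log(envConfig.DB_PORT, envConfig.isHsmConfigured);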
@@ -1,105 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";

const getInstanceRootKey = async (knex: Knex) => {
  const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
  // if root key its base64 encoded
  const isBase64 = !process.env.ENCRYPTION_KEY;
  if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
  const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");

  const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
  const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  if (kmsRootConfig) {
    const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
    // set the flag so that other instancen nodes can start
    return decryptedRootKey;
  }

  const newRootKey = randomSecureBytes(32);
  const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
  await knex(TableName.KmsServerRootConfig).insert({
    encryptedRootKey,
    // eslint-disable-next-line
    // @ts-ignore id is kept as fixed for idempotence and to avoid race condition
    id: KMS_ROOT_CONFIG_UUID
  });
  return encryptedRootKey;
};

export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
  const KMS_VERSION = "v01";
  const KMS_VERSION_BLOB_LENGTH = 3;
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  const project = await knex(TableName.Project).where({ id: projectId }).first();
  if (!project) throw new Error("Missing project id");

  const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);

  let secretManagerKmsKey;
  const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
  if (projectSecretManagerKmsId) {
    const kmsDoc = await knex(TableName.KmsKey)
      .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
      .where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
      .first();
    if (!kmsDoc) throw new Error("missing kms");
    secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
  } else {
    const [kmsDoc] = await knex(TableName.KmsKey)
      .insert({
        name: slugify(alphaNumericNanoId(8).toLowerCase()),
        orgId: project.orgId,
        isReserved: false
      })
      .returning("*");

    secretManagerKmsKey = randomSecureBytes(32);
    const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
    await knex(TableName.InternalKms).insert({
      version: 1,
      encryptedKey: encryptedKeyMaterial,
      encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
      kmsKeyId: kmsDoc.id
    });
  }

  const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
  let dataKey: Buffer;
  if (!encryptedSecretManagerDataKey) {
    dataKey = randomSecureBytes();
    // the below versioning we do it automatically in kms service
    const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
    const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
    await knex(TableName.Project)
      .where({ id: projectId })
      .update({
        kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
      });
  } else {
    const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
    dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
  }

  return {
    encryptor: ({ plainText }: { plainText: Buffer }) => {
      const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);

      // Buffer#1 encrypted text + Buffer#2 version number
      const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
      const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
      return { cipherTextBlob };
    },
    decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
      const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
      const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
      return decryptedBlob;
    }
  };
};
backend/src/db/migrations/utils/ring-buffer.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
export const createCircularCache = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
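Note: createCircularCache is a tiny fixed-size, id-keyed cache that overwrites slots once the buffer wraps; the migrations above use it with 25 slots so rows ordered by orgId/projectId reuse the same cipher pair instead of deriving a new one per row. Behaviour sketch:

const cache = createCircularCache<number>(2);
cache.push("a", 1);
cache.push("b", 2);
cache.push("c", 3); // buffer is full, so the slot previously holding "a" is overwritten
cache.getItem("a"); // undefined
cache.getItem("b"); // 2
cache.getItem("c"); // 3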
backend/src/db/migrations/utils/services.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
import { Knex } from "knex";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";

import { TMigrationEnvConfig } from "./env-config";

type TDependencies = {
  envConfig: TMigrationEnvConfig;
  db: Knex;
  keyStore: TKeyStoreFactory;
};

export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
  // eslint-disable-next-line no-param-reassign
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const projectDAL = projectDALFactory(db);

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  return { kmsService };
};
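Note: getMigrationEncryptionServices wires up just enough of the KMS stack (plus the optional HSM) for a migration to mint per-org or per-project data-key cipher pairs. A hedged sketch of the round trip inside a migration body, assuming the cipher pair also exposes a decryptor symmetrical to the encryptor used above (orgId and knex come from the surrounding migration):

initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

// Per-organization cipher pair, as used by the re-encryption migrations in this diff.
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey(
  { type: KmsDataKey.Organization, orgId },
  knex
);
const { cipherTextBlob } = encryptor({ plainText: Buffer.from("example secret") });
const roundTripped = decryptor({ cipherTextBlob }); // Buffer containing "example secret" (assumed shape)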
backend/src/db/rename-migrations-to-mjs.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import path from "node:path";

import dotenv from "dotenv";

import { initAuditLogDbConnection, initDbConnection } from "./instance";

const isProduction = process.env.NODE_ENV === "production";

// Update with your config settings.
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

const runRename = async () => {
  if (!isProduction) return;
  const migrationTable = "infisical_migrations";
  const applicationDb = initDbConnection({
    dbConnectionUri: process.env.DB_CONNECTION_URI as string,
    dbRootCert: process.env.DB_ROOT_CERT
  });

  const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
        dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
        dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

  const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
  if (hasMigrationTable) {
    const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
    if (firstFile?.name?.includes(".ts")) {
      await applicationDb(migrationTable).update({
        name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
      });
    }
  }
  if (auditLogDb) {
    const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
    if (hasMigrationTableInAuditLog) {
      const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
      if (firstFile?.name?.includes(".ts")) {
        await auditLogDb(migrationTable).update({
          name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
        });
      }
    }
  }
  await applicationDb.destroy();
  await auditLogDb?.destroy();
};

void runRename();
backend/src/db/schemas/app-connections.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const AppConnectionsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().nullable().optional(),
  app: z.string(),
  method: z.string(),
  encryptedCredentials: zodBuffer,
  version: z.number().default(1),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
export type TAppConnectionsInsert = Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>;
export type TAppConnectionsUpdate = Partial<Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>>;
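Note: these generated zod schemas double as row validators and as the source of the insert/update types. A small illustrative sketch; the table name string and the field values are assumptions for illustration, not taken from this diff:

const row = await knex("app_connections").first(); // table name assumed for illustration
const connection = AppConnectionsSchema.parse(row); // throws if the row no longer matches the schema
const draft: TAppConnectionsInsert = {
  name: "example-connection",
  app: "placeholder-app",
  method: "placeholder-method",
  encryptedCredentials: Buffer.from(""),
  orgId: "00000000-0000-0000-0000-000000000000"
}; // immutable keys are stripped via TImmutableDBKeys (assumed to cover id/createdAt/updatedAt)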
@@ -5,6 +5,8 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const DynamicSecretsSchema = z.object({
@@ -14,16 +16,17 @@ export const DynamicSecretsSchema = z.object({
  type: z.string(),
  defaultTTL: z.string(),
  maxTTL: z.string().nullable().optional(),
  inputIV: z.string(),
  inputCiphertext: z.string(),
  inputTag: z.string(),
  inputIV: z.string().nullable().optional(),
  inputCiphertext: z.string().nullable().optional(),
  inputTag: z.string().nullable().optional(),
  algorithm: z.string().default("aes-256-gcm"),
  keyEncoding: z.string().default("utf8"),
  folderId: z.string().uuid(),
  status: z.string().nullable().optional(),
  statusDetails: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  encryptedInput: zodBuffer
});

export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

@@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  type: z.string(),
  allowedServiceAccounts: z.string(),
  allowedProjects: z.string(),
  allowedZones: z.string()
  allowedServiceAccounts: z.string().nullable().optional(),
  allowedProjects: z.string().nullable().optional(),
  allowedZones: z.string().nullable().optional()
});

export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;

@@ -5,6 +5,8 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityKubernetesAuthsSchema = z.object({
@@ -17,15 +19,17 @@ export const IdentityKubernetesAuthsSchema = z.object({
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  kubernetesHost: z.string(),
  encryptedCaCert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  encryptedTokenReviewerJwt: z.string(),
  tokenReviewerJwtIV: z.string(),
  tokenReviewerJwtTag: z.string(),
  encryptedCaCert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
  encryptedTokenReviewerJwt: z.string().nullable().optional(),
  tokenReviewerJwtIV: z.string().nullable().optional(),
  tokenReviewerJwtTag: z.string().nullable().optional(),
  allowedNamespaces: z.string(),
  allowedNames: z.string(),
  allowedAudience: z.string()
  allowedAudience: z.string(),
  encryptedKubernetesTokenReviewerJwt: zodBuffer,
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});

export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

@@ -5,6 +5,8 @@
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityOidcAuthsSchema = z.object({
@@ -15,15 +17,16 @@ export const IdentityOidcAuthsSchema = z.object({
  accessTokenTrustedIps: z.unknown(),
  identityId: z.string().uuid(),
  oidcDiscoveryUrl: z.string(),
  encryptedCaCert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  encryptedCaCert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
  boundIssuer: z.string(),
  boundAudiences: z.string(),
  boundClaims: z.unknown(),
  boundSubject: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  encryptedCaCertificate: zodBuffer.nullable().optional()
});

export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

@@ -71,6 +71,7 @@ export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./resource-metadata";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@@ -107,6 +108,11 @@ export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./ssh-certificate-authorities";
export * from "./ssh-certificate-authority-secrets";
export * from "./ssh-certificate-bodies";
export * from "./ssh-certificate-templates";
export * from "./ssh-certificates";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";

@@ -16,8 +16,7 @@ export const KmsKeysSchema = z.object({
  name: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string().nullable().optional(),
  slug: z.string().nullable().optional()
  projectId: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const LdapConfigsSchema = z.object({
|
||||
@ -12,22 +14,25 @@ export const LdapConfigsSchema = z.object({
|
||||
orgId: z.string().uuid(),
|
||||
isActive: z.boolean(),
|
||||
url: z.string(),
|
||||
encryptedBindDN: z.string(),
|
||||
bindDNIV: z.string(),
|
||||
bindDNTag: z.string(),
|
||||
encryptedBindPass: z.string(),
|
||||
bindPassIV: z.string(),
|
||||
bindPassTag: z.string(),
|
||||
encryptedBindDN: z.string().nullable().optional(),
|
||||
bindDNIV: z.string().nullable().optional(),
|
||||
bindDNTag: z.string().nullable().optional(),
|
||||
encryptedBindPass: z.string().nullable().optional(),
|
||||
bindPassIV: z.string().nullable().optional(),
|
||||
bindPassTag: z.string().nullable().optional(),
|
||||
searchBase: z.string(),
|
||||
encryptedCACert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedCACert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
groupSearchBase: z.string().default(""),
|
||||
groupSearchFilter: z.string().default(""),
|
||||
searchFilter: z.string().default(""),
|
||||
uniqueUserAttribute: z.string().default("")
|
||||
uniqueUserAttribute: z.string().default(""),
|
||||
encryptedLdapBindDN: zodBuffer,
|
||||
encryptedLdapBindPass: zodBuffer,
|
||||
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
|
||||
|
@@ -2,6 +2,11 @@ import { z } from "zod";

 export enum TableName {
   Users = "users",
+  SshCertificateAuthority = "ssh_certificate_authorities",
+  SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
+  SshCertificateTemplate = "ssh_certificate_templates",
+  SshCertificate = "ssh_certificates",
+  SshCertificateBody = "ssh_certificate_bodies",
   CertificateAuthority = "certificate_authorities",
   CertificateTemplateEstConfig = "certificate_template_est_configs",
   CertificateAuthorityCert = "certificate_authority_certs",
@@ -75,6 +80,7 @@ export enum TableName {
   IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
   // used by both identity and users
   IdentityMetadata = "identity_metadata",
+  ResourceMetadata = "resource_metadata",
   ScimToken = "scim_tokens",
   AccessApprovalPolicy = "access_approval_policies",
   AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@@ -124,7 +130,9 @@ export enum TableName {
   KmsKeyVersion = "kms_key_versions",
   WorkflowIntegrations = "workflow_integrations",
   SlackIntegrations = "slack_integrations",
-  ProjectSlackConfigs = "project_slack_configs"
+  ProjectSlackConfigs = "project_slack_configs",
+  AppConnection = "app_connections",
+  SecretSync = "secret_syncs"
 }

 export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -205,5 +213,15 @@ export enum IdentityAuthMethod {
 export enum ProjectType {
   SecretManager = "secret-manager",
   CertificateManager = "cert-manager",
-  KMS = "kms"
+  KMS = "kms",
+  SSH = "ssh"
 }
+
+export enum ActionProjectType {
+  SecretManager = ProjectType.SecretManager,
+  CertificateManager = ProjectType.CertificateManager,
+  KMS = ProjectType.KMS,
+  SSH = ProjectType.SSH,
+  // project operations that happen on all types
+  Any = "any"
+}
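For orientation (not part of the diff), new TableName members are used the same way as existing ones when building queries; the helper below is a hypothetical illustration.

import { Knex } from "knex";

import { TableName } from "./models";

// Hypothetical helper: list app connections for an org via the new table name
const listAppConnections = (db: Knex, orgId: string) => db(TableName.AppConnection).where({ orgId });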
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const OidcConfigsSchema = z.object({
|
||||
@ -15,19 +17,22 @@ export const OidcConfigsSchema = z.object({
|
||||
jwksUri: z.string().nullable().optional(),
|
||||
tokenEndpoint: z.string().nullable().optional(),
|
||||
userinfoEndpoint: z.string().nullable().optional(),
|
||||
encryptedClientId: z.string(),
|
||||
encryptedClientId: z.string().nullable().optional(),
|
||||
configurationType: z.string(),
|
||||
clientIdIV: z.string(),
|
||||
clientIdTag: z.string(),
|
||||
encryptedClientSecret: z.string(),
|
||||
clientSecretIV: z.string(),
|
||||
clientSecretTag: z.string(),
|
||||
clientIdIV: z.string().nullable().optional(),
|
||||
clientIdTag: z.string().nullable().optional(),
|
||||
encryptedClientSecret: z.string().nullable().optional(),
|
||||
clientSecretIV: z.string().nullable().optional(),
|
||||
clientSecretTag: z.string().nullable().optional(),
|
||||
allowedEmailDomains: z.string().nullable().optional(),
|
||||
isActive: z.boolean(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid(),
|
||||
lastUsed: z.date().nullable().optional()
|
||||
lastUsed: z.date().nullable().optional(),
|
||||
manageGroupMemberships: z.boolean().default(false),
|
||||
encryptedOidcClientId: zodBuffer,
|
||||
encryptedOidcClientSecret: zodBuffer
|
||||
});
|
||||
|
||||
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
|
||||
|
@@ -13,7 +13,7 @@ export const ProjectsSchema = z.object({
   id: z.string(),
   name: z.string(),
   slug: z.string(),
-  autoCapitalization: z.boolean().default(true).nullable().optional(),
+  autoCapitalization: z.boolean().default(false).nullable().optional(),
   orgId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
@@ -25,7 +25,8 @@ export const ProjectsSchema = z.object({
   kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
   kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
   description: z.string().nullable().optional(),
-  type: z.string()
+  type: z.string(),
+  enforceCapitalization: z.boolean().default(false)
 });

 export type TProjects = z.infer<typeof ProjectsSchema>;
backend/src/db/schemas/resource-metadata.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ResourceMetadataSchema = z.object({
  id: z.string().uuid(),
  key: z.string(),
  value: z.string(),
  orgId: z.string().uuid(),
  userId: z.string().uuid().nullable().optional(),
  identityId: z.string().uuid().nullable().optional(),
  secretId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TResourceMetadata = z.infer<typeof ResourceMetadataSchema>;
export type TResourceMetadataInsert = Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>;
export type TResourceMetadataUpdate = Partial<Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>>;
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SamlConfigsSchema = z.object({
|
||||
@ -23,7 +25,10 @@ export const SamlConfigsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid(),
|
||||
lastUsed: z.date().nullable().optional()
|
||||
lastUsed: z.date().nullable().optional(),
|
||||
encryptedSamlEntryPoint: zodBuffer,
|
||||
encryptedSamlIssuer: zodBuffer,
|
||||
encryptedSamlCertificate: zodBuffer
|
||||
});
|
||||
|
||||
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
|
||||
|
@ -24,7 +24,8 @@ export const SecretApprovalRequestsSecretsV2Schema = z.object({
|
||||
requestId: z.string().uuid(),
|
||||
op: z.string(),
|
||||
secretId: z.string().uuid().nullable().optional(),
|
||||
secretVersion: z.string().uuid().nullable().optional()
|
||||
secretVersion: z.string().uuid().nullable().optional(),
|
||||
secretMetadata: z.unknown().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretRotationsSchema = z.object({
|
||||
@ -22,7 +24,8 @@ export const SecretRotationsSchema = z.object({
|
||||
keyEncoding: z.string().nullable().optional(),
|
||||
envId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
encryptedRotationData: zodBuffer
|
||||
});
|
||||
|
||||
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
|
||||
|
backend/src/db/schemas/secret-syncs.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretSyncsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().nullable().optional(),
  destination: z.string(),
  isAutoSyncEnabled: z.boolean().default(true),
  version: z.number().default(1),
  destinationConfig: z.unknown(),
  syncOptions: z.unknown(),
  projectId: z.string(),
  folderId: z.string().uuid().nullable().optional(),
  connectionId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  syncStatus: z.string().nullable().optional(),
  lastSyncJobId: z.string().nullable().optional(),
  lastSyncMessage: z.string().nullable().optional(),
  lastSyncedAt: z.date().nullable().optional(),
  importStatus: z.string().nullable().optional(),
  lastImportJobId: z.string().nullable().optional(),
  lastImportMessage: z.string().nullable().optional(),
  lastImportedAt: z.date().nullable().optional(),
  removeStatus: z.string().nullable().optional(),
  lastRemoveJobId: z.string().nullable().optional(),
  lastRemoveMessage: z.string().nullable().optional(),
  lastRemovedAt: z.date().nullable().optional()
});

export type TSecretSyncs = z.infer<typeof SecretSyncsSchema>;
export type TSecretSyncsInsert = Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>;
export type TSecretSyncsUpdate = Partial<Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>>;
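A hedged illustration of the Insert type above: because TImmutableDBKeys removes id, createdAt, and updatedAt, an insert payload only supplies the mutable columns. All values below are made up.

import { TSecretSyncsInsert } from "./secret-syncs"; // relative path is an assumption

// Hypothetical payload for creating a secret sync row
const newSync: TSecretSyncsInsert = {
  name: "prod-sync",
  destination: "example-destination", // placeholder value
  destinationConfig: { region: "us-east-1" },
  syncOptions: {},
  projectId: "<project-id>",
  connectionId: "00000000-0000-0000-0000-000000000000"
};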
backend/src/db/schemas/ssh-certificate-authorities.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SshCertificateAuthoritiesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
projectId: z.string(),
|
||||
status: z.string(),
|
||||
friendlyName: z.string(),
|
||||
keyAlgorithm: z.string()
|
||||
});
|
||||
|
||||
export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;
|
||||
export type TSshCertificateAuthoritiesInsert = Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>;
|
||||
export type TSshCertificateAuthoritiesUpdate = Partial<
|
||||
Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>
|
||||
>;
|
backend/src/db/schemas/ssh-certificate-authority-secrets.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SshCertificateAuthoritySecretsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
sshCaId: z.string().uuid(),
|
||||
encryptedPrivateKey: zodBuffer
|
||||
});
|
||||
|
||||
export type TSshCertificateAuthoritySecrets = z.infer<typeof SshCertificateAuthoritySecretsSchema>;
|
||||
export type TSshCertificateAuthoritySecretsInsert = Omit<
|
||||
z.input<typeof SshCertificateAuthoritySecretsSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TSshCertificateAuthoritySecretsUpdate = Partial<
|
||||
Omit<z.input<typeof SshCertificateAuthoritySecretsSchema>, TImmutableDBKeys>
|
||||
>;
|
backend/src/db/schemas/ssh-certificate-bodies.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SshCertificateBodiesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
sshCertId: z.string().uuid(),
|
||||
encryptedCertificate: zodBuffer
|
||||
});
|
||||
|
||||
export type TSshCertificateBodies = z.infer<typeof SshCertificateBodiesSchema>;
|
||||
export type TSshCertificateBodiesInsert = Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>;
|
||||
export type TSshCertificateBodiesUpdate = Partial<Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>>;
|
backend/src/db/schemas/ssh-certificate-templates.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SshCertificateTemplatesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
sshCaId: z.string().uuid(),
|
||||
status: z.string(),
|
||||
name: z.string(),
|
||||
ttl: z.string(),
|
||||
maxTTL: z.string(),
|
||||
allowedUsers: z.string().array(),
|
||||
allowedHosts: z.string().array(),
|
||||
allowUserCertificates: z.boolean(),
|
||||
allowHostCertificates: z.boolean(),
|
||||
allowCustomKeyIds: z.boolean()
|
||||
});
|
||||
|
||||
export type TSshCertificateTemplates = z.infer<typeof SshCertificateTemplatesSchema>;
|
||||
export type TSshCertificateTemplatesInsert = Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>;
|
||||
export type TSshCertificateTemplatesUpdate = Partial<
|
||||
Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>
|
||||
>;
|
backend/src/db/schemas/ssh-certificates.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SshCertificatesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
sshCaId: z.string().uuid(),
|
||||
sshCertificateTemplateId: z.string().uuid().nullable().optional(),
|
||||
serialNumber: z.string(),
|
||||
certType: z.string(),
|
||||
principals: z.string().array(),
|
||||
keyId: z.string(),
|
||||
notBefore: z.date(),
|
||||
notAfter: z.date()
|
||||
});
|
||||
|
||||
export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;
|
||||
export type TSshCertificatesInsert = Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>;
|
||||
export type TSshCertificatesUpdate = Partial<Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>>;
|
@ -5,12 +5,14 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const WebhooksSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
secretPath: z.string().default("/"),
|
||||
url: z.string(),
|
||||
url: z.string().nullable().optional(),
|
||||
lastStatus: z.string().nullable().optional(),
|
||||
lastRunErrorMessage: z.string().nullable().optional(),
|
||||
isDisabled: z.boolean().default(false),
|
||||
@ -25,7 +27,9 @@ export const WebhooksSchema = z.object({
|
||||
urlCipherText: z.string().nullable().optional(),
|
||||
urlIV: z.string().nullable().optional(),
|
||||
urlTag: z.string().nullable().optional(),
|
||||
type: z.string().default("general").nullable().optional()
|
||||
type: z.string().default("general").nullable().optional(),
|
||||
encryptedPassKey: zodBuffer.nullable().optional(),
|
||||
encryptedUrl: zodBuffer
|
||||
});
|
||||
|
||||
export type TWebhooks = z.infer<typeof WebhooksSchema>;
|
||||
|
@ -22,9 +22,13 @@ import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-rou
|
||||
import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router";
|
||||
import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router";
|
||||
import { registerSecretRotationRouter } from "./secret-rotation-router";
|
||||
import { registerSecretRouter } from "./secret-router";
|
||||
import { registerSecretScanningRouter } from "./secret-scanning-router";
|
||||
import { registerSecretVersionRouter } from "./secret-version-router";
|
||||
import { registerSnapshotRouter } from "./snapshot-router";
|
||||
import { registerSshCaRouter } from "./ssh-certificate-authority-router";
|
||||
import { registerSshCertRouter } from "./ssh-certificate-router";
|
||||
import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
|
||||
import { registerTrustedIpRouter } from "./trusted-ip-router";
|
||||
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
|
||||
|
||||
@ -68,6 +72,15 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
{ prefix: "/pki" }
|
||||
);
|
||||
|
||||
await server.register(
|
||||
async (sshRouter) => {
|
||||
await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
|
||||
await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
|
||||
await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
|
||||
},
|
||||
{ prefix: "/ssh" }
|
||||
);
|
||||
|
||||
await server.register(
|
||||
async (ssoRouter) => {
|
||||
await ssoRouter.register(registerSamlRouter);
|
||||
@ -80,6 +93,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
await server.register(registerLdapRouter, { prefix: "/ldap" });
|
||||
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
|
||||
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
|
||||
await server.register(registerSecretRouter, { prefix: "/secrets" });
|
||||
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
|
||||
await server.register(registerGroupRouter, { prefix: "/groups" });
|
||||
await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
|
||||
|
@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
|
||||
import LdapStrategy from "passport-ldapauth";
|
||||
import { z } from "zod";
|
||||
|
||||
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
|
||||
import { LdapGroupMapsSchema } from "@app/db/schemas";
|
||||
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
|
||||
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
@ -22,6 +22,7 @@ import { BadRequestError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
@ -187,7 +188,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
caCert: z.string().trim().default("")
|
||||
}),
|
||||
response: {
|
||||
200: LdapConfigsSchema
|
||||
200: SanitizedLdapConfigSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
@ -228,7 +229,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
.partial()
|
||||
.merge(z.object({ organizationId: z.string() })),
|
||||
response: {
|
||||
200: LdapConfigsSchema
|
||||
200: SanitizedLdapConfigSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
|
@ -11,13 +11,28 @@ import fastifySession from "@fastify/session";
|
||||
import RedisStore from "connect-redis";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
|
||||
import { OidcConfigsSchema } from "@app/db/schemas";
|
||||
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
isActive: true,
|
||||
allowedEmailDomains: true,
|
||||
manageGroupMemberships: true
|
||||
});
|
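For context (an illustration, not part of the diff): picking only non-sensitive keys builds a response schema that never serializes encrypted columns. A minimal sketch of the same pattern with a made-up schema:

import { z } from "zod";

// Minimal sketch of the .pick pattern used above
const FullSchema = z.object({ id: z.string(), encryptedClientSecret: z.string() });
const PublicSchema = FullSchema.pick({ id: true });

// zod objects strip unknown keys by default, so the secret never reaches the response
PublicSchema.parse({ id: "abc", encryptedClientSecret: "hidden" }); // -> { id: "abc" }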
||||
|
||||
export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
|
||||
@ -142,7 +157,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
orgSlug: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
200: SanitizedOidcConfigSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
@ -153,7 +168,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
discoveryURL: true,
|
||||
isActive: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true
|
||||
allowedEmailDomains: true,
|
||||
manageGroupMemberships: true
|
||||
}).extend({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string()
|
||||
@ -207,12 +223,13 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
userinfoEndpoint: z.string().trim(),
|
||||
clientId: z.string().trim(),
|
||||
clientSecret: z.string().trim(),
|
||||
isActive: z.boolean()
|
||||
isActive: z.boolean(),
|
||||
manageGroupMemberships: z.boolean().optional()
|
||||
})
|
||||
.partial()
|
||||
.merge(z.object({ orgSlug: z.string() })),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
200: SanitizedOidcConfigSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
@ -223,7 +240,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true,
|
||||
isActive: true
|
||||
isActive: true,
|
||||
manageGroupMemberships: true
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -272,7 +290,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
clientId: z.string().trim(),
|
||||
clientSecret: z.string().trim(),
|
||||
isActive: z.boolean(),
|
||||
orgSlug: z.string().trim()
|
||||
orgSlug: z.string().trim(),
|
||||
manageGroupMemberships: z.boolean().optional().default(false)
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
|
||||
@ -323,19 +342,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
isActive: true,
|
||||
allowedEmailDomains: true
|
||||
})
|
||||
200: SanitizedOidcConfigSchema
|
||||
}
|
||||
},
|
||||
|
||||
@ -350,4 +357,25 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/manage-group-memberships",
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgId: z.string().trim().min(1, "Org ID is required")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
isEnabled: z.boolean()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const isEnabled = await server.services.oidc.isOidcManageGroupMembershipsEnabled(req.query.orgId, req.permission);
|
||||
|
||||
return { isEnabled };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -23,7 +23,8 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
),
|
||||
name: z.string().trim(),
|
||||
description: z.string().trim().optional(),
|
||||
description: z.string().trim().nullish(),
|
||||
// TODO(scott): once UI refactored permissions: OrgPermissionSchema.array()
|
||||
permissions: z.any().array()
|
||||
}),
|
||||
response: {
|
||||
@ -95,7 +96,8 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
|
||||
)
|
||||
.optional(),
|
||||
name: z.string().trim().optional(),
|
||||
description: z.string().trim().optional(),
|
||||
description: z.string().trim().nullish(),
|
||||
// TODO(scott): once UI refactored permissions: OrgPermissionSchema.array().optional()
|
||||
permissions: z.any().array().optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -39,7 +39,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
)
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
|
||||
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
}),
|
||||
response: {
|
||||
@ -95,7 +95,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(PROJECT_ROLE.UPDATE.slug)
|
||||
.optional(),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
|
||||
|
@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
|
||||
import { FastifyRequest } from "fastify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { SamlConfigsSchema } from "@app/db/schemas";
|
||||
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
type TSAMLConfig = {
|
||||
@ -84,7 +84,10 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
samlConfig.audience = `spn:${ssoConfig.issuer}`;
|
||||
}
|
||||
}
|
||||
if (ssoConfig.authProvider === SamlProviders.GOOGLE_SAML) {
|
||||
if (
|
||||
ssoConfig.authProvider === SamlProviders.GOOGLE_SAML ||
|
||||
ssoConfig.authProvider === SamlProviders.AUTH0_SAML
|
||||
) {
|
||||
samlConfig.wantAssertionsSigned = false;
|
||||
}
|
||||
|
||||
@ -123,7 +126,10 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
`email: ${email} firstName: ${profile.firstName as string}`
|
||||
);
|
||||
|
||||
throw new Error("Invalid saml request. Missing email or first name");
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Missing email or first name. Please double check your SAML attribute mapping for the selected provider."
|
||||
});
|
||||
}
|
||||
|
||||
const userMetadata = Object.keys(profile.attributes || {})
|
||||
@ -292,7 +298,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
cert: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: SamlConfigsSchema
|
||||
200: SanitizedSamlConfigSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
@ -327,7 +333,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
.partial()
|
||||
.merge(z.object({ organizationId: z.string() })),
|
||||
response: {
|
||||
200: SamlConfigsSchema
|
||||
200: SanitizedSamlConfigSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
|
@ -12,6 +12,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
|
||||
|
||||
const approvalRequestUser = z.object({ userId: z.string().nullable().optional() }).merge(
|
||||
UsersSchema.pick({
|
||||
@ -274,6 +275,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
.extend({
|
||||
op: z.string(),
|
||||
tags: tagSchema,
|
||||
secretMetadata: ResourceMetadataSchema.nullish(),
|
||||
secret: z
|
||||
.object({
|
||||
id: z.string(),
|
||||
@ -291,7 +293,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
secretKey: z.string(),
|
||||
secretValue: z.string().optional(),
|
||||
secretComment: z.string().optional(),
|
||||
tags: tagSchema
|
||||
tags: tagSchema,
|
||||
secretMetadata: ResourceMetadataSchema.nullish()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
|
backend/src/ee/routes/v1/secret-router.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
import z from "zod";
|
||||
|
||||
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
|
||||
import { RAW_SECRETS } from "@app/lib/api-docs";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const AccessListEntrySchema = z
|
||||
.object({
|
||||
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
|
||||
id: z.string(),
|
||||
membershipId: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
.array();
|
||||
|
||||
export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:secretName/access-list",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get list of users, machine identities, and groups with access to a secret",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
secretName: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.secretName)
|
||||
}),
|
||||
querystring: z.object({
|
||||
workspaceId: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.workspaceId),
|
||||
environment: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.environment),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.default("/")
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.GET_ACCESS_LIST.secretPath)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
groups: AccessListEntrySchema,
|
||||
identities: AccessListEntrySchema,
|
||||
users: AccessListEntrySchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { secretName } = req.params;
|
||||
const { secretPath, environment, workspaceId: projectId } = req.query;
|
||||
|
||||
return server.services.secret.getSecretAccessList({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
secretPath,
|
||||
environment,
|
||||
projectId,
|
||||
secretName
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
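A hedged client-side sketch for the new access-list route; the https://app.infisical.com base URL and /api/v1/secrets prefix are inferred from the /secrets registration earlier in this diff and may differ in a given deployment, and the token and IDs are placeholders.

// Hypothetical request to the access-list endpoint defined above
const getSecretAccessList = async (token: string, secretName: string) => {
  const params = new URLSearchParams({
    workspaceId: "<project-id>",
    environment: "dev",
    secretPath: "/"
  });
  const res = await fetch(
    `https://app.infisical.com/api/v1/secrets/${encodeURIComponent(secretName)}/access-list?${params.toString()}`,
    { headers: { Authorization: `Bearer ${token}` } }
  );
  return res.json(); // { groups, identities, users }
};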
@ -1,9 +1,13 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas";
|
||||
import { SecretScanningRiskStatus } from "@app/ee/services/secret-scanning/secret-scanning-types";
|
||||
import {
|
||||
SecretScanningResolvedStatus,
|
||||
SecretScanningRiskStatus
|
||||
} from "@app/ee/services/secret-scanning/secret-scanning-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -97,6 +101,45 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/organization/:organizationId/risks/export",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({ organizationId: z.string().trim() }),
|
||||
querystring: z.object({
|
||||
repositoryNames: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? val.split(",") : undefined)),
|
||||
resolvedStatus: z.nativeEnum(SecretScanningResolvedStatus).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
risks: SecretScanningGitRisksSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const risks = await server.services.secretScanning.getAllRisksByOrg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
orgId: req.params.organizationId,
|
||||
filter: {
|
||||
repositoryNames: req.query.repositoryNames,
|
||||
resolvedStatus: req.query.resolvedStatus
|
||||
}
|
||||
});
|
||||
return { risks };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/organization/:organizationId/risks",
|
||||
method: "GET",
|
||||
@ -105,20 +148,46 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
|
||||
},
|
||||
schema: {
|
||||
params: z.object({ organizationId: z.string().trim() }),
|
||||
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).default(0),
|
||||
limit: z.coerce.number().min(1).max(20000).default(100),
|
||||
orderBy: z.enum(["createdAt", "name"]).default("createdAt"),
|
||||
orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.DESC),
|
||||
repositoryNames: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? val.split(",") : undefined)),
|
||||
resolvedStatus: z.nativeEnum(SecretScanningResolvedStatus).optional()
|
||||
}),
|
||||
|
||||
response: {
|
||||
200: z.object({ risks: SecretScanningGitRisksSchema.array() })
|
||||
200: z.object({
|
||||
risks: SecretScanningGitRisksSchema.array(),
|
||||
totalCount: z.number(),
|
||||
repos: z.array(z.string())
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { risks } = await server.services.secretScanning.getRisksByOrg({
|
||||
const { risks, totalCount, repos } = await server.services.secretScanning.getRisksByOrg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
orgId: req.params.organizationId
|
||||
orgId: req.params.organizationId,
|
||||
filter: {
|
||||
limit: req.query.limit,
|
||||
offset: req.query.offset,
|
||||
orderBy: req.query.orderBy,
|
||||
orderDirection: req.query.orderDirection,
|
||||
repositoryNames: req.query.repositoryNames,
|
||||
resolvedStatus: req.query.resolvedStatus
|
||||
}
|
||||
});
|
||||
return { risks };
|
||||
return { risks, totalCount, repos };
|
||||
}
|
||||
});
|
||||
|
||||
|
backend/src/ee/routes/v1/ssh-certificate-authority-router.ts (new file, 279 lines)
@@ -0,0 +1,279 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
|
||||
import { SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
|
||||
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
|
||||
export const registerSshCaRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Create SSH CA",
|
||||
body: z.object({
|
||||
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
|
||||
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
|
||||
keyAlgorithm: z
|
||||
.nativeEnum(CertKeyAlgorithm)
|
||||
.default(CertKeyAlgorithm.RSA_2048)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
ca: sanitizedSshCa.extend({
|
||||
publicKey: z.string()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ca = await server.services.sshCertificateAuthority.createSshCa({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_SSH_CA,
|
||||
metadata: {
|
||||
sshCaId: ca.id,
|
||||
friendlyName: ca.friendlyName
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
ca
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:sshCaId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get SSH CA",
|
||||
params: z.object({
|
||||
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET.sshCaId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
ca: sanitizedSshCa.extend({
|
||||
publicKey: z.string()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ca = await server.services.sshCertificateAuthority.getSshCaById({
|
||||
caId: req.params.sshCaId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.GET_SSH_CA,
|
||||
metadata: {
|
||||
sshCaId: ca.id,
|
||||
friendlyName: ca.friendlyName
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
ca
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:sshCaId/public-key",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get public key of SSH CA",
|
||||
params: z.object({
|
||||
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_PUBLIC_KEY.sshCaId)
|
||||
}),
|
||||
response: {
|
||||
200: z.string()
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const publicKey = await server.services.sshCertificateAuthority.getSshCaPublicKey({
|
||||
caId: req.params.sshCaId
|
||||
});
|
||||
|
||||
return publicKey;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:sshCaId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Update SSH CA",
|
||||
params: z.object({
|
||||
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.sshCaId)
|
||||
}),
|
||||
body: z.object({
|
||||
friendlyName: z.string().optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.friendlyName),
|
||||
status: z.nativeEnum(SshCaStatus).optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.status)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
ca: sanitizedSshCa.extend({
|
||||
publicKey: z.string()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ca = await server.services.sshCertificateAuthority.updateSshCaById({
|
||||
caId: req.params.sshCaId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.UPDATE_SSH_CA,
|
||||
metadata: {
|
||||
sshCaId: ca.id,
|
||||
friendlyName: ca.friendlyName,
|
||||
status: ca.status as SshCaStatus
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
ca
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:sshCaId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Delete SSH CA",
|
||||
params: z.object({
|
||||
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.DELETE.sshCaId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
ca: sanitizedSshCa
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const ca = await server.services.sshCertificateAuthority.deleteSshCaById({
|
||||
caId: req.params.sshCaId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_SSH_CA,
|
||||
metadata: {
|
||||
sshCaId: ca.id,
|
||||
friendlyName: ca.friendlyName
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
ca
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:sshCaId/certificate-templates",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get list of certificate templates for the SSH CA",
|
||||
params: z.object({
|
||||
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_CERTIFICATE_TEMPLATES.sshCaId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificateTemplates: sanitizedSshCertificateTemplate.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { certificateTemplates, ca } = await server.services.sshCertificateAuthority.getSshCaCertificateTemplates({
|
||||
caId: req.params.sshCaId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES,
|
||||
metadata: {
|
||||
sshCaId: ca.id,
|
||||
friendlyName: ca.friendlyName
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificateTemplates
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
backend/src/ee/routes/v1/ssh-certificate-router.ts (new file, 164 lines)
@@ -0,0 +1,164 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
|
||||
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/sign",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Sign SSH public key",
|
||||
body: z.object({
|
||||
certificateTemplateId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certificateTemplateId),
|
||||
publicKey: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.publicKey),
|
||||
certType: z
|
||||
.nativeEnum(SshCertType)
|
||||
.default(SshCertType.USER)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certType),
|
||||
principals: z
|
||||
.array(z.string().transform((val) => val.trim()))
|
||||
.nonempty("Principals array must not be empty")
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.principals),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.ttl),
|
||||
keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.keyId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.serialNumber),
|
||||
signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.signedKey)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { serialNumber, signedPublicKey, certificateTemplate, ttl, keyId } =
|
||||
await server.services.sshCertificateAuthority.signSshKey({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: req.permission.orgId,
|
||||
event: {
|
||||
type: EventType.SIGN_SSH_KEY,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id,
|
||||
certType: req.body.certType,
|
||||
principals: req.body.principals,
|
||||
ttl: String(ttl),
|
||||
keyId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
signedKey: signedPublicKey
|
||||
};
|
||||
}
|
||||
});
|
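A hedged usage sketch for the /sign route above; the /api/v1/ssh/certificates prefix is inferred from the /ssh and /certificates registrations earlier in this diff and may not match the deployed path, and the token, template ID, and certType string are placeholders.

// Hypothetical request to the SSH public key signing endpoint defined above
const signSshPublicKey = async (token: string, publicKey: string) => {
  const res = await fetch("https://app.infisical.com/api/v1/ssh/certificates/sign", {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      certificateTemplateId: "<template-id>",
      publicKey,
      certType: "user", // assumed string value of SshCertType.USER
      principals: ["ec2-user"],
      ttl: "1h"
    })
  });
  return res.json(); // { serialNumber, signedKey }
};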
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/issue",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Issue SSH credentials (certificate + key)",
|
||||
body: z.object({
|
||||
certificateTemplateId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certificateTemplateId),
|
||||
keyAlgorithm: z
|
||||
.nativeEnum(CertKeyAlgorithm)
|
||||
.default(CertKeyAlgorithm.RSA_2048)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm),
|
||||
certType: z
|
||||
.nativeEnum(SshCertType)
|
||||
.default(SshCertType.USER)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certType),
|
||||
principals: z
|
||||
.array(z.string().transform((val) => val.trim()))
|
||||
.nonempty("Principals array must not be empty")
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.principals),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.ttl),
|
||||
keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.serialNumber),
|
||||
signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.signedKey),
|
||||
privateKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.privateKey),
|
||||
publicKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.publicKey),
|
||||
keyAlgorithm: z
|
||||
.nativeEnum(CertKeyAlgorithm)
|
||||
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { serialNumber, signedPublicKey, privateKey, publicKey, certificateTemplate, ttl, keyId } =
|
||||
await server.services.sshCertificateAuthority.issueSshCreds({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: req.permission.orgId,
|
||||
event: {
|
||||
type: EventType.ISSUE_SSH_CREDS,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id,
|
||||
keyAlgorithm: req.body.keyAlgorithm,
|
||||
certType: req.body.certType,
|
||||
principals: req.body.principals,
|
||||
ttl: String(ttl),
|
||||
keyId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
signedKey: signedPublicKey,
|
||||
privateKey,
|
||||
publicKey,
|
||||
keyAlgorithm: req.body.keyAlgorithm
|
||||
};
|
||||
}
|
||||
});
|
||||
};
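For orientation, a minimal sketch of a request the /issue schema above would accept. It is illustrative only: the base URL, the router mount path, the bearer token, and the template ID are placeholders (the mount path is registered elsewhere and does not appear in this diff), and the enum string values for keyAlgorithm and certType are assumed from the defaults shown in the schema.

// Hypothetical client call against the /issue route defined above.
// Everything marked "placeholder" or "assumed" is not taken from this diff.
const issueBody = {
  certificateTemplateId: "3f0c2a7e-placeholder", // placeholder template ID
  keyAlgorithm: "RSA_2048", // assumed string value of CertKeyAlgorithm.RSA_2048 (the schema default)
  certType: "user", // assumed string value of SshCertType.USER (the schema default)
  principals: ["deploy"], // non-empty; each entry is trimmed by the schema transform
  ttl: "1h", // optional; must satisfy ms(val) > 0
  keyId: "ci-runner-01" // optional; trimmed, max 50 characters
};

const res = await fetch("https://infisical.example.com/<ssh-ca-mount-path>/issue", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`, // JWT or identity access token per verifyAuth
    "Content-Type": "application/json"
  },
  body: JSON.stringify(issueBody)
});

// On 200, the response matches the schema above:
// { serialNumber, signedKey, privateKey, publicKey, keyAlgorithm }
const creds = await res.json();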
|
backend/src/ee/routes/v1/ssh-certificate-template-router.ts (new file, 258 lines)
@@ -0,0 +1,258 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
|
||||
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
|
||||
import {
|
||||
isValidHostPattern,
|
||||
isValidUserPattern
|
||||
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
|
||||
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerSshCertificateTemplateRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:certificateTemplateId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.GET.certificateTemplateId)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedSshCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const certificateTemplate = await server.services.sshCertificateTemplate.getSshCertTemplate({
|
||||
id: req.params.certificateTemplateId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: certificateTemplate.projectId,
|
||||
event: {
|
||||
type: EventType.GET_SSH_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z
|
||||
.object({
|
||||
sshCaId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.sshCaId),
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(36)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Name must be a valid slug"
|
||||
})
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.name),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.default("1h")
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.ttl),
|
||||
maxTTL: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Max TTL must be a positive number")
|
||||
.default("30d")
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.maxTTL),
|
||||
allowedUsers: z
|
||||
.array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedUsers),
|
||||
allowedHosts: z
|
||||
.array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedHosts),
|
||||
allowUserCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowUserCertificates),
|
||||
allowHostCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowHostCertificates),
|
||||
allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
|
||||
})
|
||||
.refine((data) => ms(data.maxTTL) > ms(data.ttl), {
|
||||
message: "Max TLL must be greater than TTL",
|
||||
path: ["maxTTL"]
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedSshCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { certificateTemplate, ca } = await server.services.sshCertificateTemplate.createSshCertTemplate({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id,
|
||||
sshCaId: ca.id,
|
||||
name: certificateTemplate.name,
|
||||
ttl: certificateTemplate.ttl,
|
||||
maxTTL: certificateTemplate.maxTTL,
|
||||
allowedUsers: certificateTemplate.allowedUsers,
|
||||
allowedHosts: certificateTemplate.allowedHosts,
|
||||
allowUserCertificates: certificateTemplate.allowUserCertificates,
|
||||
allowHostCertificates: certificateTemplate.allowHostCertificates,
|
||||
allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:certificateTemplateId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
status: z.nativeEnum(SshCertTemplateStatus).optional(),
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(36)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.name),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.ttl),
|
||||
maxTTL: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "Max TTL must be a positive number")
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.maxTTL),
|
||||
allowedUsers: z
|
||||
.array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedUsers),
|
||||
allowedHosts: z
|
||||
.array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
|
||||
.optional()
|
||||
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedHosts),
|
||||
allowUserCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowUserCertificates),
|
||||
allowHostCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowHostCertificates),
|
||||
allowCustomKeyIds: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowCustomKeyIds)
|
||||
}),
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedSshCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { certificateTemplate, projectId } = await server.services.sshCertificateTemplate.updateSshCertTemplate({
|
||||
...req.body,
|
||||
id: req.params.certificateTemplateId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
status: certificateTemplate.status as SshCertTemplateStatus,
|
||||
certificateTemplateId: certificateTemplate.id,
|
||||
sshCaId: certificateTemplate.sshCaId,
|
||||
name: certificateTemplate.name,
|
||||
ttl: certificateTemplate.ttl,
|
||||
maxTTL: certificateTemplate.maxTTL,
|
||||
allowedUsers: certificateTemplate.allowedUsers,
|
||||
allowedHosts: certificateTemplate.allowedHosts,
|
||||
allowUserCertificates: certificateTemplate.allowUserCertificates,
|
||||
allowHostCertificates: certificateTemplate.allowHostCertificates,
|
||||
allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:certificateTemplateId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.DELETE.certificateTemplateId)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedSshCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const certificateTemplate = await server.services.sshCertificateTemplate.deleteSshCertTemplate({
|
||||
id: req.params.certificateTemplateId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: certificateTemplate.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
};
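As a quick illustration of the create-template schema above, a body like the following is the shape it expects. The values are examples only; the exact semantics of the user and host patterns come from isValidUserPattern and isValidHostPattern, which live in the validators module and are not shown in this diff.

// Example payload shape for POST / (create SSH certificate template); values are illustrative.
const createTemplateBody = {
  sshCaId: "a1b2c3-placeholder", // placeholder SSH CA ID
  name: "ci-user-template", // must equal slugify(name); 1-36 characters
  ttl: "8h", // ms("8h") > 0; defaults to "1h" when omitted
  maxTTL: "30d", // must be strictly greater than ttl (object-level .refine)
  allowedUsers: ["deploy"], // each entry must pass isValidUserPattern (not shown in this diff)
  allowedHosts: ["*.internal.example.com"], // each entry must pass isValidHostPattern (not shown in this diff)
  allowUserCertificates: true,
  allowHostCertificates: false,
  allowCustomKeyIds: true
};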
|
@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
|
||||
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
|
||||
|
@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
|
||||
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
|
||||
|
@ -36,7 +36,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
)
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
}),
|
||||
response: {
|
||||
@ -91,7 +91,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.optional()
|
||||
.describe(PROJECT_ROLE.UPDATE.slug),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { ProjectType } from "@app/db/schemas";
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
@ -87,14 +87,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
projectId: project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
@ -193,7 +193,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
// Anyone in the project should be able to get the policies.
|
||||
await permissionService.getProjectPermission(actor, actorId, project.id, actorAuthMethod, actorOrgId);
|
||||
await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
|
||||
return accessApprovalPolicies;
|
||||
@ -237,14 +244,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
if (!accessApprovalPolicy) {
|
||||
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
|
||||
}
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
accessApprovalPolicy.projectId,
|
||||
projectId: accessApprovalPolicy.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
|
||||
|
||||
@ -321,14 +328,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const policy = await accessApprovalPolicyDAL.findById(policyId);
|
||||
if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
|
||||
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
policy.projectId,
|
||||
projectId: policy.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
ProjectPermissionSub.SecretApproval
|
||||
@ -372,13 +379,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
const { membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
projectId: project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
@ -411,13 +419,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
policy.projectId,
|
||||
projectId: policy.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
|
||||
import { ProjectMembershipRole } from "@app/db/schemas";
|
||||
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
@ -100,13 +100,14 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
// Anyone can create an access approval request.
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
const { membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
projectId: project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
@ -213,7 +214,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
);
|
||||
|
||||
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
|
||||
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
|
||||
const approvalUrl = `${cfg.SITE_URL}/secret-manager/${project.id}/approval`;
|
||||
|
||||
await triggerSlackNotification({
|
||||
projectId: project.id,
|
||||
@ -273,13 +274,14 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
const { membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
projectId: project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
@ -318,13 +320,14 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
accessApprovalRequest.projectId,
|
||||
projectId: accessApprovalRequest.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
@ -422,13 +425,14 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
const { membership } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
projectId: project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
|
@ -93,7 +93,7 @@ export const auditLogStreamServiceFactory = ({
|
||||
}
|
||||
)
|
||||
.catch((err) => {
|
||||
throw new Error(`Failed to connect with the source ${(err as Error)?.message}`);
|
||||
throw new BadRequestError({ message: `Failed to connect with upstream source: ${(err as Error)?.message}` });
|
||||
});
|
||||
const encryptedHeaders = headers ? infisicalSymmetricEncypt(JSON.stringify(headers)) : undefined;
|
||||
const logStream = await auditLogStreamDAL.create({
|
||||
|
@ -39,11 +39,13 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
offset = 0,
|
||||
actorId,
|
||||
actorType,
|
||||
secretPath,
|
||||
eventType,
|
||||
eventMetadata
|
||||
}: Omit<TFindQuery, "actor" | "eventType"> & {
|
||||
actorId?: string;
|
||||
actorType?: ActorType;
|
||||
secretPath?: string;
|
||||
eventType?: EventType[];
|
||||
eventMetadata?: Record<string, string>;
|
||||
},
|
||||
@ -88,6 +90,10 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
});
|
||||
}
|
||||
|
||||
if (projectId && secretPath) {
|
||||
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
|
||||
}
|
||||
|
||||
// Filter by actor type
|
||||
if (actorType) {
|
||||
void sqlQuery.where("actor", actorType);
|
||||
@ -100,10 +106,10 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
|
||||
// Filter by date range
|
||||
if (startDate) {
|
||||
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, ">=", startDate);
|
||||
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate]);
|
||||
}
|
||||
if (endDate) {
|
||||
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, "<=", endDate);
|
||||
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" <= ?::timestamptz`, [endDate]);
|
||||
}
|
||||
|
||||
// we timeout long running queries to prevent DB resource issues (2 minutes)
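A small standalone sketch of the date-range pattern the hunk above moves to: binding the dates and casting them to timestamptz in SQL rather than relying on the query builder's default comparison. The table name and connection details here are illustrative, not taken from the codebase.

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

// Same idea as the whereRaw calls above: explicit ?::timestamptz casts keep the
// comparison in timestamptz space instead of depending on implicit coercion.
const recentLogs = await db("audit_logs") // illustrative table name
  .whereRaw(`"audit_logs"."createdAt" >= ?::timestamptz`, ["2025-01-01T00:00:00Z"])
  .whereRaw(`"audit_logs"."createdAt" <= ?::timestamptz`, ["2025-01-31T23:59:59Z"])
  .orderBy("createdAt", "desc")
  .limit(20);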
|
||||
|
@ -1,5 +1,6 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
|
||||
@ -26,13 +27,14 @@ export const auditLogServiceFactory = ({
|
||||
const listAuditLogs = async ({ actorAuthMethod, actorId, actorOrgId, actor, filter }: TListProjectAuditLogDTO) => {
|
||||
// Filter logs for specific project
|
||||
if (filter.projectId) {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
filter.projectId,
|
||||
projectId: filter.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
} else {
|
||||
// Organization-wide logs
|
||||
@ -44,10 +46,6 @@ export const auditLogServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
/**
|
||||
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
|
||||
* to the organization level ✅
|
||||
*/
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
}
|
||||
|
||||
@ -62,6 +60,7 @@ export const auditLogServiceFactory = ({
|
||||
actorId: filter.auditLogActorId,
|
||||
actorType: filter.actorType,
|
||||
eventMetadata: filter.eventMetadata,
|
||||
secretPath: filter.secretPath,
|
||||
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
|
||||
});
|
||||
|
||||
@ -79,7 +78,8 @@ export const auditLogServiceFactory = ({
|
||||
}
|
||||
// add all cases in which project id or org id cannot be added
|
||||
if (data.event.type !== EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH) {
|
||||
if (!data.projectId && !data.orgId) throw new BadRequestError({ message: "Must either project id or org id" });
|
||||
if (!data.projectId && !data.orgId)
|
||||
throw new BadRequestError({ message: "Must specify either project id or org id" });
|
||||
}
|
||||
|
||||
return auditLogQueue.pushToLog(data);
|
||||
|
@ -2,12 +2,24 @@ import {
|
||||
TCreateProjectTemplateDTO,
|
||||
TUpdateProjectTemplateDTO
|
||||
} from "@app/ee/services/project-template/project-template-types";
|
||||
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
|
||||
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
import { TProjectPermission } from "@app/lib/types";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
|
||||
import { SecretSync, SecretSyncImportBehavior } from "@app/services/secret-sync/secret-sync-enums";
|
||||
import {
|
||||
TCreateSecretSyncDTO,
|
||||
TDeleteSecretSyncDTO,
|
||||
TSecretSyncRaw,
|
||||
TUpdateSecretSyncDTO
|
||||
} from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
filter: {
|
||||
@ -20,13 +32,14 @@ export type TListProjectAuditLogDTO = {
|
||||
projectId?: string;
|
||||
auditLogActorId?: string;
|
||||
actorType?: ActorType;
|
||||
secretPath?: string;
|
||||
eventMetadata?: Record<string, string>;
|
||||
};
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TCreateAuditLogDTO = {
|
||||
event: Event;
|
||||
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor;
|
||||
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor | UnknownUserActor;
|
||||
orgId?: string;
|
||||
projectId?: string;
|
||||
} & BaseAuthData;
|
||||
@ -143,6 +156,17 @@ export enum EventType {
|
||||
SECRET_APPROVAL_REQUEST = "secret-approval-request",
|
||||
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
|
||||
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
|
||||
SIGN_SSH_KEY = "sign-ssh-key",
|
||||
ISSUE_SSH_CREDS = "issue-ssh-creds",
|
||||
CREATE_SSH_CA = "create-ssh-certificate-authority",
|
||||
GET_SSH_CA = "get-ssh-certificate-authority",
|
||||
UPDATE_SSH_CA = "update-ssh-certificate-authority",
|
||||
DELETE_SSH_CA = "delete-ssh-certificate-authority",
|
||||
GET_SSH_CA_CERTIFICATE_TEMPLATES = "get-ssh-certificate-authority-certificate-templates",
|
||||
CREATE_SSH_CERTIFICATE_TEMPLATE = "create-ssh-certificate-template",
|
||||
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
|
||||
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
|
||||
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
|
||||
CREATE_CA = "create-certificate-authority",
|
||||
GET_CA = "get-certificate-authority",
|
||||
UPDATE_CA = "update-certificate-authority",
|
||||
@ -199,6 +223,7 @@ export enum EventType {
|
||||
UPDATE_CMEK = "update-cmek",
|
||||
DELETE_CMEK = "delete-cmek",
|
||||
GET_CMEKS = "get-cmeks",
|
||||
GET_CMEK = "get-cmek",
|
||||
CMEK_ENCRYPT = "cmek-encrypt",
|
||||
CMEK_DECRYPT = "cmek-decrypt",
|
||||
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
|
||||
@ -208,7 +233,26 @@ export enum EventType {
|
||||
CREATE_PROJECT_TEMPLATE = "create-project-template",
|
||||
UPDATE_PROJECT_TEMPLATE = "update-project-template",
|
||||
DELETE_PROJECT_TEMPLATE = "delete-project-template",
|
||||
APPLY_PROJECT_TEMPLATE = "apply-project-template"
|
||||
APPLY_PROJECT_TEMPLATE = "apply-project-template",
|
||||
GET_APP_CONNECTIONS = "get-app-connections",
|
||||
GET_AVAILABLE_APP_CONNECTIONS_DETAILS = "get-available-app-connections-details",
|
||||
GET_APP_CONNECTION = "get-app-connection",
|
||||
CREATE_APP_CONNECTION = "create-app-connection",
|
||||
UPDATE_APP_CONNECTION = "update-app-connection",
|
||||
DELETE_APP_CONNECTION = "delete-app-connection",
|
||||
CREATE_SHARED_SECRET = "create-shared-secret",
|
||||
DELETE_SHARED_SECRET = "delete-shared-secret",
|
||||
READ_SHARED_SECRET = "read-shared-secret",
|
||||
GET_SECRET_SYNCS = "get-secret-syncs",
|
||||
GET_SECRET_SYNC = "get-secret-sync",
|
||||
CREATE_SECRET_SYNC = "create-secret-sync",
|
||||
UPDATE_SECRET_SYNC = "update-secret-sync",
|
||||
DELETE_SECRET_SYNC = "delete-secret-sync",
|
||||
SECRET_SYNC_SYNC_SECRETS = "secret-sync-sync-secrets",
|
||||
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
|
||||
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets",
|
||||
OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER = "oidc-group-membership-mapping-assign-user",
|
||||
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user"
|
||||
}
|
||||
|
||||
interface UserActorMetadata {
|
||||
@ -231,6 +275,8 @@ interface ScimClientActorMetadata {}
|
||||
|
||||
interface PlatformActorMetadata {}
|
||||
|
||||
interface UnknownUserActorMetadata {}
|
||||
|
||||
export interface UserActor {
|
||||
type: ActorType.USER;
|
||||
metadata: UserActorMetadata;
|
||||
@ -246,6 +292,11 @@ export interface PlatformActor {
|
||||
metadata: PlatformActorMetadata;
|
||||
}
|
||||
|
||||
export interface UnknownUserActor {
|
||||
type: ActorType.UNKNOWN_USER;
|
||||
metadata: UnknownUserActorMetadata;
|
||||
}
|
||||
|
||||
export interface IdentityActor {
|
||||
type: ActorType.IDENTITY;
|
||||
metadata: IdentityActorMetadata;
|
||||
@ -267,6 +318,8 @@ interface GetSecretsEvent {
|
||||
};
|
||||
}
|
||||
|
||||
type TSecretMetadata = { key: string; value: string }[];
|
||||
|
||||
interface GetSecretEvent {
|
||||
type: EventType.GET_SECRET;
|
||||
metadata: {
|
||||
@ -275,6 +328,7 @@ interface GetSecretEvent {
|
||||
secretId: string;
|
||||
secretKey: string;
|
||||
secretVersion: number;
|
||||
secretMetadata?: TSecretMetadata;
|
||||
};
|
||||
}
|
||||
|
||||
@ -286,6 +340,7 @@ interface CreateSecretEvent {
|
||||
secretId: string;
|
||||
secretKey: string;
|
||||
secretVersion: number;
|
||||
secretMetadata?: TSecretMetadata;
|
||||
};
|
||||
}
|
||||
|
||||
@ -294,7 +349,13 @@ interface CreateSecretBatchEvent {
|
||||
metadata: {
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
|
||||
secrets: Array<{
|
||||
secretId: string;
|
||||
secretKey: string;
|
||||
secretPath?: string;
|
||||
secretVersion: number;
|
||||
secretMetadata?: TSecretMetadata;
|
||||
}>;
|
||||
};
|
||||
}
|
||||
|
||||
@ -306,6 +367,7 @@ interface UpdateSecretEvent {
|
||||
secretId: string;
|
||||
secretKey: string;
|
||||
secretVersion: number;
|
||||
secretMetadata?: TSecretMetadata;
|
||||
};
|
||||
}
|
||||
|
||||
@ -313,8 +375,14 @@ interface UpdateSecretBatchEvent {
|
||||
type: EventType.UPDATE_SECRETS;
|
||||
metadata: {
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
|
||||
secretPath?: string;
|
||||
secrets: Array<{
|
||||
secretId: string;
|
||||
secretKey: string;
|
||||
secretVersion: number;
|
||||
secretMetadata?: TSecretMetadata;
|
||||
secretPath?: string;
|
||||
}>;
|
||||
};
|
||||
}
|
||||
|
||||
@ -712,9 +780,9 @@ interface AddIdentityGcpAuthEvent {
|
||||
metadata: {
|
||||
identityId: string;
|
||||
type: string;
|
||||
allowedServiceAccounts: string;
|
||||
allowedProjects: string;
|
||||
allowedZones: string;
|
||||
allowedServiceAccounts?: string | null;
|
||||
allowedProjects?: string | null;
|
||||
allowedZones?: string | null;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
@ -734,9 +802,9 @@ interface UpdateIdentityGcpAuthEvent {
|
||||
metadata: {
|
||||
identityId: string;
|
||||
type?: string;
|
||||
allowedServiceAccounts?: string;
|
||||
allowedProjects?: string;
|
||||
allowedZones?: string;
|
||||
allowedServiceAccounts?: string | null;
|
||||
allowedProjects?: string | null;
|
||||
allowedZones?: string | null;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
@ -1206,6 +1274,117 @@ interface SecretApprovalRequest {
|
||||
};
|
||||
}
|
||||
|
||||
interface SignSshKey {
  type: EventType.SIGN_SSH_KEY;
  metadata: {
    certificateTemplateId: string;
    certType: SshCertType;
    principals: string[];
    ttl: string;
    keyId: string;
  };
}

interface IssueSshCreds {
  type: EventType.ISSUE_SSH_CREDS;
  metadata: {
    certificateTemplateId: string;
    keyAlgorithm: CertKeyAlgorithm;
    certType: SshCertType;
    principals: string[];
    ttl: string;
    keyId: string;
  };
}
|
||||
|
||||
interface CreateSshCa {
|
||||
type: EventType.CREATE_SSH_CA;
|
||||
metadata: {
|
||||
sshCaId: string;
|
||||
friendlyName: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSshCa {
|
||||
type: EventType.GET_SSH_CA;
|
||||
metadata: {
|
||||
sshCaId: string;
|
||||
friendlyName: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateSshCa {
|
||||
type: EventType.UPDATE_SSH_CA;
|
||||
metadata: {
|
||||
sshCaId: string;
|
||||
friendlyName: string;
|
||||
status: SshCaStatus;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteSshCa {
|
||||
type: EventType.DELETE_SSH_CA;
|
||||
metadata: {
|
||||
sshCaId: string;
|
||||
friendlyName: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSshCaCertificateTemplates {
|
||||
type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES;
|
||||
metadata: {
|
||||
sshCaId: string;
|
||||
friendlyName: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateSshCertificateTemplate {
|
||||
type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
sshCaId: string;
|
||||
name: string;
|
||||
ttl: string;
|
||||
maxTTL: string;
|
||||
allowedUsers: string[];
|
||||
allowedHosts: string[];
|
||||
allowUserCertificates: boolean;
|
||||
allowHostCertificates: boolean;
|
||||
allowCustomKeyIds: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSshCertificateTemplate {
|
||||
type: EventType.GET_SSH_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateSshCertificateTemplate {
|
||||
type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
sshCaId: string;
|
||||
name: string;
|
||||
status: SshCertTemplateStatus;
|
||||
ttl: string;
|
||||
maxTTL: string;
|
||||
allowedUsers: string[];
|
||||
allowedHosts: string[];
|
||||
allowUserCertificates: boolean;
|
||||
allowHostCertificates: boolean;
|
||||
allowCustomKeyIds: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteSshCertificateTemplate {
|
||||
type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE;
|
||||
metadata: {
|
||||
certificateTemplateId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateCa {
|
||||
type: EventType.CREATE_CA;
|
||||
metadata: {
|
||||
@ -1676,6 +1855,13 @@ interface GetCmeksEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCmekEvent {
|
||||
type: EventType.GET_CMEK;
|
||||
metadata: {
|
||||
keyId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CmekEncryptEvent {
|
||||
type: EventType.CMEK_ENCRYPT;
|
||||
metadata: {
|
||||
@ -1742,6 +1928,169 @@ interface ApplyProjectTemplateEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetAppConnectionsEvent {
|
||||
type: EventType.GET_APP_CONNECTIONS;
|
||||
metadata: {
|
||||
app?: AppConnection;
|
||||
count: number;
|
||||
connectionIds: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface GetAvailableAppConnectionsDetailsEvent {
|
||||
type: EventType.GET_AVAILABLE_APP_CONNECTIONS_DETAILS;
|
||||
metadata: {
|
||||
app?: AppConnection;
|
||||
count: number;
|
||||
connectionIds: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface GetAppConnectionEvent {
|
||||
type: EventType.GET_APP_CONNECTION;
|
||||
metadata: {
|
||||
connectionId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateAppConnectionEvent {
|
||||
type: EventType.CREATE_APP_CONNECTION;
|
||||
metadata: Omit<TCreateAppConnectionDTO, "credentials"> & { connectionId: string };
|
||||
}
|
||||
|
||||
interface UpdateAppConnectionEvent {
|
||||
type: EventType.UPDATE_APP_CONNECTION;
|
||||
metadata: Omit<TUpdateAppConnectionDTO, "credentials"> & { connectionId: string; credentialsUpdated: boolean };
|
||||
}
|
||||
|
||||
interface DeleteAppConnectionEvent {
|
||||
type: EventType.DELETE_APP_CONNECTION;
|
||||
metadata: {
|
||||
connectionId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateSharedSecretEvent {
|
||||
type: EventType.CREATE_SHARED_SECRET;
|
||||
metadata: {
|
||||
id: string;
|
||||
accessType: string;
|
||||
name?: string;
|
||||
expiresAfterViews?: number;
|
||||
usingPassword: boolean;
|
||||
expiresAt: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteSharedSecretEvent {
|
||||
type: EventType.DELETE_SHARED_SECRET;
|
||||
metadata: {
|
||||
id: string;
|
||||
name?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface ReadSharedSecretEvent {
|
||||
type: EventType.READ_SHARED_SECRET;
|
||||
metadata: {
|
||||
id: string;
|
||||
name?: string;
|
||||
accessType: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSecretSyncsEvent {
|
||||
type: EventType.GET_SECRET_SYNCS;
|
||||
metadata: {
|
||||
destination?: SecretSync;
|
||||
count: number;
|
||||
syncIds: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSecretSyncEvent {
|
||||
type: EventType.GET_SECRET_SYNC;
|
||||
metadata: {
|
||||
destination: SecretSync;
|
||||
syncId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateSecretSyncEvent {
|
||||
type: EventType.CREATE_SECRET_SYNC;
|
||||
metadata: Omit<TCreateSecretSyncDTO, "projectId"> & { syncId: string };
|
||||
}
|
||||
|
||||
interface UpdateSecretSyncEvent {
|
||||
type: EventType.UPDATE_SECRET_SYNC;
|
||||
metadata: TUpdateSecretSyncDTO;
|
||||
}
|
||||
|
||||
interface DeleteSecretSyncEvent {
|
||||
type: EventType.DELETE_SECRET_SYNC;
|
||||
metadata: TDeleteSecretSyncDTO;
|
||||
}
|
||||
|
||||
interface SecretSyncSyncSecretsEvent {
|
||||
type: EventType.SECRET_SYNC_SYNC_SECRETS;
|
||||
metadata: Pick<
|
||||
TSecretSyncRaw,
|
||||
"syncOptions" | "destinationConfig" | "destination" | "syncStatus" | "connectionId" | "folderId"
|
||||
> & {
|
||||
syncId: string;
|
||||
syncMessage: string | null;
|
||||
jobId: string;
|
||||
jobRanAt: Date;
|
||||
};
|
||||
}
|
||||
|
||||
interface SecretSyncImportSecretsEvent {
|
||||
type: EventType.SECRET_SYNC_IMPORT_SECRETS;
|
||||
metadata: Pick<
|
||||
TSecretSyncRaw,
|
||||
"syncOptions" | "destinationConfig" | "destination" | "importStatus" | "connectionId" | "folderId"
|
||||
> & {
|
||||
syncId: string;
|
||||
importMessage: string | null;
|
||||
jobId: string;
|
||||
jobRanAt: Date;
|
||||
importBehavior: SecretSyncImportBehavior;
|
||||
};
|
||||
}
|
||||
|
||||
interface SecretSyncRemoveSecretsEvent {
|
||||
type: EventType.SECRET_SYNC_REMOVE_SECRETS;
|
||||
metadata: Pick<
|
||||
TSecretSyncRaw,
|
||||
"syncOptions" | "destinationConfig" | "destination" | "removeStatus" | "connectionId" | "folderId"
|
||||
> & {
|
||||
syncId: string;
|
||||
removeMessage: string | null;
|
||||
jobId: string;
|
||||
jobRanAt: Date;
|
||||
};
|
||||
}
|
||||
|
||||
interface OidcGroupMembershipMappingAssignUserEvent {
|
||||
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER;
|
||||
metadata: {
|
||||
assignedToGroups: { id: string; name: string }[];
|
||||
userId: string;
|
||||
userEmail: string;
|
||||
userGroupsClaim: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface OidcGroupMembershipMappingRemoveUserEvent {
|
||||
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER;
|
||||
metadata: {
|
||||
removedFromGroups: { id: string; name: string }[];
|
||||
userId: string;
|
||||
userEmail: string;
|
||||
userGroupsClaim: string[];
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| GetSecretsEvent
|
||||
| GetSecretEvent
|
||||
@ -1837,6 +2186,17 @@ export type Event =
|
||||
| SecretApprovalClosed
|
||||
| SecretApprovalRequest
|
||||
| SecretApprovalReopened
|
||||
| SignSshKey
|
||||
| IssueSshCreds
|
||||
| CreateSshCa
|
||||
| GetSshCa
|
||||
| UpdateSshCa
|
||||
| DeleteSshCa
|
||||
| GetSshCaCertificateTemplates
|
||||
| CreateSshCertificateTemplate
|
||||
| UpdateSshCertificateTemplate
|
||||
| GetSshCertificateTemplate
|
||||
| DeleteSshCertificateTemplate
|
||||
| CreateCa
|
||||
| GetCa
|
||||
| UpdateCa
|
||||
@ -1892,6 +2252,7 @@ export type Event =
|
||||
| CreateCmekEvent
|
||||
| UpdateCmekEvent
|
||||
| DeleteCmekEvent
|
||||
| GetCmekEvent
|
||||
| GetCmeksEvent
|
||||
| CmekEncryptEvent
|
||||
| CmekDecryptEvent
|
||||
@ -1902,4 +2263,23 @@ export type Event =
|
||||
| CreateProjectTemplateEvent
|
||||
| UpdateProjectTemplateEvent
|
||||
| DeleteProjectTemplateEvent
|
||||
| ApplyProjectTemplateEvent;
|
||||
| ApplyProjectTemplateEvent
|
||||
| GetAppConnectionsEvent
|
||||
| GetAvailableAppConnectionsDetailsEvent
|
||||
| GetAppConnectionEvent
|
||||
| CreateAppConnectionEvent
|
||||
| UpdateAppConnectionEvent
|
||||
| DeleteAppConnectionEvent
|
||||
| CreateSharedSecretEvent
|
||||
| DeleteSharedSecretEvent
|
||||
| ReadSharedSecretEvent
|
||||
| GetSecretSyncsEvent
|
||||
| GetSecretSyncEvent
|
||||
| CreateSecretSyncEvent
|
||||
| UpdateSecretSyncEvent
|
||||
| DeleteSecretSyncEvent
|
||||
| SecretSyncSyncSecretsEvent
|
||||
| SecretSyncImportSecretsEvent
|
||||
| SecretSyncRemoveSecretsEvent
|
||||
| OidcGroupMembershipMappingAssignUserEvent
|
||||
| OidcGroupMembershipMappingRemoveUserEvent;
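Since Event is a discriminated union keyed on type, consumers get fully typed metadata once they narrow on the event type. A brief sketch follows; the describeEvent helper is hypothetical, but the import path mirrors the one used elsewhere in this changeset.

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import type { Event } from "@app/ee/services/audit-log/audit-log-types";

// Hypothetical helper: narrowing on `type` gives the branch-specific metadata shape.
const describeEvent = (event: Event): string => {
  switch (event.type) {
    case EventType.SIGN_SSH_KEY:
      return `signed SSH key for principals: ${event.metadata.principals.join(", ")}`;
    case EventType.ISSUE_SSH_CREDS:
      return `issued SSH credentials (${event.metadata.keyAlgorithm}) for key ID ${event.metadata.keyId}`;
    default:
      return event.type;
  }
};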
|
||||
|
@ -1,6 +1,7 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
@ -66,13 +67,14 @@ export const certificateAuthorityCrlServiceFactory = ({
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
ca.projectId,
|
||||
projectId: ca.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.CertificateManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
|
@ -37,11 +37,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
|
||||
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
|
||||
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
|
||||
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
|
||||
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
|
||||
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
|
||||
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
|
||||
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
|
||||
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
|
||||
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
|
||||
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
|
||||
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
|
||||
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
|
||||
@ -59,11 +55,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
|
||||
type: doc.dynType,
|
||||
defaultTTL: doc.dynDefaultTTL,
|
||||
maxTTL: doc.dynMaxTTL,
|
||||
inputIV: doc.dynInputIV,
|
||||
inputTag: doc.dynInputTag,
|
||||
inputCiphertext: doc.dynInputCiphertext,
|
||||
algorithm: doc.dynAlgorithm,
|
||||
keyEncoding: doc.dynKeyEncoding,
|
||||
encryptedInput: doc.dynEncryptedInput,
|
||||
folderId: doc.dynFolderId,
|
||||
status: doc.dynStatus,
|
||||
statusDetails: doc.dynStatusDetails,
|
||||
|
@ -1,8 +1,10 @@
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
|
||||
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
|
||||
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
|
||||
@ -14,6 +16,8 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
|
||||
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
|
||||
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
|
||||
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
|
||||
};
|
||||
|
||||
export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
|
||||
@ -22,7 +26,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
|
||||
queueService,
|
||||
dynamicSecretDAL,
|
||||
dynamicSecretProviders,
|
||||
dynamicSecretLeaseDAL
|
||||
dynamicSecretLeaseDAL,
|
||||
kmsService,
|
||||
folderDAL
|
||||
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
|
||||
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
|
||||
await queueService.queue(
|
||||
@ -76,15 +82,21 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
|
||||
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
|
||||
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
|
||||
|
||||
const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
|
||||
if (!folder)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find folder with ID '${dynamicSecretLease.dynamicSecret.folderId}'`
|
||||
});
|
||||
|
||||
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId: folder.projectId
|
||||
});
|
||||
|
||||
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
|
||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||
const decryptedStoredInput = JSON.parse(
|
||||
infisicalSymmetricDecrypt({
|
||||
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: dynamicSecretCfg.inputCiphertext,
|
||||
tag: dynamicSecretCfg.inputTag,
|
||||
iv: dynamicSecretCfg.inputIV
|
||||
})
|
||||
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||
) as object;
|
||||
|
||||
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
|
||||
@ -100,16 +112,22 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
|
||||
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
|
||||
throw new DisableRotationErrors({ message: "Document not deleted" });
|
||||
|
||||
const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
|
||||
if (!folder)
|
||||
throw new NotFoundError({
|
||||
message: `Failed to find folder with ID '${dynamicSecretCfg.folderId}'`
|
||||
});
|
||||
|
||||
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId: folder.projectId
|
||||
});
|
||||
|
||||
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
|
||||
if (dynamicSecretLeases.length) {
|
||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||
const decryptedStoredInput = JSON.parse(
|
||||
infisicalSymmetricDecrypt({
|
||||
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: dynamicSecretCfg.inputCiphertext,
|
||||
tag: dynamicSecretCfg.inputTag,
|
||||
iv: dynamicSecretCfg.inputIV
|
||||
})
|
||||
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||
) as object;
|
||||
|
||||
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import ms from "ms";
|
||||
|
||||
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import {
|
||||
@ -9,9 +9,10 @@ import {
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -37,6 +38,7 @@ type TDynamicSecretLeaseServiceFactoryDep = {
 folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
 permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
 projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
+kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };

 export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -49,7 +51,8 @@ export const dynamicSecretLeaseServiceFactory = ({
 permissionService,
 dynamicSecretQueueService,
 projectDAL,
-licenseService
+licenseService,
+kmsService
 }: TDynamicSecretLeaseServiceFactoryDep) => {
 const create = async ({
 environmentSlug,
@@ -67,14 +70,14 @@ export const dynamicSecretLeaseServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
-ForbidOnInvalidProjectType(ProjectType.SecretManager);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.Lease,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -104,13 +107,14 @@ export const dynamicSecretLeaseServiceFactory = ({
 throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });

 const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];

+const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.SecretManager,
+projectId
+});

 const decryptedStoredInput = JSON.parse(
-infisicalSymmetricDecrypt({
-keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-ciphertext: dynamicSecretCfg.inputCiphertext,
-tag: dynamicSecretCfg.inputTag,
-iv: dynamicSecretCfg.inputIV
-})
+secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
 ) as object;

 const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -147,19 +151,24 @@ export const dynamicSecretLeaseServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
-ForbidOnInvalidProjectType(ProjectType.SecretManager);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.Lease,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
 );

+const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.SecretManager,
+projectId
+});

 const plan = await licenseService.getPlan(actorOrgId);
 if (!plan?.dynamicSecret) {
 throw new BadRequestError({
@@ -181,12 +190,7 @@ export const dynamicSecretLeaseServiceFactory = ({
 const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
 const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
 const decryptedStoredInput = JSON.parse(
-infisicalSymmetricDecrypt({
-keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-ciphertext: dynamicSecretCfg.inputCiphertext,
-tag: dynamicSecretCfg.inputTag,
-iv: dynamicSecretCfg.inputIV
-})
+secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
 ) as object;

 const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -227,19 +231,24 @@ export const dynamicSecretLeaseServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
-ForbidOnInvalidProjectType(ProjectType.SecretManager);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.Lease,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
 );

+const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.SecretManager,
+projectId
+});

 const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
 if (!folder)
 throw new NotFoundError({
@@ -253,12 +262,7 @@ export const dynamicSecretLeaseServiceFactory = ({
 const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
 const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
 const decryptedStoredInput = JSON.parse(
-infisicalSymmetricDecrypt({
-keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-ciphertext: dynamicSecretCfg.inputCiphertext,
-tag: dynamicSecretCfg.inputTag,
-iv: dynamicSecretCfg.inputIV
-})
+secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
 ) as object;

 const revokeResponse = await selectedProvider
@@ -297,13 +301,14 @@ export const dynamicSecretLeaseServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.Lease,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -339,13 +344,14 @@ export const dynamicSecretLeaseServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.Lease,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
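The recurring edit in this service swaps the positional getProjectPermission call for a single options object and replaces the ForbidOnInvalidProjectType check with an actionProjectType field. A minimal sketch of the new call shape, assuming a hypothetical checkLeasePermission helper and a structural stand-in type for the permission service (the DTO field types are simplified for illustration):

import { ForbiddenError, MongoAbility, subject } from "@casl/ability";

import { ActionProjectType } from "@app/db/schemas";
import {
  ProjectPermissionDynamicSecretActions,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";

// Structural stand-in for the real permission service factory type.
type MinimalPermissionService = {
  getProjectPermission: (arg: {
    actor: string;
    actorId: string;
    projectId: string;
    actorAuthMethod: string;
    actorOrgId: string;
    actionProjectType: ActionProjectType;
  }) => Promise<{ permission: MongoAbility }>;
};

const checkLeasePermission = async (
  permissionService: MinimalPermissionService,
  dto: {
    actor: string;
    actorId: string;
    actorAuthMethod: string;
    actorOrgId: string;
    projectId: string;
    environmentSlug: string;
    path: string;
  }
) => {
  // One object argument instead of positional parameters; the project type is
  // enforced through actionProjectType rather than a separate ForbidOnInvalidProjectType() call.
  const { permission } = await permissionService.getProjectPermission({
    actor: dto.actor,
    actorId: dto.actorId,
    projectId: dto.projectId,
    actorAuthMethod: dto.actorAuthMethod,
    actorOrgId: dto.actorOrgId,
    actionProjectType: ActionProjectType.SecretManager
  });

  // The CASL check itself is unchanged.
  ForbiddenError.from(permission).throwUnlessCan(
    ProjectPermissionDynamicSecretActions.Lease,
    subject(ProjectPermissionSub.DynamicSecrets, { environment: dto.environmentSlug, secretPath: dto.path })
  );
};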
@@ -1,15 +1,16 @@
 import { ForbiddenError, subject } from "@casl/ability";

-import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
+import { ActionProjectType } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import {
 ProjectPermissionDynamicSecretActions,
 ProjectPermissionSub
 } from "@app/ee/services/permission/project-permission";
-import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -42,6 +43,7 @@ type TDynamicSecretServiceFactoryDep = {
 folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
 projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
 permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
+kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };

 export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -54,7 +56,8 @@ export const dynamicSecretServiceFactory = ({
 dynamicSecretProviders,
 permissionService,
 dynamicSecretQueueService,
-projectDAL
+projectDAL,
+kmsService
 }: TDynamicSecretServiceFactoryDep) => {
 const create = async ({
 path,
@@ -73,14 +76,14 @@ export const dynamicSecretServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
-ForbidOnInvalidProjectType(ProjectType.SecretManager);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.CreateRootCredential,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -108,16 +111,15 @@ export const dynamicSecretServiceFactory = ({
 const isConnected = await selectedProvider.validateConnection(provider.inputs);
 if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });

-const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
+const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.SecretManager,
+projectId
+});

 const dynamicSecretCfg = await dynamicSecretDAL.create({
 type: provider.type,
 version: 1,
-inputIV: encryptedInput.iv,
-inputTag: encryptedInput.tag,
-inputCiphertext: encryptedInput.ciphertext,
-algorithm: encryptedInput.algorithm,
-keyEncoding: encryptedInput.encoding,
+encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
 maxTTL,
 defaultTTL,
 folderId: folder.id,
@@ -145,14 +147,14 @@ export const dynamicSecretServiceFactory = ({

 const projectId = project.id;

-const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
-ForbidOnInvalidProjectType(ProjectType.SecretManager);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.EditRootCredential,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -180,15 +182,15 @@ export const dynamicSecretServiceFactory = ({
 if (existingDynamicSecret)
 throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
 }
+const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
+await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.SecretManager,
+projectId
+});

 const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
 const decryptedStoredInput = JSON.parse(
-infisicalSymmetricDecrypt({
-keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-ciphertext: dynamicSecretCfg.inputCiphertext,
-tag: dynamicSecretCfg.inputTag,
-iv: dynamicSecretCfg.inputIV
-})
+secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
 ) as object;
 const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
 const updatedInput = await selectedProvider.validateProviderInputs(newInput);
@@ -196,13 +198,8 @@ export const dynamicSecretServiceFactory = ({
 const isConnected = await selectedProvider.validateConnection(newInput);
 if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });

-const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
 const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
-inputIV: encryptedInput.iv,
-inputTag: encryptedInput.tag,
-inputCiphertext: encryptedInput.ciphertext,
-algorithm: encryptedInput.algorithm,
-keyEncoding: encryptedInput.encoding,
+encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
 maxTTL,
 defaultTTL,
 name: newName ?? name,
@@ -229,14 +226,14 @@ export const dynamicSecretServiceFactory = ({

 const projectId = project.id;

-const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
-ForbidOnInvalidProjectType(ProjectType.SecretManager);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.DeleteRootCredential,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -290,13 +287,14 @@ export const dynamicSecretServiceFactory = ({
 if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

 const projectId = project.id;
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.ReadRootCredential,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -314,13 +312,13 @@ export const dynamicSecretServiceFactory = ({
 if (!dynamicSecretCfg) {
 throw new NotFoundError({ message: `Dynamic secret with name '${name} in folder '${path}' not found` });
 }
+const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.SecretManager,
+projectId
+});

 const decryptedStoredInput = JSON.parse(
-infisicalSymmetricDecrypt({
-keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-ciphertext: dynamicSecretCfg.inputCiphertext,
-tag: dynamicSecretCfg.inputTag,
-iv: dynamicSecretCfg.inputIV
-})
+secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
 ) as object;
 const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
 const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
@@ -340,13 +338,14 @@ export const dynamicSecretServiceFactory = ({
 isInternal
 }: TListDynamicSecretsMultiEnvDTO) => {
 if (!isInternal) {
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});

 // verify user has access to each env in request
 environmentSlugs.forEach((environmentSlug) =>
@@ -383,13 +382,14 @@ export const dynamicSecretServiceFactory = ({
 search,
 projectId
 }: TGetDynamicSecretsCountDTO) => {
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.ReadRootCredential,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -431,13 +431,14 @@ export const dynamicSecretServiceFactory = ({
 projectId = project.id;
 }

-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionDynamicSecretActions.ReadRootCredential,
 subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -462,13 +463,14 @@ export const dynamicSecretServiceFactory = ({
 { folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO,
 actor: OrgServiceActor
 ) => {
-const { permission } = await permissionService.getProjectPermission(
-actor.type,
-actor.id,
+const { permission } = await permissionService.getProjectPermission({
+actor: actor.type,
+actorId: actor.id,
 projectId,
-actor.authMethod,
-actor.orgId
-);
+actorAuthMethod: actor.authMethod,
+actorOrgId: actor.orgId,
+actionProjectType: ActionProjectType.SecretManager
+});

 const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
 permission.can(
@@ -507,13 +509,14 @@ export const dynamicSecretServiceFactory = ({
 ...params
 }: TListDynamicSecretsMultiEnvDTO) => {
 if (!isInternal) {
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
 projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.SecretManager
+});

 // verify user has access to each env in request
 environmentSlugs.forEach((environmentSlug) =>
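Both services above now protect dynamic-secret provider inputs with a project-scoped secret-manager data key instead of the instance-wide symmetric root key. A minimal round-trip sketch of that pattern, using only the createCipherPairWithDataKey method visible in the diff (the encryptor/decryptor shapes are inferred from the call sites above):

import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

const roundTripProviderInputs = async (
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  projectId: string,
  inputs: object
) => {
  // One data-key cipher pair per project: the encryptor is used at write time,
  // the decryptor whenever the stored inputs are needed again.
  const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });

  // Persist only this blob (the encryptedInput column in the diff above).
  const cipherTextBlob = encryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob;

  // Reading back: decrypt the blob and parse it into the provider input object.
  const decrypted = JSON.parse(decryptor({ cipherTextBlob }).toString()) as object;

  return { cipherTextBlob, decrypted };
};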
@@ -34,6 +34,8 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

 const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
 const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
+const isMsSQLClient = providerInputs.client === SqlProviders.MsSQL;

 const db = knex({
 client: providerInputs.client,
 connection: {
@@ -43,7 +45,16 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
 user: providerInputs.username,
 password: providerInputs.password,
 ssl,
-pool: { min: 0, max: 1 }
+pool: { min: 0, max: 1 },
+// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
+// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
+// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
+options: isMsSQLClient
+? {
+trustServerCertificate: !providerInputs.ca,
+cryptoCredentialsDetails: providerInputs.ca ? { ca: providerInputs.ca } : {}
+}
+: undefined
 },
 acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
 });
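The SQL provider now forwards tedious-specific connection options only for Microsoft SQL Server, trusting the server certificate only when no CA bundle is supplied. A small sketch of that conditional, factored into a hypothetical helper (the "mssql" literal stands in for SqlProviders.MsSQL, and the env variable is a placeholder):

// Builds the driver `options` block that the diff above passes through knex to tedious.
const buildMsSqlDriverOptions = (client: string, ca?: string) =>
  client === "mssql"
    ? {
        // Without a CA we have to trust whatever certificate the server presents;
        // with a CA, certificate validation uses the supplied bundle instead.
        trustServerCertificate: !ca,
        cryptoCredentialsDetails: ca ? { ca } : {}
      }
    : undefined;

// Example: spread into the knex connection config alongside the ssl and pool settings.
const mssqlOptions = buildMsSqlDriverOptions("mssql", process.env.DB_CA_CERT);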
@@ -1,7 +1,9 @@
+import { KMSServiceException } from "@aws-sdk/client-kms";
+import { STSServiceException } from "@aws-sdk/client-sts";
 import { ForbiddenError } from "@casl/ability";
 import slugify from "@sindresorhus/slugify";

-import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
@@ -71,7 +73,16 @@ export const externalKmsServiceFactory = ({
 switch (provider.type) {
 case KmsProviders.Aws:
 {
-const externalKms = await AwsKmsProviderFactory({ inputs: provider.inputs });
+const externalKms = await AwsKmsProviderFactory({ inputs: provider.inputs }).catch((error) => {
+if (error instanceof STSServiceException || error instanceof KMSServiceException) {
+throw new InternalServerError({
+message: error.message ? `AWS error: ${error.message}` : ""
+});
+}
+
+throw error;
+});

 // if missing kms key this generate a new kms key id and returns new provider input
 const newProviderInput = await externalKms.generateInputKmsKey();
 sanitizedProviderInput = JSON.stringify(newProviderInput);
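The external KMS change above catches AWS SDK service exceptions at the provider-factory boundary and resurfaces them as an application-level error. A generic sketch of that mapping, reusing the error classes from the diff (withAwsErrorMapping is a hypothetical helper name):

import { KMSServiceException } from "@aws-sdk/client-kms";
import { STSServiceException } from "@aws-sdk/client-sts";

import { InternalServerError } from "@app/lib/errors";

const withAwsErrorMapping = async <T>(fn: () => Promise<T>): Promise<T> => {
  try {
    return await fn();
  } catch (error) {
    // STS/KMS service exceptions carry AWS's own message; expose it with a clear prefix.
    if (error instanceof STSServiceException || error instanceof KMSServiceException) {
      throw new InternalServerError({ message: error.message ? `AWS error: ${error.message}` : "" });
    }
    // Anything else is unexpected and should propagate unchanged.
    throw error;
  }
};

// Usage mirroring the diff:
// const externalKms = await withAwsErrorMapping(() => AwsKmsProviderFactory({ inputs: provider.inputs }));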
@@ -111,7 +111,7 @@ export const groupDALFactory = (db: TDbClient) => {
 }

 if (search) {
-void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike '%${search}%'`);
+void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
 } else if (username) {
 void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
 }
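The one-line group DAL fix swaps string interpolation for a bound parameter in the raw ILIKE search, which is what closes the injection hole. A standalone sketch of the safe pattern against a hypothetical users table (the connection string is a placeholder):

import knex from "knex";

const searchUsers = async (search: string) => {
  const db = knex({ client: "pg", connection: "postgres://localhost:5432/app" });

  // The search term travels as a binding for the `?` placeholder, so a value such as
  // "%' OR 1=1 --" is treated as a literal pattern rather than as SQL.
  return db("users")
    .select("id", "username")
    .whereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
};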
@@ -2,6 +2,7 @@ import { ForbiddenError } from "@casl/ability";
 import slugify from "@sindresorhus/slugify";

 import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
+import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
 import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
@@ -32,7 +33,7 @@ type TGroupServiceFactoryDep = {
 userDAL: Pick<TUserDALFactory, "find" | "findUserEncKeyByUserIdsBatch" | "transaction" | "findOne">;
 groupDAL: Pick<
 TGroupDALFactory,
-"create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById"
+"create" | "findOne" | "update" | "delete" | "findAllGroupPossibleMembers" | "findById" | "transaction"
 >;
 groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
 orgDAL: Pick<TOrgDALFactory, "findMembership" | "countAllOrgMembers">;
@@ -45,6 +46,7 @@ type TGroupServiceFactoryDep = {
 projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne">;
 };

 export type TGroupServiceFactory = ReturnType<typeof groupServiceFactory>;
@@ -59,7 +61,8 @@ export const groupServiceFactory = ({
 projectBotDAL,
 projectKeyDAL,
 permissionService,
-licenseService
+licenseService,
+oidcConfigDAL
 }: TGroupServiceFactoryDep) => {
 const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
 if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
@@ -88,12 +91,26 @@ export const groupServiceFactory = ({
 if (!hasRequiredPriviledges)
 throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });

-const group = await groupDAL.create({
-name,
-slug: slug || slugify(`${name}-${alphaNumericNanoId(4)}`),
-orgId: actorOrgId,
-role: isCustomRole ? OrgMembershipRole.Custom : role,
-roleId: customRole?.id
-});
+const group = await groupDAL.transaction(async (tx) => {
+const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
+if (existingGroup) {
+throw new BadRequestError({
+message: `Failed to create group with name '${name}'. Group with the same name already exists`
+});
+}
+
+const newGroup = await groupDAL.create(
+{
+name,
+slug: slug || slugify(`${name}-${alphaNumericNanoId(4)}`),
+orgId: actorOrgId,
+role: isCustomRole ? OrgMembershipRole.Custom : role,
+roleId: customRole?.id
+},
+tx
+);
+
+return newGroup;
+});

 return group;
@@ -145,21 +162,36 @@ export const groupServiceFactory = ({
 if (isCustomRole) customRole = customOrgRole;
 }

-const [updatedGroup] = await groupDAL.update(
-{
-id: group.id
-},
-{
-name,
-slug: slug ? slugify(slug) : undefined,
-...(role
-? {
-role: customRole ? OrgMembershipRole.Custom : role,
-roleId: customRole?.id ?? null
-}
-: {})
-}
-);
+const updatedGroup = await groupDAL.transaction(async (tx) => {
+if (name) {
+const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
+
+if (existingGroup && existingGroup.id !== id) {
+throw new BadRequestError({
+message: `Failed to update group with name '${name}'. Group with the same name already exists`
+});
+}
+}
+
+const [updated] = await groupDAL.update(
+{
+id: group.id
+},
+{
+name,
+slug: slug ? slugify(slug) : undefined,
+...(role
+? {
+role: customRole ? OrgMembershipRole.Custom : role,
+roleId: customRole?.id ?? null
+}
+: {})
+},
+tx
+);
+
+return updated;
+});

 return updatedGroup;
 };
@@ -282,6 +314,18 @@ export const groupServiceFactory = ({
 message: `Failed to find group with ID ${id}`
 });

+const oidcConfig = await oidcConfigDAL.findOne({
+orgId: group.orgId,
+isActive: true
+});
+
+if (oidcConfig?.manageGroupMemberships) {
+throw new BadRequestError({
+message:
+"Cannot add user to group: OIDC group membership mapping is enabled - user must be assigned to this group in your OIDC provider."
+});
+}
+
 const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);

 // check if user has broader or equal to privileges than group
@@ -337,6 +381,18 @@ export const groupServiceFactory = ({
 message: `Failed to find group with ID ${id}`
 });

+const oidcConfig = await oidcConfigDAL.findOne({
+orgId: group.orgId,
+isActive: true
+});
+
+if (oidcConfig?.manageGroupMemberships) {
+throw new BadRequestError({
+message:
+"Cannot remove user from group: OIDC group membership mapping is enabled - user must be removed from this group in your OIDC provider."
+});
+}
+
 const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);

 // check if user has broader or equal to privileges than group
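The group service now wraps its duplicate-name check and the insert (or update) in a single groupDAL transaction, so two concurrent requests cannot both pass the check. A minimal sketch of the create path, with the DAL reduced to the three methods the pattern needs (MinimalGroupDAL is illustrative, not the project's real type):

import { BadRequestError } from "@app/lib/errors";

type MinimalGroupDAL = {
  transaction: <T>(cb: (tx: unknown) => Promise<T>) => Promise<T>;
  findOne: (filter: { orgId: string; name: string }, tx?: unknown) => Promise<{ id: string } | undefined>;
  create: (data: { orgId: string; name: string; slug: string }, tx?: unknown) => Promise<{ id: string }>;
};

const createGroupOnce = async (groupDAL: MinimalGroupDAL, orgId: string, name: string, slug: string) =>
  groupDAL.transaction(async (tx) => {
    // The lookup and the insert share one transaction handle, so the uniqueness
    // check cannot be raced by a second request committing in between.
    const existingGroup = await groupDAL.findOne({ orgId, name }, tx);
    if (existingGroup) {
      throw new BadRequestError({
        message: `Failed to create group with name '${name}'. Group with the same name already exists`
      });
    }

    return groupDAL.create({ orgId, name, slug }, tx);
  });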
@@ -1,25 +1,23 @@
 import * as pkcs11js from "pkcs11js";

-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
 import { logger } from "@app/lib/logger";

 import { HsmModule } from "./hsm-types";

-export const initializeHsmModule = () => {
-const appCfg = getConfig();
-
+export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
 // Create a new instance of PKCS11 module
 const pkcs11 = new pkcs11js.PKCS11();
 let isInitialized = false;

 const initialize = () => {
-if (!appCfg.isHsmConfigured) {
+if (!envConfig.isHsmConfigured) {
 return;
 }

 try {
 // Load the PKCS#11 module
-pkcs11.load(appCfg.HSM_LIB_PATH!);
+pkcs11.load(envConfig.HSM_LIB_PATH!);

 // Initialize the module
 pkcs11.C_Initialize();
@@ -1,12 +1,13 @@
 import pkcs11js from "pkcs11js";

-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
 import { logger } from "@app/lib/logger";

 import { HsmKeyType, HsmModule } from "./hsm-types";

 type THsmServiceFactoryDep = {
 hsmModule: HsmModule;
+envConfig: Pick<TEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
 };

 export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;
@@ -15,9 +16,7 @@ type SyncOrAsync<T> = T | Promise<T>;
 type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

-// eslint-disable-next-line no-empty-pattern
-export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
-const appCfg = getConfig();
+export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envConfig }: THsmServiceFactoryDep) => {

 // Constants for buffer structures
 const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
 const BLOCK_SIZE = 16;
@@ -63,11 +62,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 throw new Error("No slots available");
 }

-if (appCfg.HSM_SLOT >= slots.length) {
-throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
+if (envConfig.HSM_SLOT >= slots.length) {
+throw new Error(`HSM slot ${envConfig.HSM_SLOT} not found or not initialized`);
 }

-const slotId = slots[appCfg.HSM_SLOT];
+const slotId = slots[envConfig.HSM_SLOT];

 const startTime = Date.now();
 while (Date.now() - startTime < MAX_TIMEOUT) {
@@ -78,7 +77,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm

 // Login
 try {
-pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
+pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, envConfig.HSM_PIN);
 logger.info("HSM: Successfully authenticated");
 break;
 } catch (error) {
@@ -86,7 +85,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 if (error instanceof pkcs11js.Pkcs11Error) {
 if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
 // We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
-logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
+logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${envConfig.HSM_SLOT}`);
 throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
 }
 if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
@@ -133,7 +132,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 };

 const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
-const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
+const label = type === HsmKeyType.HMAC ? `${envConfig.HSM_KEY_LABEL}_HMAC` : envConfig.HSM_KEY_LABEL;
 const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

 const template = [
@@ -360,7 +359,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 };

 const isActive = async () => {
-if (!isInitialized || !appCfg.isHsmConfigured) {
+if (!isInitialized || !envConfig.isHsmConfigured) {
 return false;
 }

@@ -372,11 +371,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 logger.error(err, "HSM: Error testing PKCS#11 module");
 }

-return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
+return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
 };

 const startService = async () => {
-if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
+if (!envConfig.isHsmConfigured || !pkcs11 || !isInitialized) return;

 try {
 await $withSession(async (sessionHandle) => {
@@ -395,7 +394,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
 { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
 { type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
-{ type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
+{ type: pkcs11js.CKA_LABEL, value: envConfig.HSM_KEY_LABEL! },
 { type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
 { type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
 ...genericAttributes
@@ -410,7 +409,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 keyTemplate
 );

-logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
+logger.info(`HSM: Master key created successfully with label: ${envConfig.HSM_KEY_LABEL}`);
 }

 // Check if HMAC key exists, create if not
@@ -419,7 +418,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
 { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
 { type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
-{ type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
+{ type: pkcs11js.CKA_LABEL, value: `${envConfig.HSM_KEY_LABEL!}_HMAC` },
 { type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
 { type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
 ...genericAttributes
@@ -434,7 +433,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 hmacKeyTemplate
 );

-logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
+logger.info(`HSM: HMAC key created successfully with label: ${envConfig.HSM_KEY_LABEL}_HMAC`);
 }

 // Get slot info to check supported mechanisms
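The two HSM files above stop reading the process-wide getConfig() inside the module and instead accept an envConfig parameter narrowed with Pick to exactly the keys each factory needs. A small sketch of a consumer typed the same way, which a test can satisfy with a plain object (describeHsmBoot is a hypothetical helper):

import { TEnvConfig } from "@app/lib/config/env";

// Only the two keys initializeHsmModule is typed against, mirroring the diff above.
type HsmBootConfig = Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">;

const describeHsmBoot = (envConfig: HsmBootConfig): string =>
  envConfig.isHsmConfigured
    ? `HSM enabled, loading PKCS#11 library from ${envConfig.HSM_LIB_PATH}`
    : "HSM not configured, skipping PKCS#11 initialization";

// Because the parameter is narrowed, a test does not need a full config object:
// describeHsmBoot({ isHsmConfigured: false, HSM_LIB_PATH: undefined });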
@@ -2,10 +2,10 @@ import { ForbiddenError, subject } from "@casl/ability";
 import { packRules } from "@casl/ability/extra";
 import ms from "ms";

-import { TableName } from "@app/db/schemas";
+import { ActionProjectType, TableName } from "@app/db/schemas";
 import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
-import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
+import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
 import { ActorType } from "@app/services/auth/auth-type";
 import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -55,24 +55,26 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
 if (!identityProjectMembership)
 throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });

-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
-identityProjectMembership.projectId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionActions.Edit,
 subject(ProjectPermissionSub.Identity, { identityId })
 );
-const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
-ActorType.IDENTITY,
-identityId,
-identityProjectMembership.projectId,
+const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
+actor: ActorType.IDENTITY,
+actorId: identityId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});

 // we need to validate that the privilege given is not higher than the assigning users permission
 // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
@@ -135,24 +137,26 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
 message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
 });

-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
-identityProjectMembership.projectId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionActions.Edit,
 subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
 );
-const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
-ActorType.IDENTITY,
-identityProjectMembership.identityId,
-identityProjectMembership.projectId,
+const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
+actor: ActorType.IDENTITY,
+actorId: identityProjectMembership.identityId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});

 // we need to validate that the privilege given is not higher than the assigning users permission
 // @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
@@ -215,24 +219,26 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
 message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
 });

-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
-identityProjectMembership.projectId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionActions.Edit,
 subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
 );
-const { permission: identityRolePermission } = await permissionService.getProjectPermission(
-ActorType.IDENTITY,
-identityProjectMembership.identityId,
-identityProjectMembership.projectId,
+const { permission: identityRolePermission } = await permissionService.getProjectPermission({
+actor: ActorType.IDENTITY,
+actorId: identityProjectMembership.identityId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
 if (!hasRequiredPriviledges)
 throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
@@ -260,13 +266,14 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
 message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
 });

-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
-identityProjectMembership.projectId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionActions.Read,
 subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
@@ -294,13 +301,14 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
 const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId });
 if (!identityProjectMembership)
 throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
-identityProjectMembership.projectId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionActions.Read,
 subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
@@ -329,13 +337,14 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
 const identityProjectMembership = await identityProjectDAL.findOne({ identityId, projectId });
 if (!identityProjectMembership)
 throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
-const { permission } = await permissionService.getProjectPermission(
+const { permission } = await permissionService.getProjectPermission({
 actor,
 actorId,
-identityProjectMembership.projectId,
+projectId: identityProjectMembership.projectId,
 actorAuthMethod,
-actorOrgId
-);
+actorOrgId,
+actionProjectType: ActionProjectType.Any
+});
 ForbiddenError.from(permission).throwUnlessCan(
 ProjectPermissionActions.Read,
 subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
Some files were not shown because too many files have changed in this diff.