Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-17 19:37:38 +00:00)

Compare commits: 696 commits, daniel/sec ... test-cli-u
Author | SHA1 | Date | |
---|---|---|---|
e35135e4e3 | |||
c95dd69167 | |||
a50b8120fd | |||
f1ee53d417 | |||
229ad79f49 | |||
d7dbd01ecf | |||
026fd21fd4 | |||
9b9c1a52b3 | |||
98aa424e2e | |||
2cd5df1ab3 | |||
e0d863e06e | |||
d991af557b | |||
ae54d04357 | |||
fa590ba697 | |||
1da2896bb0 | |||
423a2f38ea | |||
db0a72f7b4 | |||
4a202d180a | |||
33103f1e95 | |||
ce8a4bc50e | |||
141a821091 | |||
b3dd5410d7 | |||
74574c6c29 | |||
4f32756951 | |||
961fe09a6e | |||
5ab853d3e6 | |||
0e073cc9fc | |||
433b1a49f0 | |||
b0b255461d | |||
c2f2dc1e72 | |||
0ee1b425df | |||
46e72e9fba | |||
06fc4e955d | |||
ece294c483 | |||
2e40ee76d0 | |||
9a712b5c85 | |||
1ec427053b | |||
6c636415bb | |||
9b083a5dfb | |||
e323cb4630 | |||
e87a1bd402 | |||
3b09173bb1 | |||
2a8e159f51 | |||
cd333a7923 | |||
e11fdf8f3a | |||
4725108319 | |||
715441908b | |||
3f190426fe | |||
954e94cd87 | |||
9dd2379fb3 | |||
6bf9ab5937 | |||
ee536717c0 | |||
a0cb4889ca | |||
271a8de4c0 | |||
b18f7b957d | |||
e6349474aa | |||
d6da108e32 | |||
577c81be65 | |||
93baf9728b | |||
064322936b | |||
7634fc94a6 | |||
ecd39abdc1 | |||
d8313a161e | |||
d82b06c72b | |||
b8e79f20dc | |||
0088217fa9 | |||
13485cecbb | |||
85e9952a4c | |||
3d072c2f48 | |||
ebcf4761b6 | |||
82b828c10e | |||
bf20556b17 | |||
dcde10a401 | |||
e0373cf416 | |||
ea038f26df | |||
f95c446651 | |||
59ab4de24a | |||
d2295c47f5 | |||
47dc4f0c47 | |||
4b0e0d4de5 | |||
6128301622 | |||
766c1242fd | |||
8c318f51e4 | |||
be51e358fc | |||
e8dd8a908d | |||
fd20cb1e38 | |||
a07f168c36 | |||
530045aaf2 | |||
cd4f2cccf8 | |||
ff4ff0588f | |||
993024662a | |||
a03c152abf | |||
45d2cc05b3 | |||
74200bf860 | |||
c59cecdb45 | |||
483f26d863 | |||
da094383b8 | |||
fce772bc20 | |||
5e1a7cfb6e | |||
323d5d2d27 | |||
dd79d0385a | |||
0a28ac4a7d | |||
196c616986 | |||
bf6060d353 | |||
438e2dfa07 | |||
3ad50a4386 | |||
ed94e7a8e7 | |||
09ad1cce96 | |||
d7f9cff43e | |||
5d8d75ac93 | |||
db5a85d3ca | |||
a1a931d3dd | |||
e639f5ee49 | |||
a2c9c4529b | |||
0a338ee539 | |||
2a7679005e | |||
838d132898 | |||
b0cacc5a4a | |||
68d07f0136 | |||
10a3c7015e | |||
03b0334fa0 | |||
10a3658328 | |||
e8ece6be3f | |||
c765c20539 | |||
5cdabd3e61 | |||
2f4c42482d | |||
75ca093b24 | |||
6c0889f117 | |||
8f49d45309 | |||
a4a162ab65 | |||
5b11232325 | |||
8b53f63d69 | |||
6c0975554d | |||
042a472f59 | |||
697543e4a2 | |||
53c015988d | |||
73b5ca5b4f | |||
a1318d54b1 | |||
44afe2fc1d | |||
956d0f6c5d | |||
c376add0fa | |||
fb0b6b00dd | |||
977c02357b | |||
d4125443a3 | |||
8e3ac6ca29 | |||
a5f198a3d5 | |||
fa9bdd21ff | |||
accf42de2e | |||
2f060407ab | |||
c516ce8196 | |||
95ccd35f61 | |||
348a412cda | |||
c5a5ad93a8 | |||
d55ddcd577 | |||
67a0e5ae68 | |||
37cbb4c55b | |||
d5741b4a72 | |||
506b56b657 | |||
351304fda6 | |||
2af515c486 | |||
cdfec32195 | |||
8d6bd5d537 | |||
4654a17e5f | |||
b6d67df966 | |||
3897f0ece5 | |||
7719ebb112 | |||
f03f02786d | |||
c60840e979 | |||
6fe7a5f069 | |||
14b7d763ad | |||
bc1b7ddcc5 | |||
dff729ffc1 | |||
786f5d9e09 | |||
ef6abedfe0 | |||
9a5633fda4 | |||
f8a96576c9 | |||
dd2fee3eca | |||
802cf79af5 | |||
88d3d62894 | |||
ac40dcc2c6 | |||
6482e88dfc | |||
a01249e903 | |||
7b3e1f12bd | |||
031c8d67b1 | |||
778b0d4368 | |||
95b57e144d | |||
1d26269993 | |||
ffee1701fc | |||
871be7132a | |||
5fe3c9868f | |||
c936aa7157 | |||
05005f4258 | |||
c179d7e5ae | |||
c8553fba2b | |||
26a9d68823 | |||
af5b3aa171 | |||
d4728e31c1 | |||
f9a5b46365 | |||
d65deab0af | |||
61591742e4 | |||
54b13a9daa | |||
4adf0aa1e2 | |||
3d3ee746cf | |||
07e4358d00 | |||
962dd5d919 | |||
52bd1afb0a | |||
d918dd8967 | |||
e2e0f6a346 | |||
326cb99732 | |||
341b63c61c | |||
81b026865c | |||
f50c72c033 | |||
e1046e2d56 | |||
ed3fa8add1 | |||
d123283849 | |||
d7fd44b845 | |||
cefcd872ee | |||
3ffee049ee | |||
9924ef3a71 | |||
524462d7bc | |||
4955e2064d | |||
6ebc766308 | |||
6f9a66a0d7 | |||
cca7b68dd0 | |||
ab39f13e03 | |||
351e573fea | |||
f1bc26e2e5 | |||
8aeb607f6e | |||
e530b7a788 | |||
bf61090b5a | |||
106b068a51 | |||
6f0a97a2fa | |||
5d604be091 | |||
905cf47d90 | |||
2c40d316f4 | |||
32521523c1 | |||
3a2e8939b1 | |||
e5947fcab9 | |||
a6d9c74054 | |||
07bd527cc1 | |||
f7cf2bb78f | |||
ff24e76a32 | |||
6ac802b6c9 | |||
ff92e00503 | |||
b20474c505 | |||
e19ffc91c6 | |||
61eb66efca | |||
fa7843983f | |||
2d5b7afda7 | |||
15999daa24 | |||
82520a7f0a | |||
af236ba892 | |||
ec31211bca | |||
0ecf6044d9 | |||
6c512f47bf | |||
c4b7d4618d | |||
003f2b003d | |||
33b135f02c | |||
eed7cc6408 | |||
440ada464f | |||
4f08801ae8 | |||
6b7abbbeb9 | |||
cfe2bbe125 | |||
29dcf229d8 | |||
3944e20a5b | |||
747b5ec68d | |||
2079913511 | |||
ed0dc324a3 | |||
1c13ed54af | |||
049f0f56a0 | |||
9ad725fd6c | |||
9a954c8f15 | |||
81a64d081c | |||
43804f62e6 | |||
67089af17a | |||
8abfea0409 | |||
ce4adccc80 | |||
dcd3b5df56 | |||
d83240749f | |||
36144d8c42 | |||
f6425480ca | |||
a3e9392a2f | |||
633a2ae985 | |||
4478dc8659 | |||
510ddf2b1a | |||
5363f8c6ff | |||
7d9de6acba | |||
bac944133a | |||
f059d65b45 | |||
c487b2b34a | |||
015a193330 | |||
8e20531b40 | |||
d91add2e7b | |||
8ead2aa774 | |||
1b2128e3cc | |||
6d72524896 | |||
1ec11d5963 | |||
ad6f285b59 | |||
d4842dd273 | |||
78f83cb478 | |||
e67a8f9c05 | |||
c8a871de7c | |||
64c0951df3 | |||
c185414a3c | |||
f9695741f1 | |||
b7c4b11260 | |||
ad110f490c | |||
81f3613393 | |||
a7fe79c046 | |||
ed6306747a | |||
64569ab44b | |||
9eb89bb46d | |||
c4da1ce32d | |||
2d1d6f5ce8 | |||
add97c9b38 | |||
768ba4f4dc | |||
18c32d872c | |||
1fd40ab6ab | |||
9d258f57ce | |||
45ccbaf4c9 | |||
6ef358b172 | |||
838c1af448 | |||
8de7261c9a | |||
67b1b79fe3 | |||
31477f4d2b | |||
f200372d74 | |||
f955b68519 | |||
9269b63943 | |||
8f96653273 | |||
7dffc08eba | |||
126b0ce7e7 | |||
0b71f7f297 | |||
e53439d586 | |||
c86e508817 | |||
6426b85c1e | |||
cc7d0d752f | |||
b89212a0c9 | |||
d4c69d8e5d | |||
3d6da1e548 | |||
7e46fe8148 | |||
3756a1901d | |||
9c8adf75ec | |||
f461eaa432 | |||
a1fbc140ee | |||
ea27870ce3 | |||
48943b4d78 | |||
fd1afc2cbe | |||
6905029455 | |||
3741201b87 | |||
e89fb33981 | |||
2ef77c737a | |||
0f31fa3128 | |||
1da5a5f417 | |||
5ebf142e3e | |||
94d7d2b029 | |||
e39d1a0530 | |||
4c5f3859d6 | |||
16866d46bf | |||
4f4764dfcd | |||
bdceea4c91 | |||
32fa6866e4 | |||
b4faef797c | |||
08732cab62 | |||
81d5f639ae | |||
25b83d4b86 | |||
155e59e571 | |||
8fbd3f2fce | |||
a500f00a49 | |||
63d325c208 | |||
2149c0a9d1 | |||
430f8458cb | |||
6842f7aa8b | |||
bdb7cb4cbf | |||
ad207786e2 | |||
ace8c37c25 | |||
f15e61dbd9 | |||
4c82408b51 | |||
8146dcef16 | |||
2e90addbc5 | |||
427201a634 | |||
0b55ac141c | |||
aecfa268ae | |||
fdfc020efc | |||
62aa80a104 | |||
cf9d8035bd | |||
d0c9f1ca53 | |||
2ecc7424d9 | |||
c04b97c689 | |||
7600a86dfc | |||
8924eaf251 | |||
82e9504285 | |||
c4e10df754 | |||
ce60e96008 | |||
54d002d718 | |||
dc2358bbaa | |||
930b59cb4f | |||
ec363a5ad4 | |||
c0de4ae3ee | |||
de7e92ccfc | |||
fc651f6645 | |||
522d81ae1a | |||
ef22b39421 | |||
02153ffb32 | |||
1d14cdf334 | |||
39b323dd9c | |||
b0b55344ce | |||
cc2c4b16bf | |||
d9d62384e7 | |||
76f34501dc | |||
7415bb93b8 | |||
7a1c08a7f2 | |||
568aadef75 | |||
84f9eb5f9f | |||
87ac723fcb | |||
a6dab47552 | |||
79d8a9debb | |||
08bac83bcc | |||
46c90f03f0 | |||
d7722f7587 | |||
a42bcb3393 | |||
192dba04a5 | |||
0cc3240956 | |||
667580546b | |||
9fd662b7f7 | |||
a56cbbc02f | |||
dc30465afb | |||
f1caab2d00 | |||
1d186b1950 | |||
9cf5908cc1 | |||
f1b6c3764f | |||
4e6c860c69 | |||
eda9ed257e | |||
38cf43176e | |||
f5c7943f2f | |||
3c59f7f350 | |||
84cc7bcd6c | |||
159c27ac67 | |||
de5a432745 | |||
387780aa94 | |||
3887ce800b | |||
1a06b3e1f5 | |||
5f0dd31334 | |||
7e14c58931 | |||
627e17b3ae | |||
39b7a4a111 | |||
e7c512999e | |||
c19016e6e6 | |||
20477ce2b0 | |||
e04b2220be | |||
edf6a37fe5 | |||
f5749e326a | |||
75e0a68b68 | |||
71b8e3dbce | |||
4dc56033b1 | |||
ed37b99756 | |||
6fa41a609b | |||
e46f10292c | |||
acb22cdf36 | |||
c9da8477c8 | |||
5e4b478b74 | |||
765be2d99d | |||
719a18c218 | |||
1b05b7cf2c | |||
16d3bbb67a | |||
872a3fe48d | |||
c7414e00f9 | |||
ad1dd55b8b | |||
497761a0e5 | |||
483fb458dd | |||
17cf602a65 | |||
23f6f5dfd4 | |||
b9b76579ac | |||
761965696b | |||
ace2500885 | |||
4eff7d8ea5 | |||
c4512ae111 | |||
78c349c09a | |||
09df440613 | |||
a8fc0e540a | |||
46ce46b5a0 | |||
dc88115d43 | |||
955657e172 | |||
f1ba64aa66 | |||
d74197aeb4 | |||
97567d06d4 | |||
3986df8e8a | |||
3fcd84b592 | |||
29e39b558b | |||
9458c8b04f | |||
3b95c5d859 | |||
de8f315211 | |||
9960d58e1b | |||
0057404562 | |||
47ca1b3011 | |||
716cd090c4 | |||
e870bb3ade | |||
98c9e98082 | |||
a814f459ab | |||
dcc3509a33 | |||
9dbe45a730 | |||
66817a40db | |||
20bd2ca71c | |||
004a8b71a2 | |||
7875bcc067 | |||
9c702b27b2 | |||
db8a4bd26d | |||
f0fce3086e | |||
a9e7db6fc0 | |||
2bd681d58f | |||
51fef3ce60 | |||
df9e7bf6ee | |||
04479bb70a | |||
cdc90411e5 | |||
dcb05a3093 | |||
b055cda64d | |||
f68602280e | |||
f9483afe95 | |||
d742534f6a | |||
99eb8eb8ed | |||
1dea024880 | |||
699e03c1a9 | |||
f6372249b4 | |||
2b7e1b465f | |||
0f42fcd688 | |||
b7b294f024 | |||
2e02f8bea8 | |||
8203158c63 | |||
ada04ed4fc | |||
cc9cc70125 | |||
045debeaf3 | |||
3fb8ad2fac | |||
795d9e4413 | |||
67f2e4671a | |||
cbe3acde74 | |||
de480b5771 | |||
07b93c5cec | |||
77431b4719 | |||
50610945be | |||
57f54440d6 | |||
9711e73a06 | |||
214f837041 | |||
58ebebb162 | |||
65ddddb6de | |||
a55b26164a | |||
6cd448b8a5 | |||
c48c9ae628 | |||
7003ad608a | |||
104edca6f1 | |||
75345d91c0 | |||
abc2ffca57 | |||
b7640f2d03 | |||
2ee4d68fd0 | |||
3ca931acf1 | |||
a3fb7c9f00 | |||
7f6715643d | |||
8e311658d4 | |||
5ed164de24 | |||
9116acd37b | |||
0513307d98 | |||
28c2f1874e | |||
596378208e | |||
efc3b6d474 | |||
07e1d1b130 | |||
7f76779124 | |||
30bcf1f204 | |||
706feafbf2 | |||
fc4e3f1f72 | |||
dcd5f20325 | |||
58f3e116a3 | |||
7bc5aad8ec | |||
a16dc3aef6 | |||
da7746c639 | |||
cd5b6da541 | |||
2dda7180a9 | |||
30ccfbfc8e | |||
aa76924ee6 | |||
d8f679e72d | |||
bf6cfbac7a | |||
8e82813894 | |||
df21a1fb81 | |||
bdbb6346cb | |||
ea9da6d2a8 | |||
3c2c70912f | |||
b607429b99 | |||
16c1516979 | |||
f5dbbaf1fd | |||
2a292455ef | |||
4d040706a9 | |||
5183f76397 | |||
4b3efb43b0 | |||
96046726b2 | |||
a86a951acc | |||
5e70860160 | |||
abbd427ee2 | |||
8fd5fdbc6a | |||
77e1ccc8d7 | |||
711cc438f6 | |||
8447190bf8 | |||
12b447425b | |||
9cb1a31287 | |||
b00413817d | |||
2a8bd74e88 | |||
f28f4f7561 | |||
f0b05c683b | |||
3e8f02a4f9 | |||
50ee60a3ea | |||
21bdecdf2a | |||
bf09461416 | |||
1ff615913c | |||
281cedf1a2 | |||
a8d847f139 | |||
2a0c0590f1 | |||
2e6d525d27 | |||
7fd4249d00 | |||
90cfc44592 | |||
8c403780c2 | |||
b69c091f2f | |||
4a66395ce6 | |||
8c18753e3f | |||
85c5d69c36 | |||
94fe577046 | |||
a0a579834c | |||
b5575f4c20 | |||
f98f212ecf | |||
b331a4a708 | |||
e351a16b5a | |||
2cfca823f2 | |||
a8398a7009 | |||
8c054cedfc | |||
24d4f8100c | |||
08f23e2d3c | |||
d1ad605ac4 | |||
9dd5857ff5 | |||
babbacdc96 | |||
76427f43f7 | |||
3badcea95b | |||
1a4c0fe8d9 | |||
04f6864abc | |||
fcbe0f59d2 | |||
e95b6fdeaa | |||
5391bcd3b2 | |||
48fd9e2a56 | |||
7b5926d865 | |||
034123bcdf | |||
f3786788fd | |||
c406f6d78d | |||
eb66295dd4 | |||
798215e84c | |||
53f7491441 | |||
53f6ab118b | |||
0f5a1b13a6 | |||
5c606fe45f | |||
bbf60169eb | |||
e004be22e3 | |||
016cb4a7ba | |||
9bfc2a5dd2 | |||
72dbef97fb | |||
f376eaae13 | |||
026f883d21 | |||
e42f860261 | |||
08ec8c9b73 | |||
1512d4f496 | |||
9f7b42ad91 | |||
3045477c32 | |||
be4adc2759 | |||
4eba80905a | |||
b023bc7442 | |||
a0029ab469 | |||
53605c3880 | |||
e5bca5b5df | |||
4091bc19e9 | |||
23bd048bb9 | |||
17a4674821 | |||
ec9631107d | |||
3fa450b9a7 | |||
3b9c62c366 | |||
cb3d171d48 | |||
c29841fbcf | |||
fcccf1bd8d | |||
4382825162 | |||
f80ef1dcc8 | |||
7abf3e3642 | |||
82ef35bd08 | |||
4eb668b5a5 | |||
18edea9f26 | |||
68646bcdf8 | |||
9989ceb6d1 | |||
95d7ba5f22 | |||
ce4c5d8ea1 | |||
c54eafc128 | |||
757942aefc | |||
1d57629036 | |||
8061066e27 | |||
c993b1bbe3 | |||
2cbf33ac14 | |||
943d0ddb69 |
@@ -112,4 +112,11 @@ INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
.envrc (new file, 3 lines)
@@ -0,0 +1,3 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake
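For context, this is how the new .envrc is typically activated; a minimal sketch assuming direnv and Nix with flakes are already installed (standard direnv/Nix commands, nothing Infisical-specific):

    # Approve the checked-in .envrc once per checkout; direnv refuses to load unapproved files
    direnv allow .
    # The flake's dev shell then loads automatically on cd; it can also be entered manually with:
    nix develop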
@@ -32,10 +32,23 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"

docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api

echo "Container status right after creation:"
docker ps -a | grep infisical-api
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -43,35 +56,48 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
go-version: "1.21.5"
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
break
fi

echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"

docker logs infisical-api

sleep 2
SECONDS=$((SECONDS+2))
sleep 5
SECONDS=$((SECONDS+5))
done

if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
run: go install github.com/oasdiff/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker stop infisical-api || true
docker rm infisical-api || true
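As a side note, the breaking-change gate above can be reproduced locally against a running dev API. A sketch using only commands already present in the workflow, assuming Go is installed and the local server exposes its OpenAPI document on port 4000:

    # Install the diff tool from its new module path
    go install github.com/oasdiff/oasdiff@latest
    # Fail if the local spec introduces breaking changes relative to production
    oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR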
.github/workflows/codeql.yml (new file, vendored, 102 lines)
@@ -0,0 +1,102 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"

on:
push:
branches: [ "main", "development" ]
pull_request:
branches: [ "main", "development" ]
schedule:
- cron: '33 7 * * 3'

jobs:
analyze:
name: Analyze (${{ matrix.language }})
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners (GitHub.com only)
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
permissions:
# required for all workflows
security-events: write

# required to fetch internal or private CodeQL packs
packages: read

# only required for workflows in private repositories
actions: read
contents: read

strategy:
fail-fast: false
matrix:
include:
- language: actions
build-mode: none
- language: go
build-mode: autobuild
- language: javascript-typescript
build-mode: none
# CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
# Use `c-cpp` to analyze code written in C, C++ or both
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@v4

# Add any setup steps before running the `github/codeql-action/init` action.
# This includes steps like installing compilers or runtimes (`actions/setup-node`
# or others). This is typically only required for manual builds.
# - name: Setup runtime (example)
#   uses: actions/setup-example@v1

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.

# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality

# If the analyze step fails for one of the languages you are analyzing with
# "We were unable to automatically build your code", modify the matrix above
# to set the build mode to "manual" for that language. Then modify this step
# to build your code.
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
.github/workflows/release-k8-operator-helm.yml (new file, vendored, 27 lines)
@@ -0,0 +1,27 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:

jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2

- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0

- name: Install python
uses: actions/setup-python@v4

- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli

- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/release_build_infisical_cli.yml (vendored, 258 changed lines)
@@ -1,131 +1,147 @@
name: Build and release CLI

on:
workflow_dispatch:
workflow_dispatch:

push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"

permissions:
contents: write
contents: write

jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
# cli-integration-tests:
# name: Run tests before deployment
# uses: ./.github/workflows/run-cli-tests.yml
# secrets:
# CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
# CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
# CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
# CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
# CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
# CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
# CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

npm-release:
runs-on: ubuntu-20.04
# npm-release:
# runs-on: ubuntu-latest
# env:
# working-directory: ./npm
# needs:
# - cli-integration-tests
# - goreleaser
# steps:
# - uses: actions/checkout@v3
# with:
# fetch-depth: 0

# - name: Extract version
# run: |
# VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
# echo "Version extracted: $VERSION"
# echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

# - name: Print version
# run: echo ${{ env.CLI_VERSION }}

# - name: Setup Node
# uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
# with:
# node-version: 20
# cache: "npm"
# cache-dependency-path: ./npm/package-lock.json
# - name: Install dependencies
# working-directory: ${{ env.working-directory }}
# run: npm install --ignore-scripts

# - name: Set NPM version
# working-directory: ${{ env.working-directory }}
# run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

# - name: Setup NPM
# working-directory: ${{ env.working-directory }}
# run: |
# echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
# echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

# echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
# echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
# env:
# NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

# - name: Pack NPM
# working-directory: ${{ env.working-directory }}
# run: npm pack

# - name: Publish NPM
# working-directory: ${{ env.working-directory }}
# run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
# env:
# NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

goreleaser:
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

- name: Print version
run: echo ${{ env.CLI_VERSION }}

- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts

- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack

- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
with:
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --passphrase "$GPG_SIGNING_KEY_PASSPHRASE" --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
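The deb-s3 and GPG steps above only prepare tooling; the actual repository upload happens inside cli/upload_to_cloudsmith.sh, which is not shown in this diff. Purely as an illustration of how deb-s3 is commonly invoked with the environment variables this job exports (a hypothetical sketch, not the script's real contents):

    # Hypothetical: publish built .deb packages to the S3-backed apt repo, signed with the imported GPG key
    deb-s3 upload --bucket "$INFISICAL_CLI_S3_BUCKET" --sign "$INFISICAL_CLI_REPO_SIGNING_KEY_ID" dist/*.deb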
.github/workflows/release_docker_k8_operator.yaml (vendored, 143 changed lines)
@@ -1,52 +1,107 @@
name: Release image + Helm chart K8s Operator
name: Release K8 Operator Docker Image
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"
push:
tags:
- "infisical-k8-operator/v*.*.*"

permissions:
contents: write
pull-requests: write

jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- uses: actions/checkout@v2
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"

- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Checkout code
uses: actions/checkout@v2

- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0

- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21

- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen

- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
- name: Generate Helm Chart
working-directory: k8-operator
run: make helm

- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}

- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only

# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi

- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.

Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

Once you have approved this PR, you can trigger the helm release workflow manually.
base: main

- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1

- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1

- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
.github/workflows/run-backend-tests.yml (vendored, 8 changed lines)
@@ -34,7 +34,10 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -44,4 +47,5 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down
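For reference, the same split between unit and integration suites can be run locally; a sketch that only reuses commands already present in this workflow and assumes a checkout containing docker-compose.dev.yml and the backend npm scripts:

    # Start the backing services, then run the unit and integration suites from backend/
    touch .env && docker compose -f docker-compose.dev.yml up -d db redis
    cd backend && npm run test:unit && npm run test:e2e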
.goreleaser.yaml (254 changed lines)
@@ -16,23 +16,23 @@ monorepo:
dir: cli

builds:
- id: darwin-build
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
env:
- CGO_ENABLED=1
- CC=/home/runner/work/osxcross/target/bin/o64-clang
- CXX=/home/runner/work/osxcross/target/bin/o64-clang++
goos:
- darwin
ignore:
- goos: darwin
goarch: "386"
dir: ./cli
# - id: darwin-build
# binary: infisical
# ldflags:
# - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
# - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
# flags:
# - -trimpath
# env:
# - CGO_ENABLED=1
# - CC=/home/runner/work/osxcross/target/bin/o64-clang
# - CXX=/home/runner/work/osxcross/target/bin/o64-clang++
# goos:
# - darwin
# ignore:
# - goos: darwin
# goarch: "386"
# dir: ./cli

- id: all-other-builds
env:
@@ -44,11 +44,11 @@ builds:
flags:
- -trimpath
goos:
- freebsd
# - freebsd
- linux
- netbsd
- openbsd
- windows
# - netbsd
# - openbsd
# - windows
goarch:
- "386"
- amd64
@@ -75,8 +75,10 @@ archives:
- ../completions/*

release:
replace_existing_draft: true
mode: "replace"
# replace_existing_draft: true
# mode: "replace"
disable: true
skip_upload: true

checksum:
name_template: "checksums.txt"
@@ -91,39 +93,39 @@ snapshot:
# dir: "{{ dir .ArtifactPath }}"
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/

brews:
- name: infisical
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
- name: "infisical@{{.Version}}"
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
# brews:
# - name: infisical
# tap:
# owner: Infisical
# name: homebrew-get-cli
# commit_author:
# name: "Infisical"
# email: ai@infisical.com
# folder: Formula
# homepage: "https://infisical.com"
# description: "The official Infisical CLI"
# install: |-
# bin.install "infisical"
# bash_completion.install "completions/infisical.bash" => "infisical"
# zsh_completion.install "completions/infisical.zsh" => "_infisical"
# fish_completion.install "completions/infisical.fish"
# man1.install "manpages/infisical.1.gz"
# - name: "infisical@{{.Version}}"
# tap:
# owner: Infisical
# name: homebrew-get-cli
# commit_author:
# name: "Infisical"
# email: ai@infisical.com
# folder: Formula
# homepage: "https://infisical.com"
# description: "The official Infisical CLI"
# install: |-
# bin.install "infisical"
# bash_completion.install "completions/infisical.bash" => "infisical"
# zsh_completion.install "completions/infisical.zsh" => "_infisical"
# fish_completion.install "completions/infisical.fish"
# man1.install "manpages/infisical.1.gz"

nfpms:
- id: infisical
@@ -136,10 +138,10 @@ nfpms:
description: The offical Infisical CLI
license: MIT
formats:
- rpm
# - rpm
- deb
- apk
- archlinux
# - apk
# - archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
@@ -151,73 +153,73 @@ nfpms:
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz

scoop:
bucket:
owner: Infisical
name: scoop-infisical
commit_author:
name: "Infisical"
email: ai@infisical.com
homepage: "https://infisical.com"
description: "The official Infisical CLI"
license: MIT
# scoop:
# bucket:
# owner: Infisical
# name: scoop-infisical
# commit_author:
# name: "Infisical"
# email: ai@infisical.com
# homepage: "https://infisical.com"
# description: "The official Infisical CLI"
# license: MIT

aurs:
- name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
# license
install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
# completions
mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
# aurs:
# - name: infisical-bin
# homepage: "https://infisical.com"
# description: "The official Infisical CLI"
# maintainers:
# - Infisical, Inc <support@infisical.com>
# license: MIT
# private_key: "{{ .Env.AUR_KEY }}"
# git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
# package: |-
# # bin
# install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
# # license
# install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
# # completions
# mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
# mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
# mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
# install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
# install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
# install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# # man pages
# install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"

dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
# dockers:
# - dockerfile: docker/alpine
# goos: linux
# goarch: amd64
# use: buildx
# ids:
# - all-other-builds
# image_templates:
# - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
# - "infisical/cli:latest-amd64"
# build_flag_templates:
# - "--pull"
# - "--platform=linux/amd64"
# - dockerfile: docker/alpine
# goos: linux
# goarch: amd64
# use: buildx
# ids:
# - all-other-builds
# image_templates:
# - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
# - "infisical/cli:latest-arm64"
# build_flag_templates:
# - "--pull"
# - "--platform=linux/arm64"

docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"
# docker_manifests:
# - name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
# image_templates:
# - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
# - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
# - name_template: "infisical/cli:latest"
# image_templates:
# - "infisical/cli:latest-amd64"
# - "infisical/cli:latest-arm64"
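With releases, Homebrew, Scoop, AUR, and Docker publishing disabled or commented out above, the remaining build matrix can still be exercised locally. A sketch assuming the GoReleaser Pro binary is installed (the config uses pro features such as monorepo) and is run from the repository root:

    # Build snapshot artifacts for the current platform only, without publishing anything
    goreleaser build --snapshot --clean --single-target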
@@ -8,3 +8,9 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
@@ -161,6 +161,9 @@ COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
@@ -3,13 +3,10 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-alpine AS base
FROM node:20-slim AS base

FROM base AS frontend-dependencies

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./
@@ -45,8 +42,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

@@ -56,21 +53,23 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user

WORKDIR /app

# Install all required dependencies for build
RUN apk --update add \
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
&& rm -rf /var/lib/apt/lists/*

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user

COPY backend/package*.json ./
RUN npm ci --only-production
@@ -86,18 +85,19 @@ FROM base AS backend-runner
WORKDIR /app

# Install all required dependencies for runtime
RUN apk --update add \
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
&& rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production
@@ -109,34 +109,36 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production

RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

# Install all required runtime dependencies
RUN apk --update add \
RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git \
openssh
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -154,11 +156,11 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY


COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
@@ -166,6 +168,7 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true

WORKDIR /backend

ENV TELEMETRY_ENABLED true
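A quick way to sanity-check the FreeTDS ODBC registration written by the printf lines above is unixODBC's own tooling. A sketch assuming the image has been built and unixODBC is installed as shown in the diff:

    # List configured ODBC drivers; the FreeTDS entry should point at the x86_64-linux-gnu driver path
    odbcinst -q -d
    # Show where unixODBC expects odbcinst.ini and related configuration files
    odbcinst -j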
@@ -1,23 +1,22 @@
# Build stage
FROM node:20-alpine AS build
FROM node:20-slim AS build

WORKDIR /app

# Required for pkcs11js
RUN apk --update add \
python3 \
make \
g++ \
openssh
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev \
freetds-dev

libc-dev

COPY package*.json ./
RUN npm ci --only-production
@@ -26,36 +25,36 @@ COPY . .
RUN npm run build

# Production stage
FROM node:20-alpine
FROM node:20-slim
WORKDIR /app

ENV npm_config_cache /home/node/.npm

COPY package*.json ./

RUN apk --update add \
python3 \
make \
g++
RUN apt-get update && apt-get install -y \
python3 \
make \
g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev \
freetds-dev
libc-dev


RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .

RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.8.1 git

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
@ -1,4 +1,4 @@
|
||||
FROM node:20-alpine
|
||||
FROM node:20-slim
|
||||
|
||||
# ? Setup a test SoftHSM module. In production a real HSM is used.
|
||||
|
||||
@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
|
||||
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
|
||||
SOFTHSM2_SOURCES=/tmp/softhsm2
|
||||
|
||||
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apk --update add \
|
||||
alpine-sdk \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
openssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh
|
||||
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
libssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh-client \
|
||||
curl \
|
||||
pkg-config
|
||||
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
unixodbc \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
freetds-dev \
|
||||
freetds-bin \
|
||||
tdsodbc
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# build and install SoftHSM2
|
||||
|
||||
# Build and install SoftHSM2
|
||||
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
|
||||
WORKDIR ${SOFTHSM2_SOURCES}
|
||||
|
||||
@ -45,16 +45,18 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
|
||||
WORKDIR /root
|
||||
RUN rm -fr ${SOFTHSM2_SOURCES}
|
||||
|
||||
# install pkcs11-tool
|
||||
RUN apk --update add opensc
|
||||
# Install pkcs11-tool
|
||||
RUN apt-get install -y opensc
|
||||
|
||||
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
RUN mkdir -p /etc/softhsm2/tokens && \
|
||||
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
|
||||
# ? App setup
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
# Install Infisical CLI
|
||||
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
|
||||
apt-get update && \
|
||||
apt-get install -y infisical=0.8.1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
85
backend/Dockerfile.dev.fips
Normal file
85
backend/Dockerfile.dev.fips
Normal file
@ -0,0 +1,85 @@
|
||||
FROM node:20-slim
|
||||
|
||||
# ? Setup a test SoftHSM module. In production a real HSM is used.
|
||||
|
||||
ARG SOFTHSM2_VERSION=2.5.0
|
||||
|
||||
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
|
||||
SOFTHSM2_SOURCES=/tmp/softhsm2
|
||||
|
||||
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
libssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh-client \
|
||||
curl \
|
||||
pkg-config \
|
||||
perl \
|
||||
wget
|
||||
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
unixodbc \
|
||||
unixodbc-dev \
|
||||
freetds-dev \
|
||||
freetds-bin \
|
||||
tdsodbc
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# Build and install SoftHSM2
|
||||
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
|
||||
WORKDIR ${SOFTHSM2_SOURCES}
|
||||
|
||||
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
|
||||
&& sh autogen.sh \
|
||||
&& ./configure --prefix=/usr/local --disable-gost \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
WORKDIR /root
|
||||
RUN rm -fr ${SOFTHSM2_SOURCES}
|
||||
|
||||
# Install pkcs11-tool
|
||||
RUN apt-get install -y opensc
|
||||
|
||||
RUN mkdir -p /etc/softhsm2/tokens && \
|
||||
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
|
||||
WORKDIR /openssl-build
|
||||
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
|
||||
&& tar -xf openssl-3.1.2.tar.gz \
|
||||
&& cd openssl-3.1.2 \
|
||||
&& ./Configure enable-fips \
|
||||
&& make \
|
||||
&& make install_fips
|
||||
|
||||
# ? App setup
|
||||
|
||||
# Install Infisical CLI
|
||||
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
|
||||
apt-get update && \
|
||||
apt-get install -y infisical=0.8.1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json package.json
|
||||
COPY package-lock.json package-lock.json
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY . .
|
||||
|
||||
ENV HOST=0.0.0.0
|
||||
ENV OPENSSL_CONF=/app/nodejs.cnf
|
||||
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
|
||||
ENV NODE_OPTIONS=--force-fips
|
||||
|
||||
CMD ["npm", "run", "dev:docker"]
|
@ -11,6 +11,7 @@ export const mockQueue = (): TQueueServiceFactory => {
|
||||
job[name] = jobData;
|
||||
},
|
||||
queuePg: async () => {},
|
||||
schedulePg: async () => {},
|
||||
initialize: async () => {},
|
||||
shutdown: async () => undefined,
|
||||
stopRepeatableJob: async () => true,
|
||||
|
@ -120,4 +120,3 @@ export default {
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
|
16
backend/nodejs.cnf
Normal file
16
backend/nodejs.cnf
Normal file
@ -0,0 +1,16 @@
|
||||
nodejs_conf = nodejs_init
|
||||
|
||||
.include /usr/local/ssl/fipsmodule.cnf
|
||||
|
||||
[nodejs_init]
|
||||
providers = provider_sect
|
||||
|
||||
[provider_sect]
|
||||
default = default_sect
|
||||
fips = fips_sect
|
||||
|
||||
[default_sect]
|
||||
activate = 1
|
||||
|
||||
[algorithm_sect]
|
||||
default_properties = fips=yes
|
2405
backend/package-lock.json
generated
2405
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -40,6 +40,7 @@
|
||||
"type:check": "tsc --noEmit",
|
||||
"lint:fix": "eslint --fix --ext js,ts ./src",
|
||||
"lint": "eslint 'src/**/*.ts'",
|
||||
"test:unit": "vitest run -c vitest.unit.config.ts",
|
||||
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
|
||||
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
|
||||
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
|
||||
@ -70,6 +71,7 @@
|
||||
"migrate:org": "tsx ./scripts/migrate-organization.ts",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
|
||||
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
|
||||
},
|
||||
"keywords": [],
|
||||
@ -145,17 +147,18 @@
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@infisical/quic": "^1.0.8",
|
||||
"@node-saml/passport-saml": "^5.0.1",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@octopusdeploy/api-client": "^3.4.1",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
|
||||
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.55.0",
|
||||
"@opentelemetry/instrumentation": "^0.55.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.57.2",
|
||||
"@opentelemetry/resources": "^1.28.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.28.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.27.0",
|
||||
@ -176,6 +179,7 @@
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"connect-redis": "^7.1.1",
|
||||
"cron": "^3.1.7",
|
||||
"dd-trace": "^5.40.0",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.28.1",
|
||||
"fastify-plugin": "^4.5.1",
|
||||
@ -184,6 +188,7 @@
|
||||
"handlebars": "^4.7.8",
|
||||
"hdb": "^0.19.10",
|
||||
"ioredis": "^5.3.2",
|
||||
"isomorphic-dompurify": "^2.22.0",
|
||||
"jmespath": "^0.16.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"jsrp": "^0.2.4",
|
||||
|
@ -1,7 +0,0 @@
|
||||
import "@fastify/request-context";
|
||||
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
}
|
||||
}
|
9
backend/src/@types/fastify.d.ts
vendored
9
backend/src/@types/fastify.d.ts
vendored
@ -33,6 +33,7 @@ import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
|
||||
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
|
||||
@ -100,6 +101,13 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
identityAuthInfo?: {
|
||||
identityId: string;
|
||||
oidc?: {
|
||||
claims: Record<string, string>;
|
||||
};
|
||||
};
|
||||
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
|
||||
}
|
||||
}
|
||||
|
||||
@ -230,6 +238,7 @@ declare module "fastify" {
|
||||
kmip: TKmipServiceFactory;
|
||||
kmipOperation: TKmipOperationServiceFactory;
|
||||
gateway: TGatewayServiceFactory;
|
||||
secretRotationV2: TSecretRotationV2ServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
52
backend/src/@types/knex.d.ts
vendored
52
backend/src/@types/knex.d.ts
vendored
@ -17,6 +17,9 @@ import {
|
||||
TApiKeys,
|
||||
TApiKeysInsert,
|
||||
TApiKeysUpdate,
|
||||
TAppConnections,
|
||||
TAppConnectionsInsert,
|
||||
TAppConnectionsUpdate,
|
||||
TAuditLogs,
|
||||
TAuditLogsInsert,
|
||||
TAuditLogStreams,
|
||||
@ -65,6 +68,9 @@ import {
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate,
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate,
|
||||
TExternalKms,
|
||||
TExternalKmsInsert,
|
||||
TExternalKmsUpdate,
|
||||
@ -299,6 +305,12 @@ import {
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate,
|
||||
TSecretRotationsV2,
|
||||
TSecretRotationsV2Insert,
|
||||
TSecretRotationsV2Update,
|
||||
TSecretRotationV2SecretMappings,
|
||||
TSecretRotationV2SecretMappingsInsert,
|
||||
TSecretRotationV2SecretMappingsUpdate,
|
||||
TSecrets,
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
@ -320,15 +332,27 @@ import {
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate,
|
||||
TSecretsUpdate,
|
||||
TSecretsV2,
|
||||
TSecretsV2Insert,
|
||||
TSecretsV2Update,
|
||||
TSecretSyncs,
|
||||
TSecretSyncsInsert,
|
||||
TSecretSyncsUpdate,
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate,
|
||||
TSecretTags,
|
||||
TSecretTagsInsert,
|
||||
TSecretTagsUpdate,
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate,
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate,
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update,
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate,
|
||||
@ -387,24 +411,6 @@ import {
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
} from "@app/db/schemas";
|
||||
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
|
||||
import {
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
} from "@app/db/schemas/external-group-org-role-mappings";
|
||||
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
|
||||
import {
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate
|
||||
} from "@app/db/schemas/secret-v2-tag-junction";
|
||||
import {
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update
|
||||
} from "@app/db/schemas/secret-versions-v2";
|
||||
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
|
||||
|
||||
declare module "knex" {
|
||||
namespace Knex {
|
||||
@ -950,5 +956,15 @@ declare module "knex/types/tables" {
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate
|
||||
>;
|
||||
[TableName.SecretRotationV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationsV2,
|
||||
TSecretRotationsV2Insert,
|
||||
TSecretRotationsV2Update
|
||||
>;
|
||||
[TableName.SecretRotationV2SecretMapping]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationV2SecretMappings,
|
||||
TSecretRotationV2SecretMappingsInsert,
|
||||
TSecretRotationV2SecretMappingsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
|
||||
const startDateStr = formatPartitionDate(startDate);
|
||||
const endDateStr = formatPartitionDate(endDate);
|
||||
|
||||
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
|
||||
const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
|
||||
|
||||
await knex.schema.raw(
|
||||
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
|
||||
|
@ -85,7 +85,7 @@ export async function up(knex: Knex): Promise<void> {
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId");
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
// not setting a foreign constraint so that cascade effects are not triggered
|
||||
if (!doesGatewayColExist) {
|
||||
|
@ -0,0 +1,31 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (!hasAuthConsentContentCol) {
|
||||
t.text("authConsentContent");
|
||||
}
|
||||
if (!hasPageFrameContentCol) {
|
||||
t.text("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (hasAuthConsentContentCol) {
|
||||
t.dropColumn("authConsentContent");
|
||||
}
|
||||
if (hasPageFrameContentCol) {
|
||||
t.dropColumn("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (!hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.string("description");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropColumn("description");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.string("comment");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.dropColumn("comment");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,45 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
|
||||
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
|
||||
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
|
||||
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
|
||||
if (!hasSecretVersionV2UserActorId) {
|
||||
t.uuid("userActorId");
|
||||
t.foreign("userActorId").references("id").inTable(TableName.Users);
|
||||
}
|
||||
if (!hasSecretVersionV2IdentityActorId) {
|
||||
t.uuid("identityActorId");
|
||||
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
|
||||
}
|
||||
if (!hasSecretVersionV2ActorType) {
|
||||
t.string("actorType");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
|
||||
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
|
||||
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
|
||||
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
|
||||
if (hasSecretVersionV2UserActorId) {
|
||||
t.dropColumn("userActorId");
|
||||
}
|
||||
if (hasSecretVersionV2IdentityActorId) {
|
||||
t.dropColumn("identityActorId");
|
||||
}
|
||||
if (hasSecretVersionV2ActorType) {
|
||||
t.dropColumn("actorType");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,32 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
|
||||
TableName.Organization,
|
||||
"allowSecretSharingOutsideOrganization"
|
||||
);
|
||||
|
||||
if (!hasSecretShareToAnyoneCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
|
||||
TableName.Organization,
|
||||
"allowSecretSharingOutsideOrganization"
|
||||
);
|
||||
if (hasSecretShareToAnyoneCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.dropColumn("allowSecretSharingOutsideOrganization");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("shouldUseNewPrivilegeSystem");
|
||||
t.string("privilegeUpgradeInitiatedByUsername");
|
||||
t.dateTime("privilegeUpgradeInitiatedAt");
|
||||
});
|
||||
|
||||
await knex(TableName.Organization).update({
|
||||
shouldUseNewPrivilegeSystem: false
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.dropColumn("shouldUseNewPrivilegeSystem");
|
||||
t.dropColumn("privilegeUpgradeInitiatedByUsername");
|
||||
t.dropColumn("privilegeUpgradeInitiatedAt");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
|
||||
if (!hasMappingField) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
t.jsonb("claimMetadataMapping");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
|
||||
if (hasMappingField) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
t.dropColumn("claimMetadataMapping");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas/models";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.specificType("adminIdentityIds", "text[]");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("adminIdentityIds");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
|
||||
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
|
||||
if (doesParentColumExist && doesNameColumnExist) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.index(["parentId", "name"]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
|
||||
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
|
||||
if (doesParentColumExist && doesNameColumnExist) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropIndex(["parentId", "name"]);
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasReviewerJwtCol = await knex.schema.hasColumn(
|
||||
TableName.IdentityKubernetesAuth,
|
||||
"encryptedKubernetesTokenReviewerJwt"
|
||||
);
|
||||
if (hasReviewerJwtCol) {
|
||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
||||
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(): Promise<void> {
|
||||
// we can't make it back to non nullable, it will fail
|
||||
}
|
@ -0,0 +1,29 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas/models";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
|
||||
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
|
||||
});
|
||||
}
|
||||
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
|
||||
t.dropColumn("allowedSelfApprovals");
|
||||
});
|
||||
}
|
||||
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.dropColumn("allowedSelfApprovals");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials"))) {
|
||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
||||
t.boolean("isPlatformManagedCredentials").defaultTo(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials")) {
|
||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
||||
t.dropColumn("isPlatformManagedCredentials");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,58 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.SecretRotationV2))) {
|
||||
await knex.schema.createTable(TableName.SecretRotationV2, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("name", 32).notNullable();
|
||||
t.string("description");
|
||||
t.string("type").notNullable();
|
||||
t.jsonb("parameters").notNullable();
|
||||
t.jsonb("secretsMapping").notNullable();
|
||||
t.binary("encryptedGeneratedCredentials").notNullable();
|
||||
t.boolean("isAutoRotationEnabled").notNullable().defaultTo(true);
|
||||
t.integer("activeIndex").notNullable().defaultTo(0);
|
||||
t.uuid("folderId").notNullable();
|
||||
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
|
||||
t.uuid("connectionId").notNullable();
|
||||
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
|
||||
t.timestamps(true, true, true);
|
||||
t.integer("rotationInterval").notNullable();
|
||||
t.jsonb("rotateAtUtc").notNullable(); // { hours: number; minutes: number }
|
||||
t.string("rotationStatus").notNullable();
|
||||
t.datetime("lastRotationAttemptedAt").notNullable();
|
||||
t.datetime("lastRotatedAt").notNullable();
|
||||
t.binary("encryptedLastRotationMessage"); // we encrypt this because it may contain sensitive info (SQL errors showing credentials)
|
||||
t.string("lastRotationJobId");
|
||||
t.datetime("nextRotationAt");
|
||||
t.boolean("isLastRotationManual").notNullable().defaultTo(true); // creation is considered a "manual" rotation
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.SecretRotationV2);
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
|
||||
t.unique(["folderId", "name"]);
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.SecretRotationV2SecretMapping))) {
|
||||
await knex.schema.createTable(TableName.SecretRotationV2SecretMapping, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("secretId").notNullable();
|
||||
// scott: this is deferred to block secret deletion but not prevent folder/environment/project deletion
|
||||
// ie, if rotation is being deleted as well we permit it, otherwise throw
|
||||
t.foreign("secretId").references("id").inTable(TableName.SecretV2).deferrable("deferred");
|
||||
t.uuid("rotationId").notNullable();
|
||||
t.foreign("rotationId").references("id").inTable(TableName.SecretRotationV2).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.SecretRotationV2SecretMapping);
|
||||
await knex.schema.dropTableIfExists(TableName.SecretRotationV2);
|
||||
await dropOnUpdateTrigger(knex, TableName.SecretRotationV2);
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
|
||||
if (!hasCol) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.datetime("lastSecretModified");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
|
||||
if (hasCol) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropColumn("lastSecretModified");
|
||||
});
|
||||
}
|
||||
}
|
@ -16,7 +16,8 @@ export const AccessApprovalPoliciesSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional()
|
||||
deletedAt: z.date().nullable().optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
});
|
||||
|
||||
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
|
||||
|
@ -19,7 +19,8 @@ export const AppConnectionsSchema = z.object({
|
||||
version: z.number().default(1),
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
isPlatformManagedCredentials: z.boolean().default(false).nullable().optional()
|
||||
});
|
||||
|
||||
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
|
||||
|
@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
|
||||
allowedNamespaces: z.string(),
|
||||
allowedNames: z.string(),
|
||||
allowedAudience: z.string(),
|
||||
encryptedKubernetesTokenReviewerJwt: zodBuffer,
|
||||
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
|
||||
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
|
@ -26,7 +26,8 @@ export const IdentityOidcAuthsSchema = z.object({
|
||||
boundSubject: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
encryptedCaCertificate: zodBuffer.nullable().optional()
|
||||
encryptedCaCertificate: zodBuffer.nullable().optional(),
|
||||
claimMetadataMapping: z.unknown().nullable().optional()
|
||||
});
|
||||
|
||||
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
|
||||
|
@ -3,6 +3,7 @@ export * from "./access-approval-policies-approvers";
|
||||
export * from "./access-approval-requests";
|
||||
export * from "./access-approval-requests-reviewers";
|
||||
export * from "./api-keys";
|
||||
export * from "./app-connections";
|
||||
export * from "./audit-log-streams";
|
||||
export * from "./audit-logs";
|
||||
export * from "./auth-token-sessions";
|
||||
@ -19,6 +20,7 @@ export * from "./certificate-templates";
|
||||
export * from "./certificates";
|
||||
export * from "./dynamic-secret-leases";
|
||||
export * from "./dynamic-secrets";
|
||||
export * from "./external-group-org-role-mappings";
|
||||
export * from "./external-kms";
|
||||
export * from "./gateways";
|
||||
export * from "./git-app-install-sessions";
|
||||
@ -97,13 +99,16 @@ export * from "./secret-references";
|
||||
export * from "./secret-references-v2";
|
||||
export * from "./secret-rotation-output-v2";
|
||||
export * from "./secret-rotation-outputs";
|
||||
export * from "./secret-rotation-v2-secret-mappings";
|
||||
export * from "./secret-rotations";
|
||||
export * from "./secret-rotations-v2";
|
||||
export * from "./secret-scanning-git-risks";
|
||||
export * from "./secret-sharing";
|
||||
export * from "./secret-snapshot-folders";
|
||||
export * from "./secret-snapshot-secrets";
|
||||
export * from "./secret-snapshot-secrets-v2";
|
||||
export * from "./secret-snapshots";
|
||||
export * from "./secret-syncs";
|
||||
export * from "./secret-tag-junction";
|
||||
export * from "./secret-tags";
|
||||
export * from "./secret-v2-tag-junction";
|
||||
|
@ -140,7 +140,9 @@ export enum TableName {
|
||||
KmipClient = "kmip_clients",
|
||||
KmipOrgConfig = "kmip_org_configs",
|
||||
KmipOrgServerCertificates = "kmip_org_server_certificates",
|
||||
KmipClientCertificates = "kmip_client_certificates"
|
||||
KmipClientCertificates = "kmip_client_certificates",
|
||||
SecretRotationV2 = "secret_rotations_v2",
|
||||
SecretRotationV2SecretMapping = "secret_rotation_v2_secret_mappings"
|
||||
}
|
||||
|
||||
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
||||
@ -233,3 +235,8 @@ export enum ActionProjectType {
|
||||
// project operations that happen on all types
|
||||
Any = "any"
|
||||
}
|
||||
|
||||
export enum SortDirection {
|
||||
ASC = "asc",
|
||||
DESC = "desc"
|
||||
}
|
||||
|
@ -22,7 +22,11 @@ export const OrganizationsSchema = z.object({
|
||||
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
|
||||
defaultMembershipRole: z.string().default("member"),
|
||||
enforceMfa: z.boolean().default(false),
|
||||
selectedMfaMethod: z.string().nullable().optional()
|
||||
selectedMfaMethod: z.string().nullable().optional(),
|
||||
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional(),
|
||||
shouldUseNewPrivilegeSystem: z.boolean().default(true),
|
||||
privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
|
||||
privilegeUpgradeInitiatedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TOrganizations = z.infer<typeof OrganizationsSchema>;
|
||||
|
@ -16,7 +16,8 @@ export const SecretApprovalPoliciesSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional()
|
||||
deletedAt: z.date().nullable().optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
});
|
||||
|
||||
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
|
||||
|
@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
|
||||
requestId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
reviewerUserId: z.string().uuid()
|
||||
reviewerUserId: z.string().uuid(),
|
||||
comment: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
|
||||
|
@ -15,7 +15,9 @@ export const SecretFoldersSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
envId: z.string().uuid(),
|
||||
parentId: z.string().uuid().nullable().optional(),
|
||||
isReserved: z.boolean().default(false).nullable().optional()
|
||||
isReserved: z.boolean().default(false).nullable().optional(),
|
||||
description: z.string().nullable().optional(),
|
||||
lastSecretModified: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
|
||||
|
23
backend/src/db/schemas/secret-rotation-v2-secret-mappings.ts
Normal file
23
backend/src/db/schemas/secret-rotation-v2-secret-mappings.ts
Normal file
@ -0,0 +1,23 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretRotationV2SecretMappingsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
secretId: z.string().uuid(),
|
||||
rotationId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TSecretRotationV2SecretMappings = z.infer<typeof SecretRotationV2SecretMappingsSchema>;
|
||||
export type TSecretRotationV2SecretMappingsInsert = Omit<
|
||||
z.input<typeof SecretRotationV2SecretMappingsSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TSecretRotationV2SecretMappingsUpdate = Partial<
|
||||
Omit<z.input<typeof SecretRotationV2SecretMappingsSchema>, TImmutableDBKeys>
|
||||
>;
|
39
backend/src/db/schemas/secret-rotations-v2.ts
Normal file
39
backend/src/db/schemas/secret-rotations-v2.ts
Normal file
@ -0,0 +1,39 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretRotationsV2Schema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
description: z.string().nullable().optional(),
|
||||
type: z.string(),
|
||||
parameters: z.unknown(),
|
||||
secretsMapping: z.unknown(),
|
||||
encryptedGeneratedCredentials: zodBuffer,
|
||||
isAutoRotationEnabled: z.boolean().default(true),
|
||||
activeIndex: z.number().default(0),
|
||||
folderId: z.string().uuid(),
|
||||
connectionId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
rotationInterval: z.number(),
|
||||
rotateAtUtc: z.unknown(),
|
||||
rotationStatus: z.string(),
|
||||
lastRotationAttemptedAt: z.date(),
|
||||
lastRotatedAt: z.date(),
|
||||
encryptedLastRotationMessage: zodBuffer.nullable().optional(),
|
||||
lastRotationJobId: z.string().nullable().optional(),
|
||||
nextRotationAt: z.date().nullable().optional(),
|
||||
isLastRotationManual: z.boolean().default(true)
|
||||
});
|
||||
|
||||
export type TSecretRotationsV2 = z.infer<typeof SecretRotationsV2Schema>;
|
||||
export type TSecretRotationsV2Insert = Omit<z.input<typeof SecretRotationsV2Schema>, TImmutableDBKeys>;
|
||||
export type TSecretRotationsV2Update = Partial<Omit<z.input<typeof SecretRotationsV2Schema>, TImmutableDBKeys>>;
|
@ -12,7 +12,6 @@ import { TImmutableDBKeys } from "./models";
|
||||
export const SecretSharingSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
encryptedValue: z.string().nullable().optional(),
|
||||
type: z.string(),
|
||||
iv: z.string().nullable().optional(),
|
||||
tag: z.string().nullable().optional(),
|
||||
hashedHex: z.string().nullable().optional(),
|
||||
@ -27,7 +26,8 @@ export const SecretSharingSchema = z.object({
|
||||
lastViewedAt: z.date().nullable().optional(),
|
||||
password: z.string().nullable().optional(),
|
||||
encryptedSecret: zodBuffer.nullable().optional(),
|
||||
identifier: z.string().nullable().optional()
|
||||
identifier: z.string().nullable().optional(),
|
||||
type: z.string().default("share")
|
||||
});
|
||||
|
||||
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
|
||||
|
@ -25,7 +25,10 @@ export const SecretVersionsV2Schema = z.object({
|
||||
folderId: z.string().uuid(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
userActorId: z.string().uuid().nullable().optional(),
|
||||
identityActorId: z.string().uuid().nullable().optional(),
|
||||
actorType: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;
|
||||
|
@ -23,7 +23,10 @@ export const SuperAdminSchema = z.object({
|
||||
defaultAuthOrgId: z.string().uuid().nullable().optional(),
|
||||
enabledLoginMethods: z.string().array().nullable().optional(),
|
||||
encryptedSlackClientId: zodBuffer.nullable().optional(),
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional()
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
|
||||
authConsentContent: z.string().nullable().optional(),
|
||||
pageFrameContent: z.string().nullable().optional(),
|
||||
adminIdentityIds: z.string().array().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
|
||||
|
@ -16,7 +16,7 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =
|
||||
// for CSRs sent in PEM, we leave them as is
|
||||
// for CSRs sent in base64, we preprocess them to remove new lines and spaces
|
||||
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
|
||||
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
|
||||
csrBody = csrBody.replaceAll("\n", "").replaceAll(" ", "");
|
||||
}
|
||||
|
||||
done(null, csrBody);
|
||||
|
@ -29,7 +29,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -147,7 +148,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).optional(),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -110,7 +110,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
secretPath: z.string().nullish(),
|
||||
envId: z.string(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
deletedAt: z.date().nullish(),
|
||||
allowedSelfApprovals: z.boolean()
|
||||
}),
|
||||
reviewers: z
|
||||
.object({
|
||||
|
@ -1,10 +1,10 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
|
@ -1,4 +1,3 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
@ -6,6 +5,7 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
|
||||
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
|
@ -1,11 +1,11 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
|
||||
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
|
@ -1,10 +1,10 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { KmipClientsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
|
||||
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
|
@ -61,8 +61,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
if (ldapConfig.groupSearchBase) {
|
||||
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
|
||||
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
|
||||
.replace(/{{\.Username}}/g, user.uid)
|
||||
.replace(/{{\.UserDN}}/g, user.dn);
|
||||
.replaceAll("{{.Username}}", user.uid)
|
||||
.replaceAll("{{.UserDN}}", user.dn);
|
||||
|
||||
if (!isValidLdapFilter(groupSearchFilter)) {
|
||||
throw new Error("Generated LDAP search filter is invalid.");
|
||||
|
@ -25,7 +25,7 @@ type TSAMLConfig = {
|
||||
callbackUrl: string;
|
||||
entryPoint: string;
|
||||
issuer: string;
|
||||
cert: string;
|
||||
idpCert: string;
|
||||
audience: string;
|
||||
wantAuthnResponseSigned?: boolean;
|
||||
wantAssertionsSigned?: boolean;
|
||||
@ -72,7 +72,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
|
||||
entryPoint: ssoConfig.entryPoint,
|
||||
issuer: ssoConfig.issuer,
|
||||
cert: ssoConfig.cert,
|
||||
idpCert: ssoConfig.cert,
|
||||
audience: appCfg.SITE_URL || ""
|
||||
};
|
||||
if (ssoConfig.authProvider === SamlProviders.JUMPCLOUD_SAML) {
|
||||
@ -302,15 +302,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const saml = await server.services.saml.createSamlCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
orgId: req.body.organizationId,
|
||||
...req.body
|
||||
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
|
||||
const { permission } = req;
|
||||
|
||||
return server.services.saml.createSamlCfg({
|
||||
isActive,
|
||||
authProvider,
|
||||
issuer,
|
||||
entryPoint,
|
||||
idpCert: cert,
|
||||
actor: permission.type,
|
||||
actorId: permission.id,
|
||||
actorAuthMethod: permission.authMethod,
|
||||
actorOrgId: permission.orgId,
|
||||
orgId: req.body.organizationId
|
||||
});
|
||||
return saml;
|
||||
}
|
||||
});
|
||||
|
||||
@ -337,15 +343,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const saml = await server.services.saml.updateSamlCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
orgId: req.body.organizationId,
|
||||
...req.body
|
||||
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
|
||||
const { permission } = req;
|
||||
|
||||
return server.services.saml.updateSamlCfg({
|
||||
isActive,
|
||||
authProvider,
|
||||
issuer,
|
||||
entryPoint,
|
||||
idpCert: cert,
|
||||
actor: permission.type,
|
||||
actorId: permission.id,
|
||||
actorAuthMethod: permission.authMethod,
|
||||
actorOrgId: permission.orgId,
|
||||
orgId: req.body.organizationId
|
||||
});
|
||||
return saml;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -35,7 +35,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -85,7 +86,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val))
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@@ -1,16 +1,11 @@
import { z } from "zod";

import {
SecretApprovalRequestsReviewersSchema,
SecretApprovalRequestsSchema,
SecretTagsSchema,
UsersSchema
} from "@app/db/schemas";
import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";

@@ -54,7 +49,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@@ -159,7 +155,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
}),
response: {
200: z.object({
@@ -175,8 +172,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
status: req.body.status,
comment: req.body.comment
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});

return { review };
}
});
@@ -232,14 +246,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}
});

const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.array()
.optional();

server.route({
method: "GET",
url: "/:id",
@@ -262,18 +268,21 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
.extend({
secretValue: z.string().optional(),
isRotatedSecret: z.boolean().optional(),
op: z.string(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
@@ -292,7 +301,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish()
})
.optional()
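The `commits` schema above drops `secretValue` from the shared `secretRawSchema` and re-adds it as optional, so a commit can be returned without its value. A small illustration of that omit-then-extend Zod pattern; the base schema below is a simplified stand-in, not the project's actual `secretRawSchema`:

```ts
import { z } from "zod";

// Simplified stand-in for secretRawSchema from "@app/server/routes/sanitizedSchemas".
const secretRawSchema = z.object({
  id: z.string(),
  secretKey: z.string(),
  secretValue: z.string()
});

// Drop the required field, then re-introduce it as optional alongside commit metadata.
const commitSchema = secretRawSchema.omit({ secretValue: true }).extend({
  secretValue: z.string().optional(), // may be left out of the payload
  op: z.string(),
  isRotatedSecret: z.boolean().optional()
});

console.log(commitSchema.parse({ id: "1", secretKey: "DB_URL", op: "create" }));
```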
@@ -1,6 +1,7 @@
import { z } from "zod";

import { SecretRotationOutputsSchema, SecretRotationsSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -40,16 +41,10 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const secretRotation = await server.services.secretRotation.createRotation({
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
...req.body,
projectId: req.body.workspaceId
handler: async () => {
throw new BadRequestError({
message: `This version of Secret Rotations has been deprecated. Please see docs for new version.`
});
return { secretRotation };
}
});
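The v1 create-rotation handler above no longer calls the rotation service; it only rejects the request so old clients get a clear error rather than a 404. A minimal sketch of the same deprecation pattern on a plain Fastify route; the URL and error wiring here are illustrative, not taken from the repo:

```ts
import Fastify from "fastify";

const server = Fastify();

// Keep the deprecated route registered, but refuse to perform the old operation.
server.post("/api/v1/secret-rotations", async () => {
  const err = new Error(
    "This version of Secret Rotations has been deprecated. Please see docs for new version."
  ) as Error & { statusCode: number };
  err.statusCode = 400; // Fastify maps a thrown error's statusCode onto the HTTP response
  throw err;
});
```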
@@ -1,6 +1,6 @@
import z from "zod";

import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
@@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";

const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()
@@ -22,7 +22,11 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
secretVersions: secretRawSchema.array()
secretVersions: secretRawSchema
.extend({
secretValueHidden: z.boolean()
})
.array()
})
}
},
@@ -37,6 +41,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
offset: req.query.offset,
secretId: req.params.secretId
});

return { secretVersions };
}
});
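Each returned secret version now carries a `secretValueHidden` flag alongside the raw secret fields. A compact sketch of one way a response shaped like this could be produced, with the permission check reduced to a boolean parameter; the helper is illustrative, not the service's actual implementation:

```ts
type SecretVersion = { id: string; secretKey: string; secretValue?: string };

// Illustrative helper: blank out values the caller may not read and flag them as hidden.
const toVersionResponse = (versions: SecretVersion[], canReadValue: boolean) =>
  versions.map((v) => ({
    ...v,
    secretValue: canReadValue ? v.secretValue : "",
    secretValueHidden: !canReadValue
  }));

console.log(toVersionResponse([{ id: "1", secretKey: "TOKEN", secretValue: "abc" }], false));
```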
@@ -1,10 +1,10 @@
import { z } from "zod";

import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
import { SecretSnapshotsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@@ -31,12 +31,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
secretVersions: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true })
.extend({
secretValueHidden: z.boolean(),
secretId: z.string(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
}).array()
tags: SanitizedTagSchema.array(),
isRotatedSecret: z.boolean().optional()
})
.array(),
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@@ -55,6 +53,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});

return { secretSnapshot };
}
});
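Several routers in this changeset replace inline `SecretTagsSchema.pick(...)` blocks with a shared `SanitizedTagSchema`. The shared schema plausibly keeps the same three fields the inline picks used (`id`, `slug`, `color`); a sketch of defining it once and reusing it, with the base schema standing in for the generated DB schema:

```ts
import { z } from "zod";

// Stand-in for the generated SecretTagsSchema from "@app/db/schemas".
const SecretTagsSchema = z.object({
  id: z.string(),
  slug: z.string(),
  color: z.string().nullable(),
  projectId: z.string(),
  createdAt: z.date()
});

// One sanitized shape, reused by every route instead of repeating .pick() inline.
const SanitizedTagSchema = SecretTagsSchema.pick({ id: true, slug: true, color: true });

const responseTags = SanitizedTagSchema.array().optional();
console.log(responseTags.parse([{ id: "t1", slug: "prod", color: "#f00" }]));
```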
@ -1,13 +1,15 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
|
||||
|
||||
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@ -73,6 +75,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SignSshKey,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
certificateTemplateId: req.body.certificateTemplateId,
|
||||
principals: req.body.principals,
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
signedKey: signedPublicKey
|
||||
@ -152,6 +164,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.IssueSshCreds,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
certificateTemplateId: req.body.certificateTemplateId,
|
||||
principals: req.body.principals,
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
signedKey: signedPublicKey,
|
||||
|
@ -1,5 +1,4 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
@ -10,6 +9,7 @@ import {
|
||||
isValidUserPattern
|
||||
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
|
||||
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
@ -1,10 +1,11 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
|
||||
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
@ -23,7 +24,9 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
body: z.object({
|
||||
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
isTemporary: z.literal(false)
|
||||
@ -81,7 +84,8 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
|
||||
z.object({
|
||||
|
@ -1,10 +1,11 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
|
||||
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
@ -30,7 +31,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
|
||||
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
isTemporary: z.literal(false)
|
||||
@ -94,7 +97,8 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
|
||||
.refine(checkForInvalidPermissionCombination),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
|
||||
z.object({
|
||||
|
@@ -1,3 +1,8 @@
import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";

import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";

@@ -13,4 +18,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerIdentityProjectAdditionalPrivilegeRouter, {
prefix: "/identity-project-additional-privilege"
});

await server.register(
async (secretRotationV2Router) => {
// register generic secret rotation endpoints
await secretRotationV2Router.register(registerSecretRotationV2Router);

// register service specific secret rotation endpoints (secret-rotations/postgres-credentials, etc.)
for await (const [type, router] of Object.entries(SECRET_ROTATION_REGISTER_ROUTER_MAP)) {
await secretRotationV2Router.register(router, { prefix: `/${type}` });
}
},
{ prefix: "/secret-rotations" }
);
};
@ -2,6 +2,7 @@ import { packRules } from "@casl/ability/extra";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -37,7 +38,9 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(PROJECT_ROLE.CREATE.permissions)
|
||||
.refine(checkForInvalidPermissionCombination)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -92,7 +95,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
.describe(PROJECT_ROLE.UPDATE.slug),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.describe(PROJECT_ROLE.UPDATE.permissions)
|
||||
.optional()
|
||||
.superRefine(checkForInvalidPermissionCombination)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
backend/src/ee/routes/v2/secret-rotation-v2-routers/index.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";

import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";

export * from "./secret-rotation-v2-router";

export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
SecretRotation,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter
};
@@ -0,0 +1,19 @@
import {
CreateMsSqlCredentialsRotationSchema,
MsSqlCredentialsRotationSchema,
UpdateMsSqlCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";

import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";

export const registerMsSqlCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.MsSqlCredentials,
server,
responseSchema: MsSqlCredentialsRotationSchema,
createSchema: CreateMsSqlCredentialsRotationSchema,
updateSchema: UpdateMsSqlCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});
@@ -0,0 +1,19 @@
import {
CreatePostgresCredentialsRotationSchema,
PostgresCredentialsRotationSchema,
UpdatePostgresCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";

import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";

export const registerPostgresCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.PostgresCredentials,
server,
responseSchema: PostgresCredentialsRotationSchema,
createSchema: CreatePostgresCredentialsRotationSchema,
updateSchema: UpdatePostgresCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});
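Both provider routers above are thin wrappers: each hands its Zod schemas and rotation type to a shared `registerSecretRotationEndpoints` factory, which is added later in this diff. A stripped-down sketch of that schema-driven registration pattern, assuming plain Fastify without the project's Zod type provider; the real factory also wires auth, rate limits, and audit logging:

```ts
import Fastify from "fastify";
import { z } from "zod";

// Minimal sketch of a schema-driven endpoint factory; all provider-specific detail
// arrives as configuration, so every rotation type shares one set of route handlers.
const registerRotationEndpoints = (opts: {
  server: ReturnType<typeof Fastify>;
  type: string;
  responseSchema: z.ZodTypeAny;
}) => {
  const { server, responseSchema } = opts;

  server.get("/", async () => {
    // A real handler would call the rotation service scoped to opts.type.
    return { secretRotations: responseSchema.array().parse([]) };
  });
};

const server = Fastify();
registerRotationEndpoints({ server, type: "postgres-credentials", responseSchema: z.object({ id: z.string() }) });
```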
@ -0,0 +1,429 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
|
||||
import {
|
||||
TRotateAtUtc,
|
||||
TSecretRotationV2,
|
||||
TSecretRotationV2GeneratedCredentials,
|
||||
TSecretRotationV2Input
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
import { SecretRotations } from "@app/lib/api-docs";
|
||||
import { startsWithVowel } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerSecretRotationEndpoints = <
|
||||
T extends TSecretRotationV2,
|
||||
I extends TSecretRotationV2Input,
|
||||
C extends TSecretRotationV2GeneratedCredentials
|
||||
>({
|
||||
server,
|
||||
type,
|
||||
createSchema,
|
||||
updateSchema,
|
||||
responseSchema,
|
||||
generatedCredentialsSchema
|
||||
}: {
|
||||
type: SecretRotation;
|
||||
server: FastifyZodProvider;
|
||||
createSchema: z.ZodType<{
|
||||
name: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
projectId: string;
|
||||
connectionId: string;
|
||||
parameters: I["parameters"];
|
||||
secretsMapping: I["secretsMapping"];
|
||||
description?: string | null;
|
||||
isAutoRotationEnabled?: boolean;
|
||||
rotationInterval: number;
|
||||
rotateAtUtc?: TRotateAtUtc;
|
||||
}>;
|
||||
updateSchema: z.ZodType<{
|
||||
connectionId?: string;
|
||||
name?: string;
|
||||
environment?: string;
|
||||
secretPath?: string;
|
||||
parameters?: I["parameters"];
|
||||
secretsMapping?: I["secretsMapping"];
|
||||
description?: string | null;
|
||||
isAutoRotationEnabled?: boolean;
|
||||
rotationInterval?: number;
|
||||
rotateAtUtc?: TRotateAtUtc;
|
||||
}>;
|
||||
responseSchema: z.ZodTypeAny;
|
||||
generatedCredentialsSchema: z.ZodTypeAny;
|
||||
}) => {
|
||||
const rotationType = SECRET_ROTATION_NAME_MAP[type];
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: `List the ${rotationType} Rotations for the specified project.`,
|
||||
querystring: z.object({
|
||||
projectId: z.string().trim().min(1, "Project ID required").describe(SecretRotations.LIST(type).projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ secretRotations: responseSchema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const {
|
||||
query: { projectId }
|
||||
} = req;
|
||||
|
||||
const secretRotations = (await server.services.secretRotationV2.listSecretRotationsByProjectId(
|
||||
{ projectId, type },
|
||||
req.permission
|
||||
)) as T[];
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.GET_SECRET_ROTATIONS,
|
||||
metadata: {
|
||||
type,
|
||||
count: secretRotations.length,
|
||||
rotationIds: secretRotations.map((rotation) => rotation.id)
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotations };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:rotationId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Get the specified ${rotationType} Rotation by ID.`,
|
||||
params: z.object({
|
||||
rotationId: z.string().uuid().describe(SecretRotations.GET_BY_ID(type).rotationId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ secretRotation: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { rotationId } = req.params;
|
||||
|
||||
const secretRotation = (await server.services.secretRotationV2.findSecretRotationById(
|
||||
{ rotationId, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: secretRotation.projectId,
|
||||
event: {
|
||||
type: EventType.GET_SECRET_ROTATION,
|
||||
metadata: {
|
||||
rotationId,
|
||||
type,
|
||||
secretPath: secretRotation.folder.path,
|
||||
environment: secretRotation.environment.slug
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotation };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: `/rotation-name/:rotationName`,
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Get the specified ${rotationType} Rotation by name, secret path, environment and project ID.`,
|
||||
params: z.object({
|
||||
rotationName: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Rotation name required")
|
||||
.describe(SecretRotations.GET_BY_NAME(type).rotationName)
|
||||
}),
|
||||
querystring: z.object({
|
||||
projectId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Project ID required")
|
||||
.describe(SecretRotations.GET_BY_NAME(type).projectId),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Secret path required")
|
||||
.describe(SecretRotations.GET_BY_NAME(type).secretPath),
|
||||
environment: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Environment required")
|
||||
.describe(SecretRotations.GET_BY_NAME(type).environment)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ secretRotation: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { rotationName } = req.params;
|
||||
const { projectId, secretPath, environment } = req.query;
|
||||
|
||||
const secretRotation = (await server.services.secretRotationV2.findSecretRotationByName(
|
||||
{ rotationName, projectId, type, secretPath, environment },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.GET_SECRET_ROTATION,
|
||||
metadata: {
|
||||
rotationId: secretRotation.id,
|
||||
type,
|
||||
secretPath,
|
||||
environment
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotation };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Create ${
|
||||
startsWithVowel(rotationType) ? "an" : "a"
|
||||
} ${rotationType} Rotation for the specified project.`,
|
||||
body: createSchema,
|
||||
response: {
|
||||
200: z.object({ secretRotation: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const secretRotation = (await server.services.secretRotationV2.createSecretRotation(
|
||||
{ ...req.body, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: secretRotation.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_SECRET_ROTATION,
|
||||
metadata: {
|
||||
rotationId: secretRotation.id,
|
||||
type,
|
||||
...req.body
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotation };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:rotationId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Update the specified ${rotationType} Rotation.`,
|
||||
params: z.object({
|
||||
rotationId: z.string().uuid().describe(SecretRotations.UPDATE(type).rotationId)
|
||||
}),
|
||||
body: updateSchema,
|
||||
response: {
|
||||
200: z.object({ secretRotation: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { rotationId } = req.params;
|
||||
|
||||
const secretRotation = (await server.services.secretRotationV2.updateSecretRotation(
|
||||
{ ...req.body, rotationId, type },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: secretRotation.projectId,
|
||||
event: {
|
||||
type: EventType.UPDATE_SECRET_ROTATION,
|
||||
metadata: {
|
||||
rotationId,
|
||||
type,
|
||||
...req.body
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotation };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: `/:rotationId`,
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Delete the specified ${rotationType} Rotation.`,
|
||||
params: z.object({
|
||||
rotationId: z.string().uuid().describe(SecretRotations.DELETE(type).rotationId)
|
||||
}),
|
||||
querystring: z.object({
|
||||
deleteSecrets: z
|
||||
.enum(["true", "false"])
|
||||
.transform((value) => value === "true")
|
||||
.describe(SecretRotations.DELETE(type).deleteSecrets),
|
||||
revokeGeneratedCredentials: z
|
||||
.enum(["true", "false"])
|
||||
.transform((value) => value === "true")
|
||||
.describe(SecretRotations.DELETE(type).revokeGeneratedCredentials)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ secretRotation: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { rotationId } = req.params;
|
||||
const { deleteSecrets, revokeGeneratedCredentials } = req.query;
|
||||
|
||||
const secretRotation = (await server.services.secretRotationV2.deleteSecretRotation(
|
||||
{ type, rotationId, deleteSecrets, revokeGeneratedCredentials },
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: secretRotation.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_SECRET_ROTATION,
|
||||
metadata: {
|
||||
type,
|
||||
rotationId,
|
||||
deleteSecrets,
|
||||
revokeGeneratedCredentials
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotation };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:rotationId/generated-credentials",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Get the generated credentials for the specified ${rotationType} Rotation.`,
|
||||
params: z.object({
|
||||
rotationId: z.string().uuid().describe(SecretRotations.GET_GENERATED_CREDENTIALS_BY_ID(type).rotationId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
generatedCredentials: generatedCredentialsSchema,
|
||||
activeIndex: z.number(),
|
||||
rotationId: z.string().uuid(),
|
||||
type: z.literal(type)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { rotationId } = req.params;
|
||||
|
||||
const {
|
||||
generatedCredentials,
|
||||
secretRotation: { activeIndex, projectId, folder, environment }
|
||||
} = await server.services.secretRotationV2.findSecretRotationGeneratedCredentialsById(
|
||||
{
|
||||
rotationId,
|
||||
type
|
||||
},
|
||||
req.permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS,
|
||||
metadata: {
|
||||
type,
|
||||
rotationId,
|
||||
secretPath: folder.path,
|
||||
environment: environment.slug
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { generatedCredentials: generatedCredentials as C, activeIndex, rotationId, type };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:rotationId/rotate-secrets",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: `Rotate the generated credentials for the specified ${rotationType} Rotation.`,
|
||||
params: z.object({
|
||||
rotationId: z.string().uuid().describe(SecretRotations.ROTATE(type).rotationId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ secretRotation: responseSchema })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { rotationId } = req.params;
|
||||
|
||||
const secretRotation = (await server.services.secretRotationV2.rotateSecretRotation(
|
||||
{
|
||||
rotationId,
|
||||
type,
|
||||
auditLogInfo: req.auditLogInfo
|
||||
},
|
||||
req.permission
|
||||
)) as T;
|
||||
|
||||
return { secretRotation };
|
||||
}
|
||||
});
|
||||
};
|
@ -0,0 +1,81 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
|
||||
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
|
||||
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
|
||||
import { SecretRotations } from "@app/lib/api-docs";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
|
||||
PostgresCredentialsRotationListItemSchema,
|
||||
MsSqlCredentialsRotationListItemSchema
|
||||
]);
|
||||
|
||||
export const registerSecretRotationV2Router = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/options",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "List the available Secret Rotation Options.",
|
||||
response: {
|
||||
200: z.object({
|
||||
secretRotationOptions: SecretRotationV2OptionsSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: () => {
|
||||
const secretRotationOptions = server.services.secretRotationV2.listSecretRotationOptions();
|
||||
return { secretRotationOptions };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "List all the Secret Rotations for the specified project.",
|
||||
querystring: z.object({
|
||||
projectId: z.string().trim().min(1, "Project ID required").describe(SecretRotations.LIST().projectId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({ secretRotations: SecretRotationV2Schema.array() })
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const {
|
||||
query: { projectId },
|
||||
permission
|
||||
} = req;
|
||||
|
||||
const secretRotations = await server.services.secretRotationV2.listSecretRotationsByProjectId(
|
||||
{ projectId },
|
||||
permission
|
||||
);
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId,
|
||||
event: {
|
||||
type: EventType.GET_SECRET_ROTATIONS,
|
||||
metadata: {
|
||||
rotationIds: secretRotations.map((sync) => sync.id),
|
||||
count: secretRotations.length
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { secretRotations };
|
||||
}
|
||||
});
|
||||
};
|
@ -65,7 +65,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvers,
|
||||
projectSlug,
|
||||
environment,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TCreateAccessApprovalPolicy) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
@ -153,7 +154,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -216,7 +218,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
approvals,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TUpdateAccessApprovalPolicy) => {
|
||||
const groupApprovers = approvers
|
||||
.filter((approver) => approver.type === ApproverType.Group)
|
||||
@ -262,7 +265,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
enforcementLevel
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@ -26,6 +26,7 @@ export type TCreateAccessApprovalPolicy = {
|
||||
projectSlug: string;
|
||||
name: string;
|
||||
enforcementLevel: EnforcementLevel;
|
||||
allowedSelfApprovals: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TUpdateAccessApprovalPolicy = {
|
||||
@ -35,6 +36,7 @@ export type TUpdateAccessApprovalPolicy = {
|
||||
secretPath?: string;
|
||||
name?: string;
|
||||
enforcementLevel?: EnforcementLevel;
|
||||
allowedSelfApprovals: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TDeleteAccessApprovalPolicy = {
|
||||
|
@ -61,6 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
)
|
||||
@ -119,6 +120,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approvals: doc.policyApprovals,
|
||||
secretPath: doc.policySecretPath,
|
||||
enforcementLevel: doc.policyEnforcementLevel,
|
||||
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
|
||||
envId: doc.policyEnvId,
|
||||
deletedAt: doc.policyDeletedAt
|
||||
},
|
||||
@ -254,6 +256,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
|
||||
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
);
|
||||
@ -275,6 +278,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
allowedSelfApprovals: el.policyAllowedSelfApprovals,
|
||||
deletedAt: el.policyDeletedAt
|
||||
},
|
||||
requestedByUser: {
|
||||
|
@@ -1,9 +1,10 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import msFn from "ms";

import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -246,7 +247,7 @@ export const accessApprovalRequestServiceFactory = ({
requesterEmail: requestedByUser.email,
isTemporary,
...(isTemporary && {
expiresIn: ms(ms(temporaryRange || ""), { long: true })
expiresIn: msFn(ms(temporaryRange || ""), { long: true })
}),
secretPath,
environment: envSlug,
@@ -319,6 +320,11 @@ export const accessApprovalRequestServiceFactory = ({
message: "The policy associated with this access request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own request."
});
}

const { membership, hasRole } = await permissionService.getProjectPermission({
actor,
@@ -45,7 +45,6 @@ export const auditLogStreamServiceFactory = ({
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });

const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams) {
throw new BadRequestError({
@@ -62,9 +61,8 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
const appCfg = getConfig();
if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);

const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@@ -135,9 +133,8 @@ export const auditLogStreamServiceFactory = ({
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

const appCfg = getConfig();
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);

// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
@@ -9,13 +9,14 @@ import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";

import { EventType } from "./audit-log-types";
import { EventType, filterableSecretEvents } from "./audit-log-types";

export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;

type TFindQuery = {
actor?: string;
projectId?: string;
environment?: string;
orgId?: string;
eventType?: string;
startDate?: string;
@@ -32,6 +33,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
{
orgId,
projectId,
environment,
userAgentType,
startDate,
endDate,
@@ -40,12 +42,14 @@ export const auditLogDALFactory = (db: TDbClient) => {
actorId,
actorType,
secretPath,
secretKey,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@@ -90,8 +94,29 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}

if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
const eventIsSecretType = !eventType?.length || eventType.some((event) => filterableSecretEvents.includes(event));
// We only want to filter for environment/secretPath/secretKey if the user is either checking for all event types

// ? Note(daniel): use the `eventMetadata" @> ?::jsonb` approach to properly use our GIN index
if (projectId && eventIsSecretType) {
if (environment || secretPath) {
// Handle both environment and secret path together to only use the GIN index once
void sqlQuery.whereRaw(`"eventMetadata" @> ?::jsonb`, [
JSON.stringify({
...(environment && { environment }),
...(secretPath && { secretPath })
})
]);
}

// Handle secret key separately to include the OR condition
if (secretKey) {
void sqlQuery.whereRaw(
`("eventMetadata" @> ?::jsonb
OR "eventMetadata"->'secrets' @> ?::jsonb)`,
[JSON.stringify({ secretKey }), JSON.stringify([{ secretKey }])]
);
}
}

// Filter by actor type
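The new filter above builds a single `@>` containment predicate over `eventMetadata` so PostgreSQL can answer it from the GIN index. A self-contained sketch of the same predicate with Knex; the table name, column names, and connection string below are placeholders, not the repo's actual schema constants:

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: { connectionString: "postgres://localhost/placeholder" } });

// One containment predicate covering both environment and secretPath lets the planner
// satisfy the filter with a single GIN index probe on "eventMetadata".
const buildQuery = (projectId: string, environment?: string, secretPath?: string) => {
  const query = db("audit_logs").where({ projectId });
  if (environment || secretPath) {
    void query.whereRaw(`"eventMetadata" @> ?::jsonb`, [
      JSON.stringify({ ...(environment && { environment }), ...(secretPath && { secretPath }) })
    ]);
  }
  return query.toString();
};

console.log(buildQuery("project-1", "prod", "/api"));
```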
@@ -1,8 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";

import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
@@ -61,6 +63,8 @@ export const auditLogServiceFactory = ({
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
secretKey: filter.secretKey,
environment: filter.environment,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});

@@ -81,8 +85,12 @@ export const auditLogServiceFactory = ({
if (!data.projectId && !data.orgId)
throw new BadRequestError({ message: "Must specify either project id or org id" });
}

return auditLogQueue.pushToLog(data);
const el = { ...data };
if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
const permissionMetadata = requestContext.get("identityPermissionMetadata");
el.actor.metadata.permission = permissionMetadata;
}
return auditLogQueue.pushToLog(el);
};

return {
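`createAuditLog` now copies permission metadata from the Fastify request context onto user and identity actors before queueing the log. A minimal sketch of stashing and reading a value with `@fastify/request-context`; the key name mirrors the diff, while the payload shape and key typing (normally declared via module augmentation) are assumptions:

```ts
import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const server = Fastify();
await server.register(fastifyRequestContext);

// Early hook stores per-request permission metadata under the key the audit-log service reads.
server.addHook("onRequest", async () => {
  requestContext.set("identityPermissionMetadata", { role: "member" }); // assumed shape
});

server.get("/demo", async () => {
  // Later consumers (such as an audit-log service) read it back without threading it through arguments.
  return { permission: requestContext.get("identityPermissionMetadata") };
});
```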
@ -2,6 +2,13 @@ import {
|
||||
TCreateProjectTemplateDTO,
|
||||
TUpdateProjectTemplateDTO
|
||||
} from "@app/ee/services/project-template/project-template-types";
|
||||
import { SecretRotation, SecretRotationStatus } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||
import {
|
||||
TCreateSecretRotationV2DTO,
|
||||
TDeleteSecretRotationV2DTO,
|
||||
TSecretRotationV2Raw,
|
||||
TUpdateSecretRotationV2DTO
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
|
||||
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
@ -22,6 +29,7 @@ import {
|
||||
} from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
import { KmipPermission } from "../kmip/kmip-enum";
|
||||
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
filter: {
|
||||
@ -32,9 +40,11 @@ export type TListProjectAuditLogDTO = {
|
||||
endDate?: string;
|
||||
startDate?: string;
|
||||
projectId?: string;
|
||||
environment?: string;
|
||||
auditLogActorId?: string;
|
||||
actorType?: ActorType;
|
||||
secretPath?: string;
|
||||
secretKey?: string;
|
||||
eventMetadata?: Record<string, string>;
|
||||
};
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
@ -53,6 +63,8 @@ export type TCreateAuditLogDTO = {
|
||||
projectId?: string;
|
||||
} & BaseAuthData;
|
||||
|
||||
export type AuditLogInfo = Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
|
||||
interface BaseAuthData {
|
||||
ipAddress?: string;
|
||||
userAgent?: string;
|
||||
@ -165,6 +177,7 @@ export enum EventType {
|
||||
SECRET_APPROVAL_REQUEST = "secret-approval-request",
|
||||
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
|
||||
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
|
||||
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
|
||||
SIGN_SSH_KEY = "sign-ssh-key",
|
||||
ISSUE_SSH_CREDS = "issue-ssh-creds",
|
||||
CREATE_SSH_CA = "create-ssh-certificate-authority",
|
||||
@ -281,13 +294,34 @@ export enum EventType {
|
||||
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
|
||||
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
|
||||
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
|
||||
KMIP_OPERATION_REGISTER = "kmip-operation-register"
|
||||
KMIP_OPERATION_REGISTER = "kmip-operation-register",
|
||||
|
||||
GET_SECRET_ROTATIONS = "get-secret-rotations",
|
||||
GET_SECRET_ROTATION = "get-secret-rotation",
|
||||
GET_SECRET_ROTATION_GENERATED_CREDENTIALS = "get-secret-rotation-generated-credentials",
|
||||
CREATE_SECRET_ROTATION = "create-secret-rotation",
|
||||
UPDATE_SECRET_ROTATION = "update-secret-rotation",
|
||||
DELETE_SECRET_ROTATION = "delete-secret-rotation",
|
||||
SECRET_ROTATION_ROTATE_SECRETS = "secret-rotation-rotate-secrets",
|
||||
|
||||
PROJECT_ACCESS_REQUEST = "project-access-request"
|
||||
}
|
||||
|
||||
export const filterableSecretEvents: EventType[] = [
|
||||
EventType.GET_SECRET,
|
||||
EventType.DELETE_SECRETS,
|
||||
EventType.CREATE_SECRETS,
|
||||
EventType.UPDATE_SECRETS,
|
||||
EventType.CREATE_SECRET,
|
||||
EventType.UPDATE_SECRET,
|
||||
EventType.DELETE_SECRET
|
||||
];
|
||||
|
||||
interface UserActorMetadata {
|
||||
userId: string;
|
||||
email?: string | null;
|
||||
username: string;
|
||||
permission?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface ServiceActorMetadata {
|
||||
@ -298,6 +332,7 @@ interface ServiceActorMetadata {
|
||||
interface IdentityActorMetadata {
|
||||
identityId: string;
|
||||
name: string;
|
||||
permission?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface ScimClientActorMetadata {}
|
||||
@ -964,6 +999,7 @@ interface LoginIdentityOidcAuthEvent {
|
||||
identityId: string;
|
||||
identityOidcAuthId: string;
|
||||
identityAccessTokenId: string;
|
||||
oidcClaimsReceived: Record<string, unknown>;
|
||||
};
|
||||
}
|
||||
|
||||
@ -976,6 +1012,7 @@ interface AddIdentityOidcAuthEvent {
|
||||
boundIssuer: string;
|
||||
boundAudiences: string;
|
||||
boundClaims: Record<string, string>;
|
||||
claimMetadataMapping: Record<string, string>;
|
||||
boundSubject: string;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
@ -1000,6 +1037,7 @@ interface UpdateIdentityOidcAuthEvent {
|
||||
boundIssuer?: string;
|
||||
boundAudiences?: string;
|
||||
boundClaims?: Record<string, string>;
|
||||
claimMetadataMapping?: Record<string, string>;
|
||||
boundSubject?: string;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
@ -1142,6 +1180,7 @@ interface CreateFolderEvent {
|
||||
folderId: string;
|
||||
folderName: string;
|
||||
folderPath: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
@ -1313,6 +1352,16 @@ interface SecretApprovalRequest {
|
||||
};
|
||||
}
|
||||
|
||||
interface SecretApprovalRequestReview {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
|
||||
metadata: {
|
||||
secretApprovalRequestId: string;
|
||||
reviewedBy: string;
|
||||
status: ApprovalStatus;
|
||||
comment: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface SignSshKey {
|
||||
type: EventType.SIGN_SSH_KEY;
|
||||
metadata: {
|
||||
@ -2247,6 +2296,15 @@ interface KmipOperationRegisterEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface ProjectAccessRequestEvent {
|
||||
type: EventType.PROJECT_ACCESS_REQUEST;
|
||||
metadata: {
|
||||
projectId: string;
|
||||
requesterId: string;
|
||||
requesterEmail: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface SetupKmipEvent {
|
||||
type: EventType.SETUP_KMIP;
|
||||
metadata: {
|
||||
@ -2272,6 +2330,63 @@ interface RegisterKmipServerEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSecretRotationsEvent {
|
||||
type: EventType.GET_SECRET_ROTATIONS;
|
||||
metadata: {
|
||||
type?: SecretRotation;
|
||||
count: number;
|
||||
rotationIds: string[];
|
||||
secretPath?: string;
|
||||
environment?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSecretRotationEvent {
|
||||
type: EventType.GET_SECRET_ROTATION;
|
||||
metadata: {
|
||||
type: SecretRotation;
|
||||
rotationId: string;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSecretRotationCredentialsEvent {
|
||||
type: EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS;
|
||||
metadata: {
|
||||
type: SecretRotation;
|
||||
rotationId: string;
|
||||
secretPath: string;
|
||||
environment: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateSecretRotationEvent {
|
||||
type: EventType.CREATE_SECRET_ROTATION;
|
||||
metadata: Omit<TCreateSecretRotationV2DTO, "projectId"> & { rotationId: string };
|
||||
}
|
||||
|
||||
interface UpdateSecretRotationEvent {
|
||||
type: EventType.UPDATE_SECRET_ROTATION;
|
||||
metadata: TUpdateSecretRotationV2DTO;
|
||||
}
|
||||
|
||||
interface DeleteSecretRotationEvent {
|
||||
type: EventType.DELETE_SECRET_ROTATION;
|
||||
metadata: TDeleteSecretRotationV2DTO;
|
||||
}
|
||||
|
||||
interface RotateSecretRotationEvent {
|
||||
type: EventType.SECRET_ROTATION_ROTATE_SECRETS;
|
||||
metadata: Pick<TSecretRotationV2Raw, "parameters" | "secretsMapping" | "type" | "connectionId" | "folderId"> & {
|
||||
status: SecretRotationStatus;
|
||||
rotationId: string;
|
||||
jobId?: string | undefined;
|
||||
occurredAt: Date;
|
||||
message?: string | null | undefined;
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| GetSecretsEvent
|
||||
| GetSecretEvent
|
||||
@ -2481,4 +2596,13 @@ export type Event =
|
||||
| KmipOperationRevokeEvent
|
||||
| KmipOperationLocateEvent
|
||||
| KmipOperationRegisterEvent
|
||||
| CreateSecretRequestEvent;
|
||||
| ProjectAccessRequestEvent
|
||||
| CreateSecretRequestEvent
|
||||
| SecretApprovalRequestReview
|
||||
| GetSecretRotationsEvent
|
||||
| GetSecretRotationEvent
|
||||
| GetSecretRotationCredentialsEvent
|
||||
| CreateSecretRotationEvent
|
||||
| UpdateSecretRotationEvent
|
||||
| DeleteSecretRotationEvent
|
||||
| RotateSecretRotationEvent;
|
||||
|
@@ -1,5 +1,6 @@
import * as x509 from "@peculiar/x509";

import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";

@@ -67,9 +68,7 @@ export const certificateEstServiceFactory = ({

    const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);

    const leafCertificate = decodeURIComponent(sslClientCert).match(
      /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
    )?.[0];
    const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];

    if (!leafCertificate) {
      throw new UnauthorizedError({ message: "Missing client certificate" });

@@ -88,10 +87,7 @@ export const certificateEstServiceFactory = ({
    const verifiedChains = await Promise.all(
      caCertChains.map((chain) => {
        const caCert = new x509.X509Certificate(chain.certificate);
        const caChain =
          chain.certificateChain
            .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
            ?.map((c) => new x509.X509Certificate(c)) || [];
        const caChain = extractX509CertFromChain(chain.certificateChain)?.map((c) => new x509.X509Certificate(c)) || [];

        return isCertChainValid([cert, caCert, ...caChain]);
      })

@@ -172,19 +168,15 @@ export const certificateEstServiceFactory = ({
    }

    if (!estConfig.disableBootstrapCertValidation) {
      const caCerts = estConfig.caChain
        .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
        ?.map((cert) => {
          return new x509.X509Certificate(cert);
        });
      const caCerts = extractX509CertFromChain(estConfig.caChain)?.map((cert) => {
        return new x509.X509Certificate(cert);
      });

      if (!caCerts) {
        throw new BadRequestError({ message: "Failed to parse certificate chain" });
      }

      const leafCertificate = decodeURIComponent(sslClientCert).match(
        /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
      )?.[0];
      const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];

      if (!leafCertificate) {
        throw new BadRequestError({ message: "Missing client certificate" });

@@ -250,13 +242,7 @@ export const certificateEstServiceFactory = ({
      kmsService
    });

    const certificates = caCertChain
      .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
      ?.map((cert) => new x509.X509Certificate(cert));

    if (!certificates) {
      throw new BadRequestError({ message: "Failed to parse certificate chain" });
    }
    const certificates = extractX509CertFromChain(caCertChain).map((cert) => new x509.X509Certificate(cert));

    const caCertificate = new x509.X509Certificate(caCert);
    return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
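The repeated inline PEM regex is replaced by extractX509CertFromChain from @app/lib/certificates/extract-certificate, whose body is not shown in this diff. A plausible sketch, under the assumption that it splits a PEM bundle with the same pattern the inline calls used (the real helper may instead throw on malformed input):

// Hypothetical sketch of the helper; not the actual implementation from the diff.
export const extractX509CertFromChain = (certificateChain: string): string[] => {
  // One match per "-----BEGIN CERTIFICATE----- ... -----END CERTIFICATE-----" block.
  return certificateChain.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) ?? [];
};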
@@ -1,5 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";

import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";

@@ -11,6 +10,7 @@ import {
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";

@@ -183,7 +183,7 @@ export const dynamicSecretLeaseServiceFactory = ({
    });

    const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
    if (!dynamicSecretLease) {
    if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id) {
      throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
    }

@@ -256,7 +256,7 @@ export const dynamicSecretLeaseServiceFactory = ({
    });

    const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
    if (!dynamicSecretLease)
    if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id)
      throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });

    const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
@@ -1,31 +1,53 @@
import crypto from "node:crypto";
import dns from "node:dns/promises";
import net from "node:net";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
  // no need for validation when it's dev
  if (appCfg.NODE_ENV === "development") return;

  if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
  if (appCfg.isDevelopmentMode) return [host];

  if (
    appCfg.isCloud &&
    !isGateway &&
    // localhost
    // internal ips
    (host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });
  const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
    (appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
    getDbConnectionHost(appCfg.REDIS_URL),
    getDbConnectionHost(appCfg.AUDIT_LOGS_DB_CONNECTION_URI)
  );

  if (
    host === "localhost" ||
    host === "127.0.0.1" ||
    (dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
  ) {
    throw new BadRequestError({ message: "Invalid db host" });
  // get host db ip
  const exclusiveIps: string[] = [];
  for await (const el of reservedHosts) {
    if (el) {
      if (net.isIPv4(el)) {
        exclusiveIps.push(el);
      } else {
        const resolvedIps = await dns.resolve4(el);
        exclusiveIps.push(...resolvedIps);
      }
    }
  }

  const normalizedHost = host.split(":")[0];
  const inputHostIps: string[] = [];
  if (net.isIPv4(host)) {
    inputHostIps.push(host);
  } else {
    if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    const resolvedIps = await dns.resolve4(host);
    inputHostIps.push(...resolvedIps);
  }

  if (!isGateway && !appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP) {
    const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
    if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
  }

  const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
  if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
  return inputHostIps;
};
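The rewritten verifyHostInputValidity defers the private-range check to isPrivateIp from @app/lib/ip/ipRange, which is not part of this diff. A rough IPv4-only sketch of such a predicate, assuming it covers the RFC 1918 blocks plus loopback and link-local (the real helper likely also handles CIDR ranges and IPv6):

// Hypothetical sketch; the actual isPrivateIp lives in @app/lib/ip/ipRange.
const isPrivateIp = (ip: string): boolean => {
  const octets = ip.split(".").map(Number);
  if (octets.length !== 4 || octets.some((o) => Number.isNaN(o) || o < 0 || o > 255)) return false;
  const [a, b] = octets;
  return (
    a === 10 || // 10.0.0.0/8
    (a === 172 && b >= 16 && b <= 31) || // 172.16.0.0/12
    (a === 192 && b === 168) || // 192.168.0.0/16
    a === 127 || // loopback
    (a === 169 && b === 254) // link-local
  );
};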
@@ -13,6 +13,7 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";

@@ -144,6 +145,14 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
    // We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
    CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
    DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
    validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }

    return providerInputs;
  };
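validateHandlebarTemplate (imported from @app/lib/template/validate-handlebars) is introduced across these providers but not shown in this diff. A minimal sketch of what such a guard could look like, assuming it parses the statement with Handlebars and rejects any top-level expression outside the allow-list (the real helper may also walk block helpers and nested paths, and its error message is an assumption here):

import handlebars from "handlebars";
import { BadRequestError } from "@app/lib/errors";

// Hypothetical sketch; not the actual implementation referenced by the diff.
export const validateHandlebarTemplate = (
  templateName: string,
  template: string,
  { allowedExpressions }: { allowedExpressions: (val: string) => boolean }
) => {
  const ast = handlebars.parse(template);
  for (const statement of ast.body) {
    if (statement.type === "MustacheStatement") {
      const path = (statement as hbs.AST.MustacheStatement).path as hbs.AST.PathExpression;
      if (!allowedExpressions(path.original)) {
        throw new BadRequestError({ message: `Disallowed expression '${path.original}' in ${templateName} template` });
      }
    }
  }
};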
@@ -3,9 +3,10 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {

@@ -20,14 +21,28 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    const hostIps = await Promise.all(
      providerInputs.host
        .split(",")
        .filter(Boolean)
        .map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
    );
    validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
      });
    }
    validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return providerInputs;
    return { ...providerInputs, hostIps };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
    const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
    const client = new cassandra.Client({
      sslOptions,

@@ -40,7 +55,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
      },
      keyspace: providerInputs.keyspace,
      localDataCenter: providerInputs?.localDataCenter,
      contactPoints: providerInputs.host.split(",").filter(Boolean)
      contactPoints: providerInputs.hostIps
    });
    return client;
  };
@@ -19,15 +19,14 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
        url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
        ...(providerInputs.ca && {
          ssl: {
            rejectUnauthorized: false,
@@ -1,5 +1,16 @@
import { z } from "zod";

export type PasswordRequirements = {
  length: number;
  required: {
    lowercase: number;
    uppercase: number;
    digits: number;
    symbols: number;
  };
  allowedSymbols?: string;
};

export enum SqlProviders {
  Postgres = "postgres",
  MySQL = "mysql2",

@@ -100,6 +111,28 @@ export const DynamicSecretSqlDBSchema = z.object({
  database: z.string().trim(),
  username: z.string().trim(),
  password: z.string().trim(),
  passwordRequirements: z
    .object({
      length: z.number().min(1).max(250),
      required: z
        .object({
          lowercase: z.number().min(0),
          uppercase: z.number().min(0),
          digits: z.number().min(0),
          symbols: z.number().min(0)
        })
        .refine((data) => {
          const total = Object.values(data).reduce((sum, count) => sum + count, 0);
          return total <= 250;
        }, "Sum of required characters cannot exceed 250"),
      allowedSymbols: z.string().optional()
    })
    .refine((data) => {
      const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
      return total <= data.length;
    }, "Sum of required characters cannot exceed the total length")
    .optional()
    .describe("Password generation requirements"),
  creationStatement: z.string().trim(),
  revocationStatement: z.string().trim(),
  renewStatement: z.string().trim().optional(),
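The new passwordRequirements schema implies a generator that satisfies each per-class minimum within the requested length. A rough sketch of one way to do that, using the PasswordRequirements type declared above (the function name and the default symbol set are illustrative, not taken from the diff):

import crypto from "node:crypto";

// Hypothetical sketch of a requirements-driven password generator.
const generatePasswordFromRequirements = (requirements: PasswordRequirements): string => {
  const { length, required, allowedSymbols } = requirements;
  const pools = {
    lowercase: "abcdefghijklmnopqrstuvwxyz",
    uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
    digits: "0123456789",
    symbols: allowedSymbols || "-_.~!*"
  };
  const pick = (pool: string) => pool[crypto.randomInt(pool.length)];
  const chars: string[] = [];
  // Meet every minimum first, then pad from all pools up to the requested length.
  (Object.keys(required) as (keyof typeof pools)[]).forEach((key) => {
    for (let i = 0; i < required[key]; i += 1) chars.push(pick(pools[key]));
  });
  const combined = Object.values(pools).join("");
  while (chars.length < length) chars.push(pick(combined));
  // Fisher-Yates shuffle so required characters are not clustered at the start.
  for (let i = chars.length - 1; i > 0; i -= 1) {
    const j = crypto.randomInt(i + 1);
    [chars[i], chars[j]] = [chars[j], chars[i]];
  }
  return chars.join("");
};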
@@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
    const isSrv = !providerInputs.port;
    const uri = isSrv
      ? `mongodb+srv://${providerInputs.host}`
      : `mongodb://${providerInputs.host}:${providerInputs.port}`;
      ? `mongodb+srv://${providerInputs.hostIp}`
      : `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;

    const client = new MongoClient(uri, {
      auth: {
@@ -3,7 +3,6 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

@@ -79,14 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
    const axiosInstance = axios.create({
      baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
      baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
      auth: {
        username: providerInputs.username,
        password: providerInputs.password
@@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

@@ -51,16 +52,28 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
    let connection: Redis | null = null;
    try {
      connection = new Redis({
        username: providerInputs.username,
        host: providerInputs.host,
        host: providerInputs.hostIp,
        port: providerInputs.port,
        password: providerInputs.password,
        ...(providerInputs.ca && {
@@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

@@ -27,14 +28,25 @@ export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
  const $getClient = async (
    providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
    useMaster?: boolean
  ) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.host};` +
      `SERVER=${providerInputs.hostIp};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +

@@ -83,7 +95,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
      password
    });

    const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
    const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

    for await (const query of queries) {
      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.

@@ -104,7 +116,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
      username
    });

    const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
    const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);
@@ -11,6 +11,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";

@@ -28,13 +29,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
    const client = hdb.createClient({
      host: providerInputs.host,
      host: providerInputs.hostIp,
      port: providerInputs.port,
      user: providerInputs.username,
      password: providerInputs.password,
Some files were not shown because too many files have changed in this diff.