Mirror of https://github.com/Infisical/infisical.git, synced 2025-07-02 16:55:02 +00:00

Compare commits
526 Commits
doc/add-gi...
infisical/
Commit list (Author | SHA1 | Date): 526 entries, from 5ab0c66dee (first listed) to 60895537a7 (last listed).
.env.example (25 changed lines)

@@ -74,9 +74,34 @@ CAPTCHA_SECRET=
 
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=
 
+OTEL_TELEMETRY_COLLECTION_ENABLED=false
+OTEL_EXPORT_TYPE=prometheus
+OTEL_EXPORT_OTLP_ENDPOINT=
+OTEL_OTLP_PUSH_INTERVAL=
+
+OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
+OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
+
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
 
 SSL_CLIENT_CERTIFICATE_HEADER_KEY=
 
 ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
+
+# App Connections
+
+# aws assume-role
+INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
+INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
+
+# github oauth
+INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
+INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
+
+#github app
+INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
+INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
+INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
+INF_APP_CONNECTION_GITHUB_APP_SLUG=
+INF_APP_CONNECTION_GITHUB_APP_ID=
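The OTEL_* keys above only declare the knobs; as a rough sketch of how a service might consume them (the branching below is an assumption, not code from the Infisical backend):

// A hypothetical reader for the new OTEL settings; key names mirror .env.example,
// but how the backend actually wires them into OpenTelemetry may differ.
const otelEnabled = process.env.OTEL_TELEMETRY_COLLECTION_ENABLED === "true";
const exportType = process.env.OTEL_EXPORT_TYPE ?? "prometheus"; // "prometheus" or "otlp" (assumed values)

if (otelEnabled && exportType === "otlp") {
  const endpoint = process.env.OTEL_EXPORT_OTLP_ENDPOINT; // push target, protected by the basic-auth variables
  const pushIntervalMs = Number(process.env.OTEL_OTLP_PUSH_INTERVAL ?? 30000);
  console.log(`Pushing OTLP metrics to ${endpoint} every ${pushIntervalMs} ms`);
}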
@@ -10,8 +10,7 @@ on:
 
 permissions:
   contents: write
-  # packages: write
-  # issues: write
 
 jobs:
   cli-integration-tests:
     name: Run tests before deployment

@@ -26,6 +25,63 @@ jobs:
       CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
       CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
 
+  npm-release:
+    runs-on: ubuntu-20.04
+    env:
+      working-directory: ./npm
+    needs:
+      - cli-integration-tests
+      - goreleaser
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Extract version
+        run: |
+          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
+          echo "Version extracted: $VERSION"
+          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
+
+      - name: Print version
+        run: echo ${{ env.CLI_VERSION }}
+
+      - name: Setup Node
+        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
+        with:
+          node-version: 20
+          cache: "npm"
+          cache-dependency-path: ./npm/package-lock.json
+      - name: Install dependencies
+        working-directory: ${{ env.working-directory }}
+        run: npm install --ignore-scripts
+
+      - name: Set NPM version
+        working-directory: ${{ env.working-directory }}
+        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
+
+      - name: Setup NPM
+        working-directory: ${{ env.working-directory }}
+        run: |
+          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
+          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
+
+          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
+          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+      - name: Pack NPM
+        working-directory: ${{ env.working-directory }}
+        run: npm pack
+
+      - name: Publish NPM
+        working-directory: ${{ env.working-directory }}
+        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
   goreleaser:
     runs-on: ubuntu-20.04
     needs: [cli-integration-tests]
.gitignore (vendored, 2 changed lines)

@@ -71,3 +71,5 @@ frontend-build
 cli/infisical-merge
 cli/test/infisical-merge
 /backend/binary
+
+/npm/bin
@@ -1,6 +1,12 @@
 #!/usr/bin/env sh
 . "$(dirname -- "$0")/_/husky.sh"
 
+# Check if infisical is installed
+if ! command -v infisical >/dev/null 2>&1; then
+  echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
+  exit 1
+fi
+
 npx lint-staged
 
 infisical scan git-changes --staged -v
@@ -69,13 +69,21 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
 
 WORKDIR /app
 
-# Required for pkcs11js
+# Required for pkcs11js and ODBC
 RUN apt-get update && apt-get install -y \
     python3 \
     make \
     g++ \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    freetds-bin \
+    tdsodbc \
     && rm -rf /var/lib/apt/lists/*
 
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 COPY backend/package*.json ./
 RUN npm ci --only-production
 

@@ -91,13 +99,21 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
 
 WORKDIR /app
 
-# Required for pkcs11js
+# Required for pkcs11js and ODBC
 RUN apt-get update && apt-get install -y \
     python3 \
    make \
     g++ \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    freetds-bin \
+    tdsodbc \
    && rm -rf /var/lib/apt/lists/*
 
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 COPY backend/package*.json ./
 RUN npm ci --only-production
 

@@ -108,13 +124,25 @@ RUN mkdir frontend-build
 # Production stage
 FROM base AS production
 
-# Install necessary packages
+# Install necessary packages including ODBC
 RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     git \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    freetds-bin \
+    tdsodbc \
+    openssh \
     && rm -rf /var/lib/apt/lists/*
 
+# Configure ODBC in production
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 # Install Infisical CLI
 RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
     && apt-get update && apt-get install -y infisical=0.31.1 \
@@ -72,8 +72,16 @@ RUN addgroup --system --gid 1001 nodejs \
 
 WORKDIR /app
 
-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
+# Install all required dependencies for build
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
 
 COPY backend/package*.json ./
 RUN npm ci --only-production

@@ -88,8 +96,19 @@ FROM base AS backend-runner
 
 WORKDIR /app
 
-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
+# Install all required dependencies for runtime
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
 
 COPY backend/package*.json ./
 RUN npm ci --only-production

@@ -100,11 +119,33 @@ RUN mkdir frontend-build
 
 # Production stage
 FROM base AS production
 
 RUN apk add --upgrade --no-cache ca-certificates
 RUN apk add --no-cache bash curl && curl -1sLf \
   'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
   && apk add infisical=0.31.1 && apk add --no-cache git
+
+WORKDIR /
+
+# Install all required runtime dependencies
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev \
+    bash \
+    curl \
+    git \
+    openssh
+
+# Configure ODBC in production
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
+# Setup user permissions
 RUN addgroup --system --gid 1001 nodejs \
   && adduser --system --uid 1001 non-root-user
 

@@ -127,7 +168,6 @@ ARG CAPTCHA_SITE_KEY
 ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
   BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
 
-WORKDIR /
 
 COPY --from=backend-runner /app /backend
 

@@ -149,4 +189,4 @@ EXPOSE 443
 
 USER non-root-user
 
 CMD ["./standalone-entrypoint.sh"]
Makefile (4 changed lines)

@@ -10,6 +10,9 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build
 
+up-dev-metrics:
+	docker compose -f docker-compose.dev.yml --profile metrics up --build
+
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build
 

@@ -27,4 +30,3 @@ reviewable-api:
 	npm run type:check
 
 reviewable: reviewable-ui reviewable-api
-
README.md (11 changed lines)

@@ -14,15 +14,6 @@
     <a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
 </h4>
 
-<p align="center">
-  <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
-    <img src=".github/images/deploy-to-aws.png" width="137" />
-  </a>
-  <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
-    <img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
-  </a>
-</p>
-
 <h4 align="center">
   <a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
     <img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />

@@ -75,7 +66,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
 
 ### Key Management (KMS):
 
-- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
+- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
 - **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
 
 ### General Platform:
@@ -7,7 +7,17 @@ WORKDIR /app
 RUN apk --update add \
     python3 \
     make \
-    g++
+    g++ \
+    openssh
+
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
 
 COPY package*.json ./
 RUN npm ci --only-production

@@ -28,6 +38,17 @@ RUN apk --update add \
     make \
     g++
 
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 RUN npm ci --only-production && npm cache clean --force
 
 COPY --from=build /app .
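The FreeTDS and unixODBC packages plus the /etc/odbcinst.ini entry exist so the backend's new "odbc" dependency (added to backend/package.json later in this diff) can reach SAP ASE for dynamic secrets. A minimal, hypothetical sketch of that path; the server, port, and credentials are placeholders, not values from the repository:

import odbc from "odbc"; // matches the "odbc": "^2.4.9" dependency added in this diff

// Connects through the "FreeTDS" driver name registered in /etc/odbcinst.ini.
async function probeSapAse(): Promise<void> {
  const connection = await odbc.connect(
    "Driver=FreeTDS;Server=sap-ase.example.internal;Port=5000;UID=demo;PWD=demo-password;Database=master"
  );
  const rows = await connection.query("SELECT 1 AS ok"); // trivial round-trip check
  console.log(rows);
  await connection.close();
}

probeSapAse().catch((err) => console.error(err));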
@@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0
 ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
     SOFTHSM2_SOURCES=/tmp/softhsm2
 
-# install build dependencies including python3
+# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
 RUN apk --update add \
     alpine-sdk \
     autoconf \

@@ -17,9 +17,22 @@ RUN apk --update add \
     openssl-dev \
     python3 \
     make \
-    g++
+    g++ \
+    openssh
+
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 
 # build and install SoftHSM2
 RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
 WORKDIR ${SOFTHSM2_SOURCES}
 
@@ -10,17 +10,22 @@ export const mockQueue = (): TQueueServiceFactory => {
     queue: async (name, jobData) => {
       job[name] = jobData;
     },
+    queuePg: async () => {},
+    initialize: async () => {},
     shutdown: async () => undefined,
     stopRepeatableJob: async () => true,
     start: (name, jobFn) => {
       queues[name] = jobFn;
       workers[name] = jobFn;
     },
+    startPg: async () => {},
     listen: (name, event) => {
       events[name] = event;
     },
+    getRepeatableJobs: async () => [],
     clearQueue: async () => {},
     stopJobById: async () => {},
-    stopRepeatableJobByJobId: async () => true
+    stopRepeatableJobByJobId: async () => true,
+    stopRepeatableJobByKey: async () => true
   };
 };
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
+    },
+    verify: async () => {
+      return true;
     }
   };
 };
backend/e2e-test/routes/v3/secret-recursive.spec.ts (new file, 86 lines)

@@ -0,0 +1,86 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret Recursive Testing", async () => {
  const projectId = seedData1.projectV3.id;
  const folderAndSecretNames = [
    { name: "deep1", path: "/", expectedSecretCount: 4 },
    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
  ];

  beforeAll(async () => {
    const rootFolderIds: string[] = [];
    for (const folder of folderAndSecretNames) {
      // eslint-disable-next-line no-await-in-loop
      const createdFolder = await createFolder({
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: folder.path,
        name: folder.name
      });

      if (folder.path === "/") {
        rootFolderIds.push(createdFolder.id);
      }
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2({
        secretPath: folder.path,
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        key: folder.name,
        value: folder.name
      });
    }

    return async () => {
      await Promise.all(
        rootFolderIds.map((id) =>
          deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id,
            workspaceId: projectId,
            environmentSlug: "prod"
          })
        )
      );

      await deleteSecretV2({
        authToken: jwtAuthToken,
        secretPath: "/",
        workspaceId: projectId,
        environmentSlug: "prod",
        key: folderAndSecretNames[0].name
      });
    };
  });

  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
    const secrets = await getSecretsV2({
      authToken: jwtAuthToken,
      secretPath: path,
      workspaceId: projectId,
      environmentSlug: "prod",
      recursive: true
    });

    expect(secrets.secrets.length).toEqual(expectedSecretCount);
    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
      folderAndSecretNames
        .filter((el) => el.path.startsWith(path))
        .sort((a, b) => a.name.localeCompare(b.name))
        .map((el) =>
          expect.objectContaining({
            secretKey: el.name,
            secretValue: el.name
          })
        )
    );
  });
});
|
|||||||
environmentSlug: string;
|
environmentSlug: string;
|
||||||
secretPath: string;
|
secretPath: string;
|
||||||
authToken: string;
|
authToken: string;
|
||||||
|
recursive?: boolean;
|
||||||
}) => {
|
}) => {
|
||||||
const getSecretsResponse = await testServer.inject({
|
const getSecretsResponse = await testServer.inject({
|
||||||
method: "GET",
|
method: "GET",
|
||||||
@ -109,7 +110,8 @@ export const getSecretsV2 = async (dto: {
|
|||||||
environment: dto.environmentSlug,
|
environment: dto.environmentSlug,
|
||||||
secretPath: dto.secretPath,
|
secretPath: dto.secretPath,
|
||||||
expandSecretReferences: "true",
|
expandSecretReferences: "true",
|
||||||
include_imports: "true"
|
include_imports: "true",
|
||||||
|
recursive: String(dto.recursive || false)
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
expect(getSecretsResponse.statusCode).toBe(200);
|
expect(getSecretsResponse.statusCode).toBe(200);
|
||||||
|
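With the recursive flag plumbed through the helper, a caller can fetch an entire folder subtree in one request. A short usage sketch that mirrors the new spec file (the path and seed values are the ones used there):

// Fetch every secret at and below /deep1 in the seeded project, as the
// recursive spec above does for each configured path.
const secrets = await getSecretsV2({
  authToken: jwtAuthToken,
  workspaceId: seedData1.projectV3.id,
  environmentSlug: "prod",
  secretPath: "/deep1",
  recursive: true
});
console.log(secrets.secrets.map((s: { secretKey: string }) => s.secretKey));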
@@ -53,13 +53,13 @@ export default {
       extension: "ts"
     });
     const smtp = mockSmtpServer();
-    const queue = queueServiceFactory(cfg.REDIS_URL);
+    const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
     const keyStore = keyStoreFactory(cfg.REDIS_URL);
 
     const hsmModule = initializeHsmModule();
     hsmModule.initialize();
 
-    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
+    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
 
     // @ts-expect-error type
     globalThis.testServer = server;
backend/package-lock.json (generated, 1883 changed lines): diff suppressed because it is too large.
@@ -50,6 +50,7 @@
     "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
     "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
     "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
     "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
     "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",

@@ -58,6 +59,7 @@
     "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
     "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
     "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
     "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",

@@ -130,21 +132,31 @@
     "@fastify/multipart": "8.3.0",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
+    "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
+    "@google-cloud/kms": "^4.5.0",
     "@node-saml/passport-saml": "^4.0.4",
     "@octokit/auth-app": "^7.1.1",
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
+    "@octopusdeploy/api-client": "^3.4.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
+    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
+    "@opentelemetry/exporter-prometheus": "^0.55.0",
+    "@opentelemetry/instrumentation": "^0.55.0",
+    "@opentelemetry/resources": "^1.28.0",
+    "@opentelemetry/sdk-metrics": "^1.28.0",
+    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
     "@sindresorhus/slugify": "1.1.0",
     "@slack/oauth": "^3.0.1",
     "@slack/web-api": "^7.3.4",
-    "@team-plain/typescript-sdk": "^4.6.1",
     "@ucast/mongo2js": "^1.3.4",
     "ajv": "^8.12.0",
     "argon2": "^0.31.2",

@@ -178,14 +190,17 @@
     "mysql2": "^3.9.8",
     "nanoid": "^3.3.4",
     "nodemailer": "^6.9.9",
+    "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
+    "otplib": "^12.0.1",
     "passport-github": "^1.1.0",
     "passport-gitlab2": "^5.0.0",
     "passport-google-oauth20": "^2.0.0",
     "passport-ldapauth": "^3.0.1",
     "pg": "^8.11.3",
+    "pg-boss": "^10.1.5",
     "pg-query-stream": "^4.5.3",
     "picomatch": "^3.0.1",
     "pino": "^8.16.2",
@@ -8,61 +8,80 @@ const prompt = promptSync({
   sigint: true
 });
 
+const sanitizeInputParam = (value: string) => {
+  // Escape double quotes and wrap the entire value in double quotes
+  if (value) {
+    return `"${value.replace(/"/g, '\\"')}"`;
+  }
+  return '""';
+};
+
 const exportDb = () => {
-  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
-  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
-  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
-  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
-  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
+  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
+  const exportPort = sanitizeInputParam(
+    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
+  );
+  const exportUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
+  );
+  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
+  const exportDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
+  );
 
   // we do not include the audit_log and secret_sharing entries
   execSync(
-    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
       __dirname,
-      "../src/db/dump.sql"
+      "../src/db/backup.dump"
     )}`,
     { stdio: "inherit" }
   );
 };
 
 const importDbForOrg = () => {
-  const importHost = prompt("Enter your Postgres Host to migrate to: ");
-  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
-  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
-  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
-  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
-  const orgId = prompt("Enter the organization ID to migrate: ");
+  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
+  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
+  const importUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
+  );
+  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
+  const importDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
+  );
+  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
 
-  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
     console.log("File not found, please export the database first.");
     return;
   }
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
       __dirname,
-      "../src/db/dump.sql"
-    )}`
+      "../src/db/backup.dump"
+    )}`,
+    { maxBuffer: 1024 * 1024 * 4096 }
   );
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
   );
 
   // delete global/instance-level resources not relevant to the organization to migrate
   // users
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
   );
 
   // identities
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
   );
 
   // reset slack configuration in superAdmin
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
   );
 
   console.log("Organization migrated successfully.");
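For reference, a standalone copy of the quoting helper this script introduces, together with the outputs it produces (illustrative only):

// Same logic as sanitizeInputParam above: wrap the value in double quotes and
// backslash-escape embedded double quotes before splicing it into the
// pg_dump / pg_restore / psql command strings.
const sanitizeInputParam = (value: string) => (value ? `"${value.replace(/"/g, '\\"')}"` : '""');

console.log(sanitizeInputParam("localhost")); // "localhost"
console.log(sanitizeInputParam('pg"host'));   // "pg\"host"
console.log(sanitizeInputParam(""));          // ""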
backend/src/@types/fastify-request-context.d.ts (vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
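With this module augmentation, reads and writes of the reqId key become type-checked. A rough usage sketch, assuming the standard @fastify/request-context plugin registration (the hook and route below are illustrative, not part of this diff):

import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const app = Fastify();
app.register(fastifyRequestContext);

// Store the request id once per request; RequestContextData types it as a string.
app.addHook("onRequest", async (req) => {
  requestContext.set("reqId", req.id);
});

// Read it back anywhere downstream without threading it through arguments.
app.get("/health", async () => ({ reqId: requestContext.get("reqId") }));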
backend/src/@types/fastify-zod.d.ts (vendored, 4 changed lines)

@@ -1,6 +1,6 @@
 import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
-import { Logger } from "pino";
 
+import { CustomLogger } from "@app/lib/logger/logger";
 import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
 
 declare global {

@@ -8,7 +8,7 @@ declare global {
       RawServerDefault,
       RawRequestDefaultExpression<RawServerDefault>,
       RawReplyDefaultExpression<RawServerDefault>,
-      Readonly<Logger>,
+      Readonly<CustomLogger>,
       ZodTypeProvider
     >;
 
17
backend/src/@types/fastify.d.ts
vendored
17
backend/src/@types/fastify.d.ts
vendored
@ -1,5 +1,7 @@
|
|||||||
import "fastify";
|
import "fastify";
|
||||||
|
|
||||||
|
import { Redis } from "ioredis";
|
||||||
|
|
||||||
import { TUsers } from "@app/db/schemas";
|
import { TUsers } from "@app/db/schemas";
|
||||||
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
||||||
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
||||||
@ -29,9 +31,12 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
|
|||||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||||
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
||||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||||
|
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
|
||||||
|
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
|
||||||
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
|
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
|
||||||
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
|
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
|
||||||
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
||||||
|
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
|
||||||
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
||||||
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||||
@ -50,6 +55,7 @@ import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-acces
|
|||||||
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
||||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||||
|
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
|
||||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||||
@ -79,6 +85,7 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
|
|||||||
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
|
 import { TSlackServiceFactory } from "@app/services/slack/slack-service";
 import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
 import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
+import { TTotpServiceFactory } from "@app/services/totp/totp-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { TUserServiceFactory } from "@app/services/user/user-service";
 import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";

@@ -86,6 +93,10 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
 import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

 declare module "fastify" {
+  interface Session {
+    callbackPort: string;
+  }
+
   interface FastifyRequest {
     realIp: string;
     // used for mfa session authentication

@@ -114,6 +125,7 @@ declare module "fastify" {
   }

   interface FastifyInstance {
+    redis: Redis;
     services: {
       login: TAuthLoginFactory;
       password: TAuthPasswordFactory;

@@ -154,6 +166,7 @@ declare module "fastify" {
       identityAwsAuth: TIdentityAwsAuthServiceFactory;
       identityAzureAuth: TIdentityAzureAuthServiceFactory;
       identityOidcAuth: TIdentityOidcAuthServiceFactory;
+      identityJwtAuth: TIdentityJwtAuthServiceFactory;
       accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
       accessApprovalRequest: TAccessApprovalRequestServiceFactory;
       secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;

@@ -167,6 +180,8 @@ declare module "fastify" {
       auditLogStream: TAuditLogStreamServiceFactory;
       certificate: TCertificateServiceFactory;
       certificateTemplate: TCertificateTemplateServiceFactory;
+      sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
+      sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
       certificateAuthority: TCertificateAuthorityServiceFactory;
       certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
       certificateEst: TCertificateEstServiceFactory;

@@ -193,6 +208,8 @@ declare module "fastify" {
       migration: TExternalMigrationServiceFactory;
       externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
       projectTemplate: TProjectTemplateServiceFactory;
+      totp: TTotpServiceFactory;
+      appConnection: TAppConnectionServiceFactory;
     };
     // this is exclusive use for middlewares in which we need to inject data
     // everywhere else access using service layer
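As context for the FastifyInstance augmentation above, here is a minimal sketch (not part of the diff) of how one of the newly typed services could be reached from a route handler once it is attached to server.services. The route path and the getUserTotpConfig method are assumptions for illustration only; they are not defined in this compare.

import type { FastifyInstance } from "fastify";

// Hypothetical route registration; only the typing of `server.services.totp` comes from the diff.
export const registerTotpStatusRouter = async (server: FastifyInstance) => {
  server.get<{ Querystring: { userId: string } }>("/v1/totp/status", async (req) => {
    // `getUserTotpConfig` and its return shape are assumed for this sketch.
    const config = await server.services.totp.getUserTotpConfig({ userId: req.query.userId });
    return { isVerified: config.isVerified };
  });
};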
66  backend/src/@types/knex.d.ts  vendored

@@ -98,6 +98,9 @@ import {
   TIdentityGcpAuths,
   TIdentityGcpAuthsInsert,
   TIdentityGcpAuthsUpdate,
+  TIdentityJwtAuths,
+  TIdentityJwtAuthsInsert,
+  TIdentityJwtAuthsUpdate,
   TIdentityKubernetesAuths,
   TIdentityKubernetesAuthsInsert,
   TIdentityKubernetesAuthsUpdate,

@@ -199,6 +202,9 @@ import {
   TProjectSlackConfigs,
   TProjectSlackConfigsInsert,
   TProjectSlackConfigsUpdate,
+  TProjectSplitBackfillIds,
+  TProjectSplitBackfillIdsInsert,
+  TProjectSplitBackfillIdsUpdate,
   TProjectsUpdate,
   TProjectTemplates,
   TProjectTemplatesInsert,

@@ -311,9 +317,27 @@ import {
   TSlackIntegrations,
   TSlackIntegrationsInsert,
   TSlackIntegrationsUpdate,
+  TSshCertificateAuthorities,
+  TSshCertificateAuthoritiesInsert,
+  TSshCertificateAuthoritiesUpdate,
+  TSshCertificateAuthoritySecrets,
+  TSshCertificateAuthoritySecretsInsert,
+  TSshCertificateAuthoritySecretsUpdate,
+  TSshCertificateBodies,
+  TSshCertificateBodiesInsert,
+  TSshCertificateBodiesUpdate,
+  TSshCertificates,
+  TSshCertificatesInsert,
+  TSshCertificatesUpdate,
+  TSshCertificateTemplates,
+  TSshCertificateTemplatesInsert,
+  TSshCertificateTemplatesUpdate,
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
+  TTotpConfigs,
+  TTotpConfigsInsert,
+  TTotpConfigsUpdate,
   TTrustedIps,
   TTrustedIpsInsert,
   TTrustedIpsUpdate,

@@ -339,6 +363,7 @@ import {
   TWorkflowIntegrationsInsert,
   TWorkflowIntegrationsUpdate
 } from "@app/db/schemas";
+import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
 import {
   TExternalGroupOrgRoleMappings,
   TExternalGroupOrgRoleMappingsInsert,

@@ -369,6 +394,31 @@ declare module "knex/types/tables" {
   interface Tables {
     [TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
     [TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
+    [TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
+      TSshCertificateAuthorities,
+      TSshCertificateAuthoritiesInsert,
+      TSshCertificateAuthoritiesUpdate
+    >;
+    [TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
+      TSshCertificateAuthoritySecrets,
+      TSshCertificateAuthoritySecretsInsert,
+      TSshCertificateAuthoritySecretsUpdate
+    >;
+    [TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
+      TSshCertificateTemplates,
+      TSshCertificateTemplatesInsert,
+      TSshCertificateTemplatesUpdate
+    >;
+    [TableName.SshCertificate]: KnexOriginal.CompositeTableType<
+      TSshCertificates,
+      TSshCertificatesInsert,
+      TSshCertificatesUpdate
+    >;
+    [TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
+      TSshCertificateBodies,
+      TSshCertificateBodiesInsert,
+      TSshCertificateBodiesUpdate
+    >;
     [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
       TCertificateAuthorities,
       TCertificateAuthoritiesInsert,

@@ -587,6 +637,11 @@ declare module "knex/types/tables" {
       TIdentityOidcAuthsInsert,
       TIdentityOidcAuthsUpdate
     >;
+    [TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
+      TIdentityJwtAuths,
+      TIdentityJwtAuthsInsert,
+      TIdentityJwtAuthsUpdate
+    >;
     [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
       TIdentityUaClientSecrets,
       TIdentityUaClientSecretsInsert,

@@ -826,5 +881,16 @@ declare module "knex/types/tables" {
       TProjectTemplatesInsert,
       TProjectTemplatesUpdate
     >;
+    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
+    [TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
+      TProjectSplitBackfillIds,
+      TProjectSplitBackfillIdsInsert,
+      TProjectSplitBackfillIdsUpdate
+    >;
+    [TableName.AppConnection]: KnexOriginal.CompositeTableType<
+      TAppConnections,
+      TAppConnectionsInsert,
+      TAppConnectionsUpdate
+    >;
   }
 }
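For orientation, a minimal sketch (illustrative, not part of the diff) of what this knex/types/tables augmentation buys: queries against the new tables become type-checked, so selects and inserts on, say, TableName.SshCertificate are constrained to the generated row types. The helper name and the configured knex instance are assumptions.

import { Knex } from "knex";
import { TableName } from "@app/db/schemas";

// Assumes a configured knex instance is passed in; the return type is inferred
// as the generated TSshCertificates rows thanks to the Tables augmentation above.
const listCertificatesForCa = async (db: Knex, sshCaId: string) => {
  return db(TableName.SshCertificate).where({ sshCaId }).select("*");
};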
@@ -64,23 +64,25 @@ export async function up(knex: Knex): Promise<void> {
   }

   if (await knex.schema.hasTable(TableName.Certificate)) {
-    await knex.schema.alterTable(TableName.Certificate, (t) => {
-      t.uuid("caCertId").nullable();
-      t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
-    });
+    const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
+    if (!hasCaCertIdColumn) {
+      await knex.schema.alterTable(TableName.Certificate, (t) => {
+        t.uuid("caCertId").nullable();
+        t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
+      });

       await knex.raw(`
         UPDATE "${TableName.Certificate}" cert
         SET "caCertId" = (
           SELECT caCert.id
           FROM "${TableName.CertificateAuthorityCert}" caCert
           WHERE caCert."caId" = cert."caId"
-        )
-      `);
+        )`);

       await knex.schema.alterTable(TableName.Certificate, (t) => {
         t.uuid("caCertId").notNullable().alter();
       });
+    }
   }
 }
@@ -2,7 +2,7 @@ import { Knex } from "knex";

 import { TableName } from "../schemas";

-const BATCH_SIZE = 30_000;
+const BATCH_SIZE = 10_000;

 export async function up(knex: Knex): Promise<void> {
   const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

@@ -12,7 +12,18 @@ export async function up(knex: Knex): Promise<void> {
       t.string("authMethod").nullable();
     });

-    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+    // first we remove identities without auth method that is unused
+    // ! We delete all access tokens where the identity has no auth method set!
+    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
+      .whereNull(`${TableName.Identity}.authMethod`)
+      .delete();
+
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+      .whereNull("authMethod")
+      .limit(BATCH_SIZE)
+      .select("id");
     let totalUpdated = 0;

     do {

@@ -33,24 +44,15 @@ export async function up(knex: Knex): Promise<void> {
       });

       // eslint-disable-next-line no-await-in-loop
-      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+        .whereNull("authMethod")
+        .limit(BATCH_SIZE)
+        .select("id");

       totalUpdated += batchIds.length;
       console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
     } while (nullableAccessTokens.length > 0);

-    // ! We delete all access tokens where the identity has no auth method set!
-    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
-    await knex(TableName.IdentityAccessToken)
-      .whereNotExists((queryBuilder) => {
-        void queryBuilder
-          .select("id")
-          .from(TableName.Identity)
-          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
-          .whereNotNull("authMethod");
-      })
-      .delete();
-
     // Finally we set the authMethod to notNullable after populating the column.
     // This will fail if the data is not populated correctly, so it's safe.
     await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
|
|||||||
|
import { Knex } from "knex";
|
||||||
|
|
||||||
|
import { TableName } from "../schemas";
|
||||||
|
|
||||||
|
export async function up(knex: Knex): Promise<void> {
|
||||||
|
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
|
||||||
|
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
||||||
|
t.dropForeign("orgId");
|
||||||
|
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(knex: Knex): Promise<void> {
|
||||||
|
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
|
||||||
|
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
||||||
|
t.dropForeign("orgId");
|
||||||
|
t.foreign("orgId").references("id").inTable(TableName.Organization);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
54  backend/src/db/migrations/20241112082701_add-totp-support.ts  Normal file

@@ -0,0 +1,54 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
    await knex.schema.createTable(TableName.TotpConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("userId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.boolean("isVerified").defaultTo(false).notNullable();
      t.binary("encryptedRecoveryCodes").notNullable();
      t.binary("encryptedSecret").notNullable();
      t.timestamps(true, true, true);
      t.unique("userId");
    });

    await createOnUpdateTrigger(knex, TableName.TotpConfig);
  }

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!doesOrgMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!doesUserSelectedMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
  await knex.schema.dropTableIfExists(TableName.TotpConfig);

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (doesOrgMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (doesUserSelectedMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("description");
    });
  }
}
@@ -0,0 +1,20 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex(TableName.IdentityMetadata).whereNull("value").delete();
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}
@@ -0,0 +1,59 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicy,
    "deletedAt"
  );
  const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicy,
    "deletedAt"
  );

  if (!hasAccessApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.timestamp("deletedAt");
    });
  }
  if (!hasSecretApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.timestamp("deletedAt");
    });
  }

  await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
    t.dropForeign(["privilegeId"]);

    // Add the new foreign key constraint with ON DELETE SET NULL
    t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicy,
    "deletedAt"
  );
  const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicy,
    "deletedAt"
  );

  if (hasAccessApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("deletedAt");
    });
  }
  if (hasSecretApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("deletedAt");
    });
  }

  await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
    t.dropForeign(["privilegeId"]);
    t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
  });
}
@@ -0,0 +1,34 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityJwtAuth))) {
    await knex.schema.createTable(TableName.IdentityJwtAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("configurationType").notNullable();
      t.string("jwksUrl").notNullable();
      t.binary("encryptedJwksCaCert").notNullable();
      t.binary("encryptedPublicKeys").notNullable();
      t.string("boundIssuer").notNullable();
      t.string("boundAudiences").notNullable();
      t.jsonb("boundClaims").notNullable();
      t.string("boundSubject").notNullable();
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityJwtAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      t.index("folderId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      t.dropIndex("folderId");
    });
  }
}
@@ -0,0 +1,297 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { v4 as uuidV4 } from "uuid";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { ProjectType, TableName } from "../schemas";

/* eslint-disable no-await-in-loop,@typescript-eslint/ban-ts-comment */
const newProject = async (knex: Knex, projectId: string, projectType: ProjectType) => {
  const newProjectId = uuidV4();
  const project = await knex(TableName.Project).where("id", projectId).first();
  await knex(TableName.Project).insert({
    ...project,
    type: projectType,
    // @ts-ignore id is required
    id: newProjectId,
    slug: slugify(`${project?.name}-${alphaNumericNanoId(4)}`)
  });

  const customRoleMapping: Record<string, string> = {};
  const projectCustomRoles = await knex(TableName.ProjectRoles).where("projectId", projectId);
  if (projectCustomRoles.length) {
    await knex.batchInsert(
      TableName.ProjectRoles,
      projectCustomRoles.map((el) => {
        const id = uuidV4();
        customRoleMapping[el.id] = id;
        return {
          ...el,
          id,
          projectId: newProjectId,
          permissions: el.permissions ? JSON.stringify(el.permissions) : el.permissions
        };
      })
    );
  }
  const groupMembershipMapping: Record<string, string> = {};
  const groupMemberships = await knex(TableName.GroupProjectMembership).where("projectId", projectId);
  if (groupMemberships.length) {
    await knex.batchInsert(
      TableName.GroupProjectMembership,
      groupMemberships.map((el) => {
        const id = uuidV4();
        groupMembershipMapping[el.id] = id;
        return { ...el, id, projectId: newProjectId };
      })
    );
  }

  const groupMembershipRoles = await knex(TableName.GroupProjectMembershipRole).whereIn(
    "projectMembershipId",
    groupMemberships.map((el) => el.id)
  );
  if (groupMembershipRoles.length) {
    await knex.batchInsert(
      TableName.GroupProjectMembershipRole,
      groupMembershipRoles.map((el) => {
        const id = uuidV4();
        const projectMembershipId = groupMembershipMapping[el.projectMembershipId];
        const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
        return { ...el, id, projectMembershipId, customRoleId };
      })
    );
  }

  const identityProjectMembershipMapping: Record<string, string> = {};
  const identities = await knex(TableName.IdentityProjectMembership).where("projectId", projectId);
  if (identities.length) {
    await knex.batchInsert(
      TableName.IdentityProjectMembership,
      identities.map((el) => {
        const id = uuidV4();
        identityProjectMembershipMapping[el.id] = id;
        return { ...el, id, projectId: newProjectId };
      })
    );
  }

  const identitiesRoles = await knex(TableName.IdentityProjectMembershipRole).whereIn(
    "projectMembershipId",
    identities.map((el) => el.id)
  );
  if (identitiesRoles.length) {
    await knex.batchInsert(
      TableName.IdentityProjectMembershipRole,
      identitiesRoles.map((el) => {
        const id = uuidV4();
        const projectMembershipId = identityProjectMembershipMapping[el.projectMembershipId];
        const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
        return { ...el, id, projectMembershipId, customRoleId };
      })
    );
  }

  const projectMembershipMapping: Record<string, string> = {};
  const projectUserMembers = await knex(TableName.ProjectMembership).where("projectId", projectId);
  if (projectUserMembers.length) {
    await knex.batchInsert(
      TableName.ProjectMembership,
      projectUserMembers.map((el) => {
        const id = uuidV4();
        projectMembershipMapping[el.id] = id;
        return { ...el, id, projectId: newProjectId };
      })
    );
  }
  const membershipRoles = await knex(TableName.ProjectUserMembershipRole).whereIn(
    "projectMembershipId",
    projectUserMembers.map((el) => el.id)
  );
  if (membershipRoles.length) {
    await knex.batchInsert(
      TableName.ProjectUserMembershipRole,
      membershipRoles.map((el) => {
        const id = uuidV4();
        const projectMembershipId = projectMembershipMapping[el.projectMembershipId];
        const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
        return { ...el, id, projectMembershipId, customRoleId };
      })
    );
  }

  const kmsKeys = await knex(TableName.KmsKey).where("projectId", projectId).andWhere("isReserved", true);
  if (kmsKeys.length) {
    await knex.batchInsert(
      TableName.KmsKey,
      kmsKeys.map((el) => {
        const id = uuidV4();
        const slug = slugify(alphaNumericNanoId(8).toLowerCase());
        return { ...el, id, slug, projectId: newProjectId };
      })
    );
  }

  const projectBot = await knex(TableName.ProjectBot).where("projectId", projectId).first();
  if (projectBot) {
    const newProjectBot = { ...projectBot, id: uuidV4(), projectId: newProjectId };
    await knex(TableName.ProjectBot).insert(newProjectBot);
  }

  const projectKeys = await knex(TableName.ProjectKeys).where("projectId", projectId);
  if (projectKeys.length) {
    await knex.batchInsert(
      TableName.ProjectKeys,
      projectKeys.map((el) => {
        const id = uuidV4();
        return { ...el, id, projectId: newProjectId };
      })
    );
  }

  return newProjectId;
};

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
  if (!hasSplitMappingTable) {
    await knex.schema.createTable(TableName.ProjectSplitBackfillIds, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("sourceProjectId", 36).notNullable();
      t.foreign("sourceProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("destinationProjectType").notNullable();
      t.string("destinationProjectId", 36).notNullable();
      t.foreign("destinationProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
    });
  }

  const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
  if (!hasTypeColumn) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("type");
    });

    let projectsToBeTyped;
    do {
      // eslint-disable-next-line no-await-in-loop
      projectsToBeTyped = await knex(TableName.Project).whereNull("type").limit(BATCH_SIZE).select("id");
      if (projectsToBeTyped.length) {
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.Project)
          .whereIn(
            "id",
            projectsToBeTyped.map((el) => el.id)
          )
          .update({ type: ProjectType.SecretManager });
      }
    } while (projectsToBeTyped.length > 0);

    const projectsWithCertificates = await knex(TableName.CertificateAuthority)
      .distinct("projectId")
      .select("projectId");
    /* eslint-disable no-await-in-loop,no-param-reassign */
    for (const { projectId } of projectsWithCertificates) {
      const newProjectId = await newProject(knex, projectId, ProjectType.CertificateManager);
      await knex(TableName.CertificateAuthority).where("projectId", projectId).update({ projectId: newProjectId });
      await knex(TableName.PkiAlert).where("projectId", projectId).update({ projectId: newProjectId });
      await knex(TableName.PkiCollection).where("projectId", projectId).update({ projectId: newProjectId });
      await knex(TableName.ProjectSplitBackfillIds).insert({
        sourceProjectId: projectId,
        destinationProjectType: ProjectType.CertificateManager,
        destinationProjectId: newProjectId
      });
    }

    const projectsWithCmek = await knex(TableName.KmsKey)
      .where("isReserved", false)
      .whereNotNull("projectId")
      .distinct("projectId")
      .select("projectId");
    for (const { projectId } of projectsWithCmek) {
      if (projectId) {
        const newProjectId = await newProject(knex, projectId, ProjectType.KMS);
        await knex(TableName.KmsKey)
          .where({
            isReserved: false,
            projectId
          })
          .update({ projectId: newProjectId });
        await knex(TableName.ProjectSplitBackfillIds).insert({
          sourceProjectId: projectId,
          destinationProjectType: ProjectType.KMS,
          destinationProjectId: newProjectId
        });
      }
    }

    /* eslint-enable */
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("type").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
  const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);

  if (hasTypeColumn && hasSplitMappingTable) {
    const splitProjectMappings = await knex(TableName.ProjectSplitBackfillIds).where({});
    const certMapping = splitProjectMappings.filter(
      (el) => el.destinationProjectType === ProjectType.CertificateManager
    );
    /* eslint-disable no-await-in-loop */
    for (const project of certMapping) {
      await knex(TableName.CertificateAuthority)
        .where("projectId", project.destinationProjectId)
        .update({ projectId: project.sourceProjectId });
      await knex(TableName.PkiAlert)
        .where("projectId", project.destinationProjectId)
        .update({ projectId: project.sourceProjectId });
      await knex(TableName.PkiCollection)
        .where("projectId", project.destinationProjectId)
        .update({ projectId: project.sourceProjectId });
    }

    /* eslint-enable */
    const kmsMapping = splitProjectMappings.filter((el) => el.destinationProjectType === ProjectType.KMS);
    /* eslint-disable no-await-in-loop */
    for (const project of kmsMapping) {
      await knex(TableName.KmsKey)
        .where({
          isReserved: false,
          projectId: project.destinationProjectId
        })
        .update({ projectId: project.sourceProjectId });
    }
    /* eslint-enable */
    await knex(TableName.ProjectMembership)
      .whereIn(
        "projectId",
        splitProjectMappings.map((el) => el.destinationProjectId)
      )
      .delete();
    await knex(TableName.ProjectRoles)
      .whereIn(
        "projectId",
        splitProjectMappings.map((el) => el.destinationProjectId)
      )
      .delete();
    await knex(TableName.Project)
      .whereIn(
        "id",
        splitProjectMappings.map((el) => el.destinationProjectId)
      )
      .delete();

    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("type");
    });
  }

  if (hasSplitMappingTable) {
    await knex.schema.dropTableIfExists(TableName.ProjectSplitBackfillIds);
  }
}
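As an aside (not part of the diff), the ProjectSplitBackfillIds table created above records a stable source-to-destination mapping, which is what the down migration walks to undo the split; a hypothetical lookup helper over that mapping could look like the sketch below. The helper name is an assumption for illustration.

import { Knex } from "knex";
import { ProjectType, TableName } from "@app/db/schemas";

// Illustrative only: resolve the project that a source project's resources of a given
// type were split into, or undefined if that project was never split.
const findSplitDestination = async (db: Knex, sourceProjectId: string, type: ProjectType) => {
  const mapping = await db(TableName.ProjectSplitBackfillIds)
    .where({ sourceProjectId, destinationProjectType: type })
    .first();
  return mapping?.destinationProjectId;
};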
99  backend/src/db/migrations/20241216013357_ssh-mgmt.ts  Normal file

@@ -0,0 +1,99 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
    await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("status").notNullable(); // active / disabled
      t.string("friendlyName").notNullable();
      t.string("keyAlgorithm").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
    await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable().unique();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.binary("encryptedPrivateKey").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
    await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.string("status").notNullable(); // active / disabled
      t.string("name").notNullable();
      t.string("ttl").notNullable();
      t.string("maxTTL").notNullable();
      t.specificType("allowedUsers", "text[]").notNullable();
      t.specificType("allowedHosts", "text[]").notNullable();
      t.boolean("allowUserCertificates").notNullable();
      t.boolean("allowHostCertificates").notNullable();
      t.boolean("allowCustomKeyIds").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
    await knex.schema.createTable(TableName.SshCertificate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
      t.uuid("sshCertificateTemplateId");
      t.foreign("sshCertificateTemplateId")
        .references("id")
        .inTable(TableName.SshCertificateTemplate)
        .onDelete("SET NULL");
      t.string("serialNumber").notNullable().unique();
      t.string("certType").notNullable(); // user or host
      t.specificType("principals", "text[]").notNullable();
      t.string("keyId").notNullable();
      t.datetime("notBefore").notNullable();
      t.datetime("notAfter").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificate);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
    await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCertId").notNullable().unique();
      t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
      t.binary("encryptedCertificate").notNullable();
    });

    await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);

  await knex.schema.dropTableIfExists(TableName.SshCertificate);
  await dropOnUpdateTrigger(knex, TableName.SshCertificate);

  await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);

  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);

  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}
28  backend/src/db/migrations/20241218181018_app-connection.ts  Normal file

@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AppConnection))) {
    await knex.schema.createTable(TableName.AppConnection, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name", 32).notNullable();
      t.string("description");
      t.string("app").notNullable();
      t.string("method").notNullable();
      t.binary("encryptedCredentials").notNullable();
      t.integer("version").defaultTo(1).notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.AppConnection);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.AppConnection);
  await dropOnUpdateTrigger(knex, TableName.AppConnection);
}
|
@ -15,7 +15,8 @@ export const AccessApprovalPoliciesSchema = z.object({
|
|||||||
envId: z.string().uuid(),
|
envId: z.string().uuid(),
|
||||||
createdAt: z.date(),
|
createdAt: z.date(),
|
||||||
updatedAt: z.date(),
|
updatedAt: z.date(),
|
||||||
enforcementLevel: z.string().default("hard")
|
enforcementLevel: z.string().default("hard"),
|
||||||
|
deletedAt: z.date().nullable().optional()
|
||||||
});
|
});
|
||||||
|
|
||||||
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
|
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
|
||||||
27  backend/src/db/schemas/app-connections.ts  Normal file

@@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const AppConnectionsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().nullable().optional(),
  app: z.string(),
  method: z.string(),
  encryptedCredentials: zodBuffer,
  version: z.number().default(1),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
export type TAppConnectionsInsert = Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>;
export type TAppConnectionsUpdate = Partial<Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>>;
33  backend/src/db/schemas/identity-jwt-auths.ts  Normal file

@@ -0,0 +1,33 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityJwtAuthsSchema = z.object({
  id: z.string().uuid(),
  accessTokenTTL: z.coerce.number().default(7200),
  accessTokenMaxTTL: z.coerce.number().default(7200),
  accessTokenNumUsesLimit: z.coerce.number().default(0),
  accessTokenTrustedIps: z.unknown(),
  identityId: z.string().uuid(),
  configurationType: z.string(),
  jwksUrl: z.string(),
  encryptedJwksCaCert: zodBuffer,
  encryptedPublicKeys: zodBuffer,
  boundIssuer: z.string(),
  boundAudiences: z.string(),
  boundClaims: z.unknown(),
  boundSubject: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TIdentityJwtAuths = z.infer<typeof IdentityJwtAuthsSchema>;
export type TIdentityJwtAuthsInsert = Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>;
export type TIdentityJwtAuthsUpdate = Partial<Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>>;
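A brief aside (not part of the diff): these generated Zod schemas are typically used to validate and narrow untyped database rows into the exported types. A minimal sketch, with the helper name assumed for illustration:

import { IdentityJwtAuthsSchema, TIdentityJwtAuths } from "@app/db/schemas";

// `row` stands for an untyped record from the database driver; parse() throws on a mismatch.
const toIdentityJwtAuth = (row: unknown): TIdentityJwtAuths => IdentityJwtAuthsSchema.parse(row);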
@@ -30,6 +30,7 @@ export * from "./identity-access-tokens";
 export * from "./identity-aws-auths";
 export * from "./identity-azure-auths";
 export * from "./identity-gcp-auths";
+export * from "./identity-jwt-auths";
 export * from "./identity-kubernetes-auths";
 export * from "./identity-metadata";
 export * from "./identity-oidc-auths";

@@ -64,6 +65,7 @@ export * from "./project-keys";
 export * from "./project-memberships";
 export * from "./project-roles";
 export * from "./project-slack-configs";
+export * from "./project-split-backfill-ids";
 export * from "./project-templates";
 export * from "./project-user-additional-privilege";
 export * from "./project-user-membership-roles";

@@ -105,7 +107,13 @@ export * from "./secrets";
 export * from "./secrets-v2";
 export * from "./service-tokens";
 export * from "./slack-integrations";
+export * from "./ssh-certificate-authorities";
+export * from "./ssh-certificate-authority-secrets";
+export * from "./ssh-certificate-bodies";
+export * from "./ssh-certificate-templates";
+export * from "./ssh-certificates";
 export * from "./super-admin";
+export * from "./totp-configs";
 export * from "./trusted-ips";
 export * from "./user-actions";
 export * from "./user-aliases";
@@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
   encryptedRootKey: zodBuffer,
-  encryptionStrategy: z.string(),
+  encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date()
 });
@@ -2,6 +2,11 @@ import { z } from "zod";

 export enum TableName {
   Users = "users",
+  SshCertificateAuthority = "ssh_certificate_authorities",
+  SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
+  SshCertificateTemplate = "ssh_certificate_templates",
+  SshCertificate = "ssh_certificates",
+  SshCertificateBody = "ssh_certificate_bodies",
   CertificateAuthority = "certificate_authorities",
   CertificateTemplateEstConfig = "certificate_template_est_configs",
   CertificateAuthorityCert = "certificate_authority_certs",

@@ -68,6 +73,7 @@ export enum TableName {
   IdentityUaClientSecret = "identity_ua_client_secrets",
   IdentityAwsAuth = "identity_aws_auths",
   IdentityOidcAuth = "identity_oidc_auths",
+  IdentityJwtAuth = "identity_jwt_auths",
   IdentityOrgMembership = "identity_org_memberships",
   IdentityProjectMembership = "identity_project_memberships",
   IdentityProjectMembershipRole = "identity_project_membership_role",

@@ -105,6 +111,7 @@ export enum TableName {
   SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
   SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
   SnapshotSecretV2 = "secret_snapshot_secrets_v2",
+  ProjectSplitBackfillIds = "project_split_backfill_ids",
   // junction tables with tags
   SecretV2JnTag = "secret_v2_tag_junction",
   JnSecretTag = "secret_tag_junction",

@@ -117,11 +124,13 @@ export enum TableName {
   ExternalKms = "external_kms",
   InternalKms = "internal_kms",
   InternalKmsKeyVersion = "internal_kms_key_version",
+  TotpConfig = "totp_configs",
   // @depreciated
   KmsKeyVersion = "kms_key_versions",
   WorkflowIntegrations = "workflow_integrations",
   SlackIntegrations = "slack_integrations",
-  ProjectSlackConfigs = "project_slack_configs"
+  ProjectSlackConfigs = "project_slack_configs",
+  AppConnection = "app_connections"
 }

 export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

@@ -195,5 +204,13 @@ export enum IdentityAuthMethod {
   GCP_AUTH = "gcp-auth",
   AWS_AUTH = "aws-auth",
   AZURE_AUTH = "azure-auth",
-  OIDC_AUTH = "oidc-auth"
+  OIDC_AUTH = "oidc-auth",
+  JWT_AUTH = "jwt-auth"
+}
+
+export enum ProjectType {
+  SecretManager = "secret-manager",
+  CertificateManager = "cert-manager",
+  KMS = "kms",
+  SSH = "ssh"
 }
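As a small illustration (not part of the diff) of how the new ProjectType enum and the project `type` column can be consumed, a hypothetical guard function:

import { ProjectType } from "@app/db/schemas";

// Hypothetical helper; rejects projects that were split out into another type
// when a code path expects a secret-manager project.
const assertSecretManagerProject = (project: { type: string }) => {
  if (project.type !== ProjectType.SecretManager) {
    throw new Error(`Expected a ${ProjectType.SecretManager} project, got ${project.type}`);
  }
};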
@@ -21,7 +21,8 @@ export const OrganizationsSchema = z.object({
   kmsDefaultKeyId: z.string().uuid().nullable().optional(),
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
-  enforceMfa: z.boolean().default(false)
+  enforceMfa: z.boolean().default(false),
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
21  backend/src/db/schemas/project-split-backfill-ids.ts  Normal file

@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ProjectSplitBackfillIdsSchema = z.object({
  id: z.string().uuid(),
  sourceProjectId: z.string(),
  destinationProjectType: z.string(),
  destinationProjectId: z.string()
});

export type TProjectSplitBackfillIds = z.infer<typeof ProjectSplitBackfillIdsSchema>;
export type TProjectSplitBackfillIdsInsert = Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>;
export type TProjectSplitBackfillIdsUpdate = Partial<
  Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>
>;
@@ -23,7 +23,9 @@ export const ProjectsSchema = z.object({
   kmsCertificateKeyId: z.string().uuid().nullable().optional(),
   auditLogsRetentionDays: z.number().nullable().optional(),
   kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
-  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
+  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
+  description: z.string().nullable().optional(),
+  type: z.string()
 });

 export type TProjects = z.infer<typeof ProjectsSchema>;
@@ -15,7 +15,8 @@ export const SecretApprovalPoliciesSchema = z.object({
   envId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  enforcementLevel: z.string().default("hard")
+  enforcementLevel: z.string().default("hard"),
+  deletedAt: z.date().nullable().optional()
 });

 export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
24  backend/src/db/schemas/ssh-certificate-authorities.ts  Normal file

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SshCertificateAuthoritiesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  status: z.string(),
  friendlyName: z.string(),
  keyAlgorithm: z.string()
});

export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;
export type TSshCertificateAuthoritiesInsert = Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TSshCertificateAuthoritiesUpdate = Partial<
  Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>
>;
27  backend/src/db/schemas/ssh-certificate-authority-secrets.ts  Normal file
@@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SshCertificateAuthoritySecretsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  sshCaId: z.string().uuid(),
  encryptedPrivateKey: zodBuffer
});

export type TSshCertificateAuthoritySecrets = z.infer<typeof SshCertificateAuthoritySecretsSchema>;
export type TSshCertificateAuthoritySecretsInsert = Omit<
  z.input<typeof SshCertificateAuthoritySecretsSchema>,
  TImmutableDBKeys
>;
export type TSshCertificateAuthoritySecretsUpdate = Partial<
  Omit<z.input<typeof SshCertificateAuthoritySecretsSchema>, TImmutableDBKeys>
>;
22  backend/src/db/schemas/ssh-certificate-bodies.ts  Normal file
@@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SshCertificateBodiesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  sshCertId: z.string().uuid(),
  encryptedCertificate: zodBuffer
});

export type TSshCertificateBodies = z.infer<typeof SshCertificateBodiesSchema>;
export type TSshCertificateBodiesInsert = Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>;
export type TSshCertificateBodiesUpdate = Partial<Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>>;
30  backend/src/db/schemas/ssh-certificate-templates.ts  Normal file
@@ -0,0 +1,30 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SshCertificateTemplatesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  sshCaId: z.string().uuid(),
  status: z.string(),
  name: z.string(),
  ttl: z.string(),
  maxTTL: z.string(),
  allowedUsers: z.string().array(),
  allowedHosts: z.string().array(),
  allowUserCertificates: z.boolean(),
  allowHostCertificates: z.boolean(),
  allowCustomKeyIds: z.boolean()
});

export type TSshCertificateTemplates = z.infer<typeof SshCertificateTemplatesSchema>;
export type TSshCertificateTemplatesInsert = Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>;
export type TSshCertificateTemplatesUpdate = Partial<
  Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>
>;
26  backend/src/db/schemas/ssh-certificates.ts  Normal file
@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SshCertificatesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  sshCaId: z.string().uuid(),
  sshCertificateTemplateId: z.string().uuid().nullable().optional(),
  serialNumber: z.string(),
  certType: z.string(),
  principals: z.string().array(),
  keyId: z.string(),
  notBefore: z.date(),
  notAfter: z.date()
});

export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;
export type TSshCertificatesInsert = Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>;
export type TSshCertificatesUpdate = Partial<Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>>;
24  backend/src/db/schemas/totp-configs.ts  Normal file
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const TotpConfigsSchema = z.object({
  id: z.string().uuid(),
  userId: z.string().uuid(),
  isVerified: z.boolean().default(false),
  encryptedRecoveryCodes: zodBuffer,
  encryptedSecret: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
@@ -26,7 +26,8 @@ export const UsersSchema = z.object({
   consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
   isLocked: z.boolean().default(false).nullable().optional(),
   temporaryLockDateEnd: z.date().nullable().optional(),
-  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
+  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TUsers = z.infer<typeof UsersSchema>;
@@ -4,7 +4,7 @@ import { Knex } from "knex";

 import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";

-import { ProjectMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
+import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
 import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";

 export const DEFAULT_PROJECT_ENVS = [
@@ -24,6 +24,7 @@ export async function seed(knex: Knex): Promise<void> {
     name: seedData1.project.name,
     orgId: seedData1.organization.id,
     slug: "first-project",
+    type: ProjectType.SecretManager,
     // eslint-disable-next-line
     // @ts-ignore
     id: seedData1.project.id
@@ -1,6 +1,6 @@
 import { Knex } from "knex";

-import { ProjectMembershipRole, ProjectVersion, TableName } from "../schemas";
+import { ProjectMembershipRole, ProjectType, ProjectVersion, TableName } from "../schemas";
 import { seedData1 } from "../seed-data";

 export const DEFAULT_PROJECT_ENVS = [
@@ -16,6 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
     orgId: seedData1.organization.id,
     slug: seedData1.projectV3.slug,
     version: ProjectVersion.V3,
+    type: ProjectType.SecretManager,
     // eslint-disable-next-line
     // @ts-ignore
     id: seedData1.projectV3.id
@@ -2,6 +2,9 @@ import { Knex } from "knex";

 import { TableName } from "./schemas";

+interface PgTriggerResult {
+  rows: Array<{ exists: boolean }>;
+}
 export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
   knex.schema.createTable(tableName, (table) => {
     table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
@@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;

 // we would be using this to apply updatedAt where ever we wanta
 // remember to set `timestamps(true,true,true)` before this on schema
-export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
-  knex.raw(`
-CREATE TRIGGER "${tableName}_updatedAt"
-BEFORE UPDATE ON ${tableName}
-FOR EACH ROW
-EXECUTE PROCEDURE on_update_timestamp();
-`);
+export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
+  const triggerExists = await knex.raw<PgTriggerResult>(`
+    SELECT EXISTS (
+      SELECT 1
+      FROM pg_trigger
+      WHERE tgname = '${tableName}_updatedAt'
+    );
+  `);
+
+  if (!triggerExists?.rows?.[0]?.exists) {
+    return knex.raw(`
+      CREATE TRIGGER "${tableName}_updatedAt"
+      BEFORE UPDATE ON ${tableName}
+      FOR EACH ROW
+      EXECUTE PROCEDURE on_update_timestamp();
+    `);
+  }
+
+  return null;
+};

 export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
   knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
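The reworked createOnUpdateTrigger above now probes pg_trigger before issuing CREATE TRIGGER, so re-running it against an existing table no longer fails. A minimal usage sketch of how a Knex migration typically wires these helpers in (the TableName member and the relative import paths here are illustrative assumptions, not taken from this diff):

import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  // safe to re-run: the helper only creates the trigger if pg_trigger has no row for it
  await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}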
@@ -109,7 +109,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProvider
             approvers: z.string().array(),
             secretPath: z.string().nullish(),
             envId: z.string(),
-            enforcementLevel: z.string()
+            enforcementLevel: z.string(),
+            deletedAt: z.date().nullish()
           }),
           reviewers: z
             .object({
@@ -1,4 +1,3 @@
-import slugify from "@sindresorhus/slugify";
 import ms from "ms";
 import { z } from "zod";

@@ -8,6 +7,7 @@ import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
 import { daysToMillisecond } from "@app/lib/dates";
 import { removeTrailingSlash } from "@app/lib/fn";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -48,15 +48,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
             .nullable(),
           path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
          environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
-          name: z
-            .string()
-            .describe(DYNAMIC_SECRETS.CREATE.name)
-            .min(1)
-            .toLowerCase()
-            .max(64)
-            .refine((v) => slugify(v) === v, {
-              message: "Slug must be a valid"
-            })
+          name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name)
         }),
         response: {
           200: z.object({
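This and the following routers swap hand-rolled slug validators for a shared slugSchema helper from @app/server/lib/schemas. Its definition is not part of this diff; a minimal sketch of the shape its call sites imply (the defaults and error messages are assumptions, not the actual implementation):

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

type TSlugSchemaArgs = { min?: number; max?: number; field?: string };

// Returns a Zod string schema, so callers can keep chaining .optional(), .refine(), .describe()
export const slugSchema = ({ min = 1, max = 64, field = "Slug" }: TSlugSchemaArgs = {}) =>
  z
    .string()
    .trim()
    .min(min, `${field} must be at least ${min} character(s)`)
    .max(max, `${field} must be at most ${max} characters`)
    .refine((v) => slugify(v) === v, `${field} must be a valid slug`);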
@@ -4,9 +4,15 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import {
   ExternalKmsAwsSchema,
+  ExternalKmsGcpCredentialSchema,
+  ExternalKmsGcpSchema,
   ExternalKmsInputSchema,
-  ExternalKmsInputUpdateSchema
+  ExternalKmsInputUpdateSchema,
+  KmsGcpKeyFetchAuthType,
+  KmsProviders,
+  TExternalKmsGcpCredentialSchema
 } from "@app/ee/services/external-kms/providers/model";
+import { NotFoundError } from "@app/lib/errors";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -44,7 +50,8 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
     statusDetails: true,
     provider: true
   }).extend({
-    providerInput: ExternalKmsAwsSchema
+    // for GCP, we don't return the credential object as it is sensitive data that should not be exposed
+    providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
   })
 });

@@ -286,4 +293,67 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
       return { externalKms };
     }
   });
+
+  server.route({
+    method: "POST",
+    url: "/gcp/keys",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      body: z.discriminatedUnion("authMethod", [
+        z.object({
+          authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
+          region: z.string().trim().min(1),
+          credential: ExternalKmsGcpCredentialSchema
+        }),
+        z.object({
+          authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
+          region: z.string().trim().min(1),
+          kmsId: z.string().trim().min(1)
+        })
+      ]),
+      response: {
+        200: z.object({
+          keys: z.string().array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    handler: async (req) => {
+      const { region, authMethod } = req.body;
+      let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
+
+      if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
+        credentialJson = req.body.credential;
+      } else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
+        const externalKms = await server.services.externalKms.findById({
+          actor: req.permission.type,
+          actorId: req.permission.id,
+          actorAuthMethod: req.permission.authMethod,
+          actorOrgId: req.permission.orgId,
+          id: req.body.kmsId
+        });
+
+        if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
+          throw new NotFoundError({ message: "KMS not found or not of type GCP" });
+        }
+
+        credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
+      }
+
+      if (!credentialJson) {
+        throw new NotFoundError({
+          message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
+        });
+      }
+
+      const results = await server.services.externalKms.fetchGcpKeys({
+        credential: credentialJson,
+        gcpRegion: region
+      });
+
+      return results;
+    }
+  });
 };
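A hedged sketch of calling the new GCP key-listing endpoint; only the /gcp/keys route, its discriminated body, and the { keys: string[] } response come from the diff above, while the base URL, the bearer token, and the literal value behind KmsGcpKeyFetchAuthType.Kms are assumptions:

async function listGcpKmsKeys(accessToken: string, kmsId: string): Promise<string[]> {
  const res = await fetch("https://app.infisical.com/api/v1/external-kms/gcp/keys", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${accessToken}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      authMethod: "kms", // assumed string literal behind KmsGcpKeyFetchAuthType.Kms
      region: "us-central1",
      kmsId
    })
  });
  const { keys } = (await res.json()) as { keys: string[] };
  return keys;
}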
@@ -1,8 +1,9 @@
-import slugify from "@sindresorhus/slugify";
 import { z } from "zod";

 import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
+import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
 import { GROUPS } from "@app/lib/api-docs";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";

@@ -14,15 +15,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
     schema: {
       body: z.object({
         name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
-        slug: z
-          .string()
-          .min(5)
-          .max(36)
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid slug"
-          })
-          .optional()
-          .describe(GROUPS.CREATE.slug),
+        slug: slugSchema({ min: 5, max: 36 }).optional().describe(GROUPS.CREATE.slug),
         role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
       }),
       response: {
@@ -100,14 +93,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
       body: z
         .object({
           name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
-          slug: z
-            .string()
-            .min(5)
-            .max(36)
-            .refine((v) => slugify(v) === v, {
-              message: "Slug must be a valid slug"
-            })
-            .describe(GROUPS.UPDATE.slug),
+          slug: slugSchema({ min: 5, max: 36 }).describe(GROUPS.UPDATE.slug),
           role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
         })
         .partial(),
@@ -166,7 +152,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
         offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
         limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
         username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
-        search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
+        search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
+        filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
       }),
       response: {
         200: z.object({
@@ -179,7 +166,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
             })
             .merge(
               z.object({
-                isPartOfGroup: z.boolean()
+                isPartOfGroup: z.boolean(),
+                joinedGroupAt: z.date().nullable()
               })
             )
             .array(),
@@ -8,6 +8,7 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
 import { UnauthorizedError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import {
   ProjectPermissionSchema,
@@ -33,17 +34,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
       body: z.object({
         identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
         projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
-        slug: z
-          .string()
-          .min(1)
-          .max(60)
-          .trim()
-          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid slug"
-          })
-          .optional()
-          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
+        slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
         permissions: ProjectPermissionSchema.array()
           .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
           .optional(),
@@ -77,7 +68,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        ...req.body,
-        slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
+        slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
        isTemporary: false,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
@@ -103,17 +94,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
       body: z.object({
         identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
         projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
-        slug: z
-          .string()
-          .min(1)
-          .max(60)
-          .trim()
-          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid slug"
-          })
-          .optional()
-          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
+        slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
         permissions: ProjectPermissionSchema.array()
           .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
           .optional(),
@@ -159,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        ...req.body,
-        slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
+        slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
        isTemporary: true,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
@@ -189,16 +170,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
         projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
         privilegeDetails: z
           .object({
-            slug: z
-              .string()
-              .min(1)
-              .max(60)
-              .trim()
-              .refine((val) => val.toLowerCase() === val, "Must be lowercase")
-              .refine((v) => slugify(v) === v, {
-                message: "Slug must be a valid slug"
-              })
-              .describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
+            slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
             permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
             privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
               IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission
@@ -25,6 +25,9 @@ import { registerSecretRotationRouter } from "./secret-rotation-router";
 import { registerSecretScanningRouter } from "./secret-scanning-router";
 import { registerSecretVersionRouter } from "./secret-version-router";
 import { registerSnapshotRouter } from "./snapshot-router";
+import { registerSshCaRouter } from "./ssh-certificate-authority-router";
+import { registerSshCertRouter } from "./ssh-certificate-router";
+import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
 import { registerTrustedIpRouter } from "./trusted-ip-router";
 import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";

@@ -68,6 +71,15 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
     { prefix: "/pki" }
   );

+  await server.register(
+    async (sshRouter) => {
+      await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
+      await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
+      await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
+    },
+    { prefix: "/ssh" }
+  );
+
   await server.register(
     async (ssoRouter) => {
       await ssoRouter.register(registerSamlRouter);
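For orientation, the registration above nests the three new SSH routers under a shared /ssh prefix; relative to the v1 EE route prefix (whose exact base path is not shown in this diff), the resulting mounts are roughly:

// /ssh/ca                      -> registerSshCaRouter
// /ssh/certificates            -> registerSshCertRouter
// /ssh/certificate-templates   -> registerSshCertificateTemplateRouter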
@@ -9,7 +9,6 @@
 import { Authenticator, Strategy } from "@fastify/passport";
 import fastifySession from "@fastify/session";
 import RedisStore from "connect-redis";
-import { Redis } from "ioredis";
 import { z } from "zod";

 import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
@@ -21,7 +20,6 @@ import { AuthMode } from "@app/services/auth/auth-type";

 export const registerOidcRouter = async (server: FastifyZodProvider) => {
   const appCfg = getConfig();
-  const redis = new Redis(appCfg.REDIS_URL);
   const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });

   /*
@@ -30,7 +28,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
   - Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
   */
   const redisStore = new RedisStore({
-    client: redis,
+    client: server.redis,
     prefix: "oidc-session:",
     ttl: 600 // 10 minutes
   });
@@ -1,8 +1,8 @@
-import slugify from "@sindresorhus/slugify";
 import { z } from "zod";

 import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";

@@ -18,19 +18,12 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
         organizationId: z.string().trim()
       }),
       body: z.object({
-        slug: z
-          .string()
-          .min(1)
-          .trim()
-          .refine(
-            (val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
-            "Please choose a different slug, the slug you have entered is reserved"
-          )
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid"
-          }),
+        slug: slugSchema({ min: 1, max: 64 }).refine(
+          (val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
+          "Please choose a different slug, the slug you have entered is reserved"
+        ),
         name: z.string().trim(),
-        description: z.string().trim().optional(),
+        description: z.string().trim().nullish(),
         permissions: z.any().array()
       }),
       response: {
@@ -94,19 +87,15 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
         roleId: z.string().trim()
       }),
       body: z.object({
-        slug: z
-          .string()
-          .trim()
-          .optional()
+        // TODO: Switch to slugSchema after verifying correct methods with Akhil - Omar 11/24
+        slug: slugSchema({ min: 1, max: 64 })
           .refine(
-            (val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val),
+            (val) => !Object.keys(OrgMembershipRole).includes(val),
             "Please choose a different slug, the slug you have entered is reserved."
           )
-          .refine((val) => typeof val === "undefined" || slugify(val) === val, {
-            message: "Slug must be a valid"
-          }),
+          .optional(),
         name: z.string().trim().optional(),
-        description: z.string().trim().optional(),
+        description: z.string().trim().nullish(),
         permissions: z.any().array().optional()
       }),
      response: {
@@ -1,5 +1,4 @@
 import { packRules } from "@casl/ability/extra";
-import slugify from "@sindresorhus/slugify";
 import { z } from "zod";

 import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
@@ -9,6 +8,7 @@ import {
 } from "@app/ee/services/permission/project-permission";
 import { PROJECT_ROLE } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -32,21 +32,14 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
         projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug)
       }),
       body: z.object({
-        slug: z
-          .string()
-          .toLowerCase()
-          .trim()
-          .min(1)
+        slug: slugSchema({ max: 64 })
           .refine(
             (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
             "Please choose a different slug, the slug you have entered is reserved"
           )
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid"
-          })
           .describe(PROJECT_ROLE.CREATE.slug),
         name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
-        description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
+        description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
         permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
       }),
       response: {
@@ -94,23 +87,15 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
         roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
       }),
       body: z.object({
-        slug: z
-          .string()
-          .toLowerCase()
-          .trim()
-          .optional()
-          .describe(PROJECT_ROLE.UPDATE.slug)
+        slug: slugSchema({ max: 64 })
           .refine(
-            (val) =>
-              typeof val === "undefined" ||
-              !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
+            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
             "Please choose a different slug, the slug you have entered is reserved"
           )
-          .refine((val) => typeof val === "undefined" || slugify(val) === val, {
-            message: "Slug must be a valid"
-          }),
+          .describe(PROJECT_ROLE.UPDATE.slug)
+          .optional(),
         name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
-        description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
+        description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
         permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
       }),
      response: {
@@ -1,4 +1,3 @@
-import slugify from "@sindresorhus/slugify";
 import { z } from "zod";

 import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
@@ -8,22 +7,13 @@ import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-tem
 import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
 import { ProjectTemplates } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
 import { AuthMode } from "@app/services/auth/auth-type";

 const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

-const SlugSchema = z
-  .string()
-  .trim()
-  .min(1)
-  .max(32)
-  .refine((val) => val.toLowerCase() === val, "Must be lowercase")
-  .refine((v) => slugify(v) === v, {
-    message: "Must be valid slug format"
-  });
-
 const isReservedRoleSlug = (slug: string) =>
   Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);

@@ -34,14 +24,14 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
   roles: z
     .object({
       name: z.string().trim().min(1),
-      slug: SlugSchema,
+      slug: slugSchema(),
       permissions: UnpackedPermissionSchema.array()
     })
     .array(),
   environments: z
     .object({
       name: z.string().trim().min(1),
-      slug: SlugSchema,
+      slug: slugSchema(),
       position: z.number().min(1)
     })
     .array()
@@ -50,7 +40,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
 const ProjectTemplateRolesSchema = z
   .object({
     name: z.string().trim().min(1),
-    slug: SlugSchema,
+    slug: slugSchema(),
     permissions: ProjectPermissionV2Schema.array()
   })
   .array()
@@ -78,7 +68,7 @@ const ProjectTemplateRolesSchema = z
 const ProjectTemplateEnvironmentsSchema = z
   .object({
     name: z.string().trim().min(1),
-    slug: SlugSchema,
+    slug: slugSchema(),
     position: z.number().min(1)
   })
   .array()
@@ -188,9 +178,11 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
     schema: {
       description: "Create a project template.",
       body: z.object({
-        name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
-          message: `The requested project template name is reserved.`
-        }).describe(ProjectTemplates.CREATE.name),
+        name: slugSchema({ field: "name" })
+          .refine((val) => !isInfisicalProjectTemplate(val), {
+            message: `The requested project template name is reserved.`
+          })
+          .describe(ProjectTemplates.CREATE.name),
         description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
         roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
         environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
@@ -230,9 +222,10 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
       description: "Update a project template.",
       params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
       body: z.object({
-        name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
-          message: `The requested project template name is reserved.`
-        })
+        name: slugSchema({ field: "name" })
+          .refine((val) => !isInfisicalProjectTemplate(val), {
+            message: `The requested project template name is reserved.`
+          })
           .optional()
           .describe(ProjectTemplates.UPDATE.name),
         description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),
@@ -122,6 +122,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
           },
           `email: ${email} firstName: ${profile.firstName as string}`
         );
+
+        throw new Error("Invalid saml request. Missing email or first name");
       }

       const userMetadata = Object.keys(profile.attributes || {})
@@ -52,7 +52,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
             })
             .array(),
           secretPath: z.string().optional().nullable(),
-          enforcementLevel: z.string()
+          enforcementLevel: z.string(),
+          deletedAt: z.date().nullish()
         }),
         committerUser: approvalRequestUser,
         commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@@ -260,7 +261,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
             approvals: z.number(),
             approvers: approvalRequestUser.array(),
             secretPath: z.string().optional().nullable(),
-            enforcementLevel: z.string()
+            enforcementLevel: z.string(),
+            deletedAt: z.date().nullish()
           }),
           environment: z.string(),
           statusChangedByUser: approvalRequestUser.optional(),
279
backend/src/ee/routes/v1/ssh-certificate-authority-router.ts
Normal file
279
backend/src/ee/routes/v1/ssh-certificate-authority-router.ts
Normal file
@ -0,0 +1,279 @@
|
|||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||||
|
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
|
||||||
|
import { SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||||
|
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
|
||||||
|
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||||
|
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||||
|
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||||
|
import { AuthMode } from "@app/services/auth/auth-type";
|
||||||
|
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||||
|
|
||||||
|
export const registerSshCaRouter = async (server: FastifyZodProvider) => {
|
||||||
|
server.route({
|
||||||
|
method: "POST",
|
||||||
|
url: "/",
|
||||||
|
config: {
|
||||||
|
rateLimit: writeLimit
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
|
schema: {
|
||||||
|
description: "Create SSH CA",
|
||||||
|
body: z.object({
|
||||||
|
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
|
||||||
|
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
|
||||||
|
keyAlgorithm: z
|
||||||
|
.nativeEnum(CertKeyAlgorithm)
|
||||||
|
.default(CertKeyAlgorithm.RSA_2048)
|
||||||
|
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
ca: sanitizedSshCa.extend({
|
||||||
|
publicKey: z.string()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
handler: async (req) => {
|
||||||
|
const ca = await server.services.sshCertificateAuthority.createSshCa({
|
||||||
|
actor: req.permission.type,
|
||||||
|
actorId: req.permission.id,
|
||||||
|
actorAuthMethod: req.permission.authMethod,
|
||||||
|
actorOrgId: req.permission.orgId,
|
||||||
|
...req.body
|
||||||
|
});
|
||||||
|
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
projectId: ca.projectId,
|
||||||
|
event: {
|
||||||
|
type: EventType.CREATE_SSH_CA,
|
||||||
|
metadata: {
|
||||||
|
sshCaId: ca.id,
|
||||||
|
friendlyName: ca.friendlyName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
ca
|
||||||
|
};
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "GET",
|
||||||
|
url: "/:sshCaId",
|
||||||
|
config: {
|
||||||
|
rateLimit: readLimit
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
|
schema: {
|
||||||
|
description: "Get SSH CA",
|
||||||
|
params: z.object({
|
||||||
|
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET.sshCaId)
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
ca: sanitizedSshCa.extend({
|
||||||
|
publicKey: z.string()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
handler: async (req) => {
|
||||||
|
const ca = await server.services.sshCertificateAuthority.getSshCaById({
|
||||||
|
caId: req.params.sshCaId,
|
||||||
|
actor: req.permission.type,
|
||||||
|
actorId: req.permission.id,
|
||||||
|
actorAuthMethod: req.permission.authMethod,
|
||||||
|
actorOrgId: req.permission.orgId
|
||||||
|
});
|
||||||
|
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
projectId: ca.projectId,
|
||||||
|
event: {
|
||||||
|
type: EventType.GET_SSH_CA,
|
||||||
|
metadata: {
|
||||||
|
sshCaId: ca.id,
|
||||||
|
friendlyName: ca.friendlyName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
      return {
        ca
      };
    }
  });

  server.route({
    method: "GET",
    url: "/:sshCaId/public-key",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Get public key of SSH CA",
      params: z.object({
        sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_PUBLIC_KEY.sshCaId)
      }),
      response: {
        200: z.string()
      }
    },
    handler: async (req) => {
      const publicKey = await server.services.sshCertificateAuthority.getSshCaPublicKey({
        caId: req.params.sshCaId
      });

      return publicKey;
    }
  });

  server.route({
    method: "PATCH",
    url: "/:sshCaId",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Update SSH CA",
      params: z.object({
        sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.sshCaId)
      }),
      body: z.object({
        friendlyName: z.string().optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.friendlyName),
        status: z.nativeEnum(SshCaStatus).optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.status)
      }),
      response: {
        200: z.object({
          ca: sanitizedSshCa.extend({
            publicKey: z.string()
          })
        })
      }
    },
    handler: async (req) => {
      const ca = await server.services.sshCertificateAuthority.updateSshCaById({
        caId: req.params.sshCaId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.body
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: ca.projectId,
        event: {
          type: EventType.UPDATE_SSH_CA,
          metadata: {
            sshCaId: ca.id,
            friendlyName: ca.friendlyName,
            status: ca.status as SshCaStatus
          }
        }
      });

      return {
        ca
      };
    }
  });

  server.route({
    method: "DELETE",
    url: "/:sshCaId",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Delete SSH CA",
      params: z.object({
        sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.DELETE.sshCaId)
      }),
      response: {
        200: z.object({
          ca: sanitizedSshCa
        })
      }
    },
    handler: async (req) => {
      const ca = await server.services.sshCertificateAuthority.deleteSshCaById({
        caId: req.params.sshCaId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: ca.projectId,
        event: {
          type: EventType.DELETE_SSH_CA,
          metadata: {
            sshCaId: ca.id,
            friendlyName: ca.friendlyName
          }
        }
      });

      return {
        ca
      };
    }
  });

  server.route({
    method: "GET",
    url: "/:sshCaId/certificate-templates",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Get list of certificate templates for the SSH CA",
      params: z.object({
        sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_CERTIFICATE_TEMPLATES.sshCaId)
      }),
      response: {
        200: z.object({
          certificateTemplates: sanitizedSshCertificateTemplate.array()
        })
      }
    },
    handler: async (req) => {
      const { certificateTemplates, ca } = await server.services.sshCertificateAuthority.getSshCaCertificateTemplates({
        caId: req.params.sshCaId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: ca.projectId,
        event: {
          type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES,
          metadata: {
            sshCaId: ca.id,
            friendlyName: ca.friendlyName
          }
        }
      });

      return {
        certificateTemplates
      };
    }
  });
};
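The GET /:sshCaId/public-key route above is the only one in this router without a verifyAuth hook, and its 200 response is declared as a bare string, so a host being provisioned can fetch the CA public key anonymously and add it to sshd's trusted user CA keys. A minimal client sketch; the base URL and the /api/v1/ssh/ca mount prefix are assumptions, since the router's registration prefix is not part of this diff:

// Sketch: fetch an SSH CA's public key and append it to sshd's TrustedUserCAKeys file.
// BASE_URL and the mount path below are assumptions for illustration only.
import { appendFile } from "node:fs/promises";

const BASE_URL = "https://app.infisical.com"; // assumed deployment URL
const sshCaId = "<ssh-ca-id>";

async function trustSshCa(): Promise<void> {
  const res = await fetch(`${BASE_URL}/api/v1/ssh/ca/${sshCaId}/public-key`); // assumed mount path
  if (!res.ok) throw new Error(`Failed to fetch CA public key: ${res.status}`);

  const body = await res.text();
  // The route declares its 200 response as a plain string; depending on serialization it may
  // arrive JSON-quoted, so strip surrounding quotes defensively.
  const publicKey = body.startsWith('"') ? (JSON.parse(body) as string) : body;

  await appendFile("/etc/ssh/trusted-user-ca-keys.pem", `${publicKey}\n`);
}

trustSshCa().catch(console.error);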
backend/src/ee/routes/v1/ssh-certificate-router.ts (new file, 164 lines)
@@ -0,0 +1,164 @@
import ms from "ms";
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";

export const registerSshCertRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
    url: "/sign",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Sign SSH public key",
      body: z.object({
        certificateTemplateId: z
          .string()
          .trim()
          .min(1)
          .describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certificateTemplateId),
        publicKey: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.publicKey),
        certType: z
          .nativeEnum(SshCertType)
          .default(SshCertType.USER)
          .describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certType),
        principals: z
          .array(z.string().transform((val) => val.trim()))
          .nonempty("Principals array must not be empty")
          .describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.principals),
        ttl: z
          .string()
          .refine((val) => ms(val) > 0, "TTL must be a positive number")
          .optional()
          .describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.ttl),
        keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.keyId)
      }),
      response: {
        200: z.object({
          serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.serialNumber),
          signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.signedKey)
        })
      }
    },
    handler: async (req) => {
      const { serialNumber, signedPublicKey, certificateTemplate, ttl, keyId } =
        await server.services.sshCertificateAuthority.signSshKey({
          actor: req.permission.type,
          actorId: req.permission.id,
          actorAuthMethod: req.permission.authMethod,
          actorOrgId: req.permission.orgId,
          ...req.body
        });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.SIGN_SSH_KEY,
          metadata: {
            certificateTemplateId: certificateTemplate.id,
            certType: req.body.certType,
            principals: req.body.principals,
            ttl: String(ttl),
            keyId
          }
        }
      });

      return {
        serialNumber,
        signedKey: signedPublicKey
      };
    }
  });

  server.route({
    method: "POST",
    url: "/issue",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Issue SSH credentials (certificate + key)",
      body: z.object({
        certificateTemplateId: z
          .string()
          .trim()
          .min(1)
          .describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certificateTemplateId),
        keyAlgorithm: z
          .nativeEnum(CertKeyAlgorithm)
          .default(CertKeyAlgorithm.RSA_2048)
          .describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm),
        certType: z
          .nativeEnum(SshCertType)
          .default(SshCertType.USER)
          .describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certType),
        principals: z
          .array(z.string().transform((val) => val.trim()))
          .nonempty("Principals array must not be empty")
          .describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.principals),
        ttl: z
          .string()
          .refine((val) => ms(val) > 0, "TTL must be a positive number")
          .optional()
          .describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.ttl),
        keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyId)
      }),
      response: {
        200: z.object({
          serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.serialNumber),
          signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.signedKey),
          privateKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.privateKey),
          publicKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.publicKey),
          keyAlgorithm: z
            .nativeEnum(CertKeyAlgorithm)
            .describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
        })
      }
    },
    handler: async (req) => {
      const { serialNumber, signedPublicKey, privateKey, publicKey, certificateTemplate, ttl, keyId } =
        await server.services.sshCertificateAuthority.issueSshCreds({
          actor: req.permission.type,
          actorId: req.permission.id,
          actorAuthMethod: req.permission.authMethod,
          actorOrgId: req.permission.orgId,
          ...req.body
        });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.ISSUE_SSH_CREDS,
          metadata: {
            certificateTemplateId: certificateTemplate.id,
            keyAlgorithm: req.body.keyAlgorithm,
            certType: req.body.certType,
            principals: req.body.principals,
            ttl: String(ttl),
            keyId
          }
        }
      });

      return {
        serialNumber,
        signedKey: signedPublicKey,
        privateKey,
        publicKey,
        keyAlgorithm: req.body.keyAlgorithm
      };
    }
  });
};
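The POST /sign handler above returns the serialNumber and signedKey fields declared in its Zod response schema. A hedged client sketch of calling it; the base URL, the /api/v1/ssh/sign mount path, and the token source are illustrative assumptions, while the request body mirrors the schema shown in the route:

// Sketch: request a signed user certificate for a local public key.
// BASE_URL, the mount path, and the token environment variable are assumptions.
import { readFile, writeFile } from "node:fs/promises";
import { homedir } from "node:os";

const BASE_URL = "https://app.infisical.com";
const ACCESS_TOKEN = process.env.INFISICAL_TOKEN ?? "";

async function signUserKey(certificateTemplateId: string): Promise<void> {
  const publicKey = await readFile(`${homedir()}/.ssh/id_ed25519.pub`, "utf8");

  const res = await fetch(`${BASE_URL}/api/v1/ssh/sign`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${ACCESS_TOKEN}`,
      "Content-Type": "application/json"
    },
    // certType defaults to "user" server-side, principals must be non-empty,
    // and ttl is an ms-compatible duration string such as "1h".
    body: JSON.stringify({
      certificateTemplateId,
      publicKey: publicKey.trim(),
      principals: ["ec2-user"],
      ttl: "1h"
    })
  });
  if (!res.ok) throw new Error(`Sign request failed: ${res.status}`);

  const { serialNumber, signedKey } = (await res.json()) as { serialNumber: string; signedKey: string };
  await writeFile(`${homedir()}/.ssh/id_ed25519-cert.pub`, `${signedKey}\n`);
  console.log(`Issued certificate with serial ${serialNumber}`);
}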
backend/src/ee/routes/v1/ssh-certificate-template-router.ts (new file, 258 lines)
@@ -0,0 +1,258 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import {
  isValidHostPattern,
  isValidUserPattern
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSshCertificateTemplateRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/:certificateTemplateId",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.GET.certificateTemplateId)
      }),
      response: {
        200: sanitizedSshCertificateTemplate
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const certificateTemplate = await server.services.sshCertificateTemplate.getSshCertTemplate({
        id: req.params.certificateTemplateId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: certificateTemplate.projectId,
        event: {
          type: EventType.GET_SSH_CERTIFICATE_TEMPLATE,
          metadata: {
            certificateTemplateId: certificateTemplate.id
          }
        }
      });

      return certificateTemplate;
    }
  });

  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z
        .object({
          sshCaId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.sshCaId),
          name: z
            .string()
            .min(1)
            .max(36)
            .refine((v) => slugify(v) === v, {
              message: "Name must be a valid slug"
            })
            .describe(SSH_CERTIFICATE_TEMPLATES.CREATE.name),
          ttl: z
            .string()
            .refine((val) => ms(val) > 0, "TTL must be a positive number")
            .default("1h")
            .describe(SSH_CERTIFICATE_TEMPLATES.CREATE.ttl),
          maxTTL: z
            .string()
            .refine((val) => ms(val) > 0, "Max TTL must be a positive number")
            .default("30d")
            .describe(SSH_CERTIFICATE_TEMPLATES.CREATE.maxTTL),
          allowedUsers: z
            .array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
            .describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedUsers),
          allowedHosts: z
            .array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
            .describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedHosts),
          allowUserCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowUserCertificates),
          allowHostCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowHostCertificates),
          allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
        })
        .refine((data) => ms(data.maxTTL) > ms(data.ttl), {
          message: "Max TTL must be greater than TTL",
          path: ["maxTTL"]
        }),
      response: {
        200: sanitizedSshCertificateTemplate
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { certificateTemplate, ca } = await server.services.sshCertificateTemplate.createSshCertTemplate({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.body
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: ca.projectId,
        event: {
          type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE,
          metadata: {
            certificateTemplateId: certificateTemplate.id,
            sshCaId: ca.id,
            name: certificateTemplate.name,
            ttl: certificateTemplate.ttl,
            maxTTL: certificateTemplate.maxTTL,
            allowedUsers: certificateTemplate.allowedUsers,
            allowedHosts: certificateTemplate.allowedHosts,
            allowUserCertificates: certificateTemplate.allowUserCertificates,
            allowHostCertificates: certificateTemplate.allowHostCertificates,
            allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
          }
        }
      });

      return certificateTemplate;
    }
  });

  server.route({
    method: "PATCH",
    url: "/:certificateTemplateId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        status: z.nativeEnum(SshCertTemplateStatus).optional(),
        name: z
          .string()
          .min(1)
          .max(36)
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .optional()
          .describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.name),
        ttl: z
          .string()
          .refine((val) => ms(val) > 0, "TTL must be a positive number")
          .optional()
          .describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.ttl),
        maxTTL: z
          .string()
          .refine((val) => ms(val) > 0, "Max TTL must be a positive number")
          .optional()
          .describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.maxTTL),
        allowedUsers: z
          .array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
          .optional()
          .describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedUsers),
        allowedHosts: z
          .array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
          .optional()
          .describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedHosts),
        allowUserCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowUserCertificates),
        allowHostCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowHostCertificates),
        allowCustomKeyIds: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowCustomKeyIds)
      }),
      params: z.object({
        certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId)
      }),
      response: {
        200: sanitizedSshCertificateTemplate
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { certificateTemplate, projectId } = await server.services.sshCertificateTemplate.updateSshCertTemplate({
        ...req.body,
        id: req.params.certificateTemplateId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId,
        event: {
          type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE,
          metadata: {
            status: certificateTemplate.status as SshCertTemplateStatus,
            certificateTemplateId: certificateTemplate.id,
            sshCaId: certificateTemplate.sshCaId,
            name: certificateTemplate.name,
            ttl: certificateTemplate.ttl,
            maxTTL: certificateTemplate.maxTTL,
            allowedUsers: certificateTemplate.allowedUsers,
            allowedHosts: certificateTemplate.allowedHosts,
            allowUserCertificates: certificateTemplate.allowUserCertificates,
            allowHostCertificates: certificateTemplate.allowHostCertificates,
            allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
          }
        }
      });

      return certificateTemplate;
    }
  });

  server.route({
    method: "DELETE",
    url: "/:certificateTemplateId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.DELETE.certificateTemplateId)
      }),
      response: {
        200: sanitizedSshCertificateTemplate
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const certificateTemplate = await server.services.sshCertificateTemplate.deleteSshCertTemplate({
        id: req.params.certificateTemplateId,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: certificateTemplate.projectId,
        event: {
          type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE,
          metadata: {
            certificateTemplateId: certificateTemplate.id
          }
        }
      });

      return certificateTemplate;
    }
  });
};
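In the create-template body above, the final .refine compares ttl and maxTTL only after both have been parsed to milliseconds with ms(), so a value like "90m" is correctly treated as larger than "1h". The same rule in isolation, using the same zod and ms packages as the router:

// Sketch: the ttl/maxTTL relationship enforced by the create-template schema, shown standalone.
import ms from "ms";
import { z } from "zod";

const ttlPairSchema = z
  .object({
    ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number").default("1h"),
    maxTTL: z.string().refine((val) => ms(val) > 0, "Max TTL must be a positive number").default("30d")
  })
  .refine((data) => ms(data.maxTTL) > ms(data.ttl), {
    message: "Max TTL must be greater than TTL",
    path: ["maxTTL"]
  });

console.log(ttlPairSchema.safeParse({ ttl: "1h", maxTTL: "30d" }).success); // true: 30d exceeds 1h
console.log(ttlPairSchema.safeParse({ ttl: "2d", maxTTL: "1h" }).success); // false: maxTTL must exceed ttl
console.log(ttlPairSchema.safeParse({ ttl: "1h", maxTTL: "90m" }).success); // true: compared in milliseconds, 90m > 1h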
@@ -7,6 +7,7 @@ import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/pr
 import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -21,17 +22,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
     schema: {
       body: z.object({
         projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
-        slug: z
-          .string()
-          .min(1)
-          .max(60)
-          .trim()
-          .refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid slug"
-          })
-          .optional()
-          .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
+        slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
         permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
         type: z.discriminatedUnion("isTemporary", [
           z.object({
@@ -87,15 +78,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
       }),
       body: z
         .object({
-          slug: z
-            .string()
-            .max(60)
-            .trim()
-            .refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
-            .refine((v) => slugify(v) === v, {
-              message: "Slug must be a valid slug"
-            })
-            .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
+          slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
           permissions: ProjectPermissionV2Schema.array()
             .optional()
             .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
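The hunks above, and the two routers that follow, replace the repeated inline slug validators with a shared slugSchema helper imported from @app/server/lib/schemas. That helper's implementation is not included in this diff; a plausible sketch, reconstructed only from the constraints the inline version enforced (length bounds, trimming, lowercase, slugify round-trip), might look like the following — treat the signature, parameter names, and messages as assumptions:

// Hypothetical reconstruction of slugSchema; the real helper in @app/server/lib/schemas is not shown here.
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

type TSlugSchemaArgs = { min?: number; max?: number; field?: string };

const slugSchema = ({ min = 1, max = 64, field = "Slug" }: TSlugSchemaArgs = {}) =>
  z
    .string()
    .trim()
    .min(min, `${field} must be at least ${min} character(s)`)
    .max(max, `${field} must be at most ${max} characters`)
    .refine((v) => v.toLowerCase() === v, `${field} must be lowercase`)
    .refine((v) => slugify(v) === v, `${field} must be a valid slug`);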
@@ -7,6 +7,7 @@ import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-p
 import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -28,17 +29,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
       body: z.object({
         identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
         projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
-        slug: z
-          .string()
-          .min(1)
-          .max(60)
-          .trim()
-          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid slug"
-          })
-          .optional()
-          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
+        slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
         permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
         type: z.discriminatedUnion("isTemporary", [
           z.object({
@@ -100,16 +91,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
         id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
       }),
       body: z.object({
-        slug: z
-          .string()
-          .min(1)
-          .max(60)
-          .trim()
-          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid slug"
-          })
-          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
+        slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
         permissions: ProjectPermissionV2Schema.array()
           .optional()
           .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
@@ -1,11 +1,11 @@
 import { packRules } from "@casl/ability/extra";
-import slugify from "@sindresorhus/slugify";
 import { z } from "zod";

 import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
 import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
 import { PROJECT_ROLE } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -29,21 +29,14 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
         projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
       }),
       body: z.object({
-        slug: z
-          .string()
-          .toLowerCase()
-          .trim()
-          .min(1)
+        slug: slugSchema({ min: 1, max: 64 })
           .refine(
             (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
             "Please choose a different slug, the slug you have entered is reserved"
           )
-          .refine((v) => slugify(v) === v, {
-            message: "Slug must be a valid"
-          })
           .describe(PROJECT_ROLE.CREATE.slug),
         name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
-        description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
+        description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
         permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
       }),
       response: {
@@ -90,23 +83,15 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
         roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
       }),
       body: z.object({
-        slug: z
-          .string()
-          .toLowerCase()
-          .trim()
-          .optional()
-          .describe(PROJECT_ROLE.UPDATE.slug)
+        slug: slugSchema({ min: 1, max: 64 })
           .refine(
-            (val) =>
-              typeof val === "undefined" ||
-              !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
+            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
             "Please choose a different slug, the slug you have entered is reserved"
           )
-          .refine((val) => typeof val === "undefined" || slugify(val) === val, {
-            message: "Slug must be a valid"
-          }),
+          .optional()
+          .describe(PROJECT_ROLE.UPDATE.slug),
         name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
-        description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
+        description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
         permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
       }),
       response: {
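Besides adopting slugSchema, the project-role hunks above change description from .optional() to .nullish(): optional() accepts only undefined, while nullish() also accepts an explicit null, so clients can clear the field without tripping validation. A small illustration of the difference:

// Sketch: optional() vs nullish() in zod, matching the description field change above.
import { z } from "zod";

const optionalDesc = z.object({ description: z.string().trim().optional() });
const nullishDesc = z.object({ description: z.string().trim().nullish() });

console.log(optionalDesc.safeParse({ description: null }).success); // false: null is rejected by optional()
console.log(nullishDesc.safeParse({ description: null }).success); // true: null is accepted by nullish()
console.log(nullishDesc.safeParse({}).success); // true: undefined is still allowed by both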
@@ -139,5 +139,10 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
     }
   };

-  return { ...accessApprovalPolicyOrm, find, findById };
+  const softDeleteById = async (policyId: string, tx?: Knex) => {
+    const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
+    return softDeletedPolicy;
+  };
+
+  return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
 };
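softDeleteById stamps deletedAt instead of removing the row, and the read paths changed elsewhere in this diff add deletedAt: null filters, so soft-deleted policies drop out of normal queries while remaining joinable for historical requests. A generic sketch of the pattern with knex; the table name is illustrative, and only the deletedAt column comes from this diff:

// Sketch of the soft-delete pattern using knex directly; "access_approval_policies" is an assumed table name.
import { Knex } from "knex";

const softDeletePolicy = async (db: Knex, policyId: string) =>
  db("access_approval_policies").where({ id: policyId }).update({ deletedAt: new Date() });

// Reads exclude soft-deleted rows by filtering on deletedAt IS NULL.
const findActivePolicies = async (db: Knex, projectId: string) =>
  db("access_approval_policies").where({ projectId }).whereNull("deletedAt").select("*");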
@@ -1,5 +1,6 @@
 import { ForbiddenError } from "@casl/ability";

+import { ProjectType } from "@app/db/schemas";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@@ -8,7 +9,11 @@ import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal
 import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
 import { TUserDALFactory } from "@app/services/user/user-dal";

+import { TAccessApprovalRequestDALFactory } from "../access-approval-request/access-approval-request-dal";
+import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-request/access-approval-request-reviewer-dal";
+import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
 import { TGroupDALFactory } from "../group/group-dal";
+import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
 import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
 import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
 import {
@@ -21,7 +26,7 @@
   TUpdateAccessApprovalPolicy
 } from "./access-approval-policy-types";

-type TSecretApprovalPolicyServiceFactoryDep = {
+type TAccessApprovalPolicyServiceFactoryDep = {
   projectDAL: TProjectDALFactory;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
   accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
@@ -30,6 +35,9 @@ type TSecretApprovalPolicyServiceFactoryDep = {
   projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
   groupDAL: TGroupDALFactory;
   userDAL: Pick<TUserDALFactory, "find">;
+  accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
+  additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
+  accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
 };

 export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
@@ -41,8 +49,11 @@ export const accessApprovalPolicyServiceFactory = ({
   permissionService,
   projectEnvDAL,
   projectDAL,
-  userDAL
-}: TSecretApprovalPolicyServiceFactoryDep) => {
+  userDAL,
+  accessApprovalRequestDAL,
+  additionalPrivilegeDAL,
+  accessApprovalRequestReviewerDAL
+}: TAccessApprovalPolicyServiceFactoryDep) => {
   const createAccessApprovalPolicy = async ({
     name,
     actor,
@@ -76,13 +87,15 @@ export const accessApprovalPolicyServiceFactory = ({
     if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
       throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });

-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       project.id,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
+
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionActions.Create,
       ProjectPermissionSub.SecretApproval
@@ -180,16 +193,9 @@ export const accessApprovalPolicyServiceFactory = ({
     if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

     // Anyone in the project should be able to get the policies.
-    /* const { permission } = */ await permissionService.getProjectPermission(
-      actor,
-      actorId,
-      project.id,
-      actorAuthMethod,
-      actorOrgId
-    );
-    // ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
+    await permissionService.getProjectPermission(actor, actorId, project.id, actorAuthMethod, actorOrgId);

-    const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id });
+    const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
     return accessApprovalPolicies;
   };

@@ -231,13 +237,14 @@ export const accessApprovalPolicyServiceFactory = ({
     if (!accessApprovalPolicy) {
       throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
     }
-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       accessApprovalPolicy.projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);

     ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);

@@ -314,19 +321,42 @@ export const accessApprovalPolicyServiceFactory = ({
     const policy = await accessApprovalPolicyDAL.findById(policyId);
     if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });

-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       policy.projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionActions.Delete,
       ProjectPermissionSub.SecretApproval
     );

-    await accessApprovalPolicyDAL.deleteById(policyId);
+    await accessApprovalPolicyDAL.transaction(async (tx) => {
+      await accessApprovalPolicyDAL.softDeleteById(policyId, tx);
+      const allAccessApprovalRequests = await accessApprovalRequestDAL.find({ policyId });
+
+      if (allAccessApprovalRequests.length) {
+        const accessApprovalRequestsIds = allAccessApprovalRequests.map((request) => request.id);
+
+        const privilegeIdsArray = allAccessApprovalRequests
+          .map((request) => request.privilegeId)
+          .filter((id): id is string => id != null);
+
+        if (privilegeIdsArray.length) {
+          await additionalPrivilegeDAL.delete({ $in: { id: privilegeIdsArray } }, tx);
+        }
+
+        await accessApprovalRequestReviewerDAL.update(
+          { $in: { id: accessApprovalRequestsIds }, status: ApprovalStatus.PENDING },
+          { status: ApprovalStatus.REJECTED },
+          tx
+        );
+      }
+    });
+
     return policy;
   };

@@ -356,7 +386,11 @@ export const accessApprovalPolicyServiceFactory = ({
     const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
     if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });

-    const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
+    const policies = await accessApprovalPolicyDAL.find({
+      envId: environment.id,
+      projectId: project.id,
+      deletedAt: null
+    });
     if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });

     return { count: policies.length };
@@ -61,7 +61,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
         db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
         db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
         db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
-        db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
+        db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
+        db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
       )

       .select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
@@ -118,7 +119,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
           approvals: doc.policyApprovals,
           secretPath: doc.policySecretPath,
           enforcementLevel: doc.policyEnforcementLevel,
-          envId: doc.policyEnvId
+          envId: doc.policyEnvId,
+          deletedAt: doc.policyDeletedAt
         },
         requestedByUser: {
           userId: doc.requestedByUserId,
@@ -141,7 +143,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
             }
           : null,

-        isApproved: !!doc.privilegeId
+        isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
       }),
       childrenMapper: [
         {
@@ -252,7 +254,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
         tx.ref("slug").withSchema(TableName.Environment).as("environment"),
         tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
         tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
-        tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
+        tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
+        tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
       );

   const findById = async (id: string, tx?: Knex) => {
@@ -271,7 +274,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
           name: el.policyName,
           approvals: el.policyApprovals,
           secretPath: el.policySecretPath,
-          enforcementLevel: el.policyEnforcementLevel
+          enforcementLevel: el.policyEnforcementLevel,
+          deletedAt: el.policyDeletedAt
         },
         requestedByUser: {
           userId: el.requestedByUserId,
@@ -363,6 +367,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
       )

       .where(`${TableName.Environment}.projectId`, projectId)
+      .where(`${TableName.AccessApprovalPolicy}.deletedAt`, null)
       .select(selectAllTableCols(TableName.AccessApprovalRequest))
       .select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
      .select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));
@@ -130,6 +130,9 @@ export const accessApprovalRequestServiceFactory = ({
         message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.`
       });
     }
+    if (policy.deletedAt) {
+      throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
+    }

     const approverIds: string[] = [];
     const approverGroupIds: string[] = [];
@@ -309,6 +312,12 @@
     }

     const { policy } = accessApprovalRequest;
+    if (policy.deletedAt) {
+      throw new BadRequestError({
+        message: "The policy associated with this access request has been deleted."
+      });
+    }

     const { membership, hasRole } = await permissionService.getProjectPermission(
       actor,
       actorId,
@@ -1,6 +1,7 @@
 import { RawAxiosRequestHeaders } from "axios";

 import { SecretKeyEncoding } from "@app/db/schemas";
+import { getConfig } from "@app/lib/config/env";
 import { request } from "@app/lib/config/request";
 import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
 import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@@ -20,27 +21,130 @@ type TAuditLogQueueServiceFactoryDep = {
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };

-export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
+export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;

 // keep this timeout 5s it must be fast because else the queue will take time to finish
 // audit log is a crowded queue thus needs to be fast
 export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
-export const auditLogQueueServiceFactory = ({
+
+export const auditLogQueueServiceFactory = async ({
   auditLogDAL,
   queueService,
   projectDAL,
   licenseService,
   auditLogStreamDAL
 }: TAuditLogQueueServiceFactoryDep) => {
+  const appCfg = getConfig();
+
   const pushToLog = async (data: TCreateAuditLogDTO) => {
-    await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
-      removeOnFail: {
-        count: 3
-      },
-      removeOnComplete: true
-    });
+    if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
+      await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
+        retryLimit: 10,
+        retryBackoff: true
+      });
+    } else {
+      await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
+        removeOnFail: {
+          count: 3
+        },
+        removeOnComplete: true
+      });
+    }
   };

+  if (appCfg.SHOULD_INIT_PG_QUEUE) {
+    await queueService.startPg<QueueName.AuditLog>(
+      QueueJobs.AuditLog,
+      async ([job]) => {
+        const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
+        let { orgId } = job.data;
+        const MS_IN_DAY = 24 * 60 * 60 * 1000;
+        let project;
+
+        if (!orgId) {
+          // it will never be undefined for both org and project id
+          // TODO(akhilmhdh): use caching here in dal to avoid db calls
+          project = await projectDAL.findById(projectId as string);
+          orgId = project.orgId;
+        }
+
+        const plan = await licenseService.getPlan(orgId);
+        if (plan.auditLogsRetentionDays === 0) {
+          // skip inserting if audit log retention is 0 meaning its not supported
+          return;
+        }
+
+        // For project actions, set TTL to project-level audit log retention config
+        // This condition ensures that the plan's audit log retention days cannot be bypassed
+        const ttlInDays =
+          project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
+            ? project.auditLogsRetentionDays
+            : plan.auditLogsRetentionDays;
+
+        const ttl = ttlInDays * MS_IN_DAY;
+
+        const auditLog = await auditLogDAL.create({
+          actor: actor.type,
+          actorMetadata: actor.metadata,
+          userAgent,
+          projectId,
+          projectName: project?.name,
+          ipAddress,
+          orgId,
+          eventType: event.type,
+          expiresAt: new Date(Date.now() + ttl),
+          eventMetadata: event.metadata,
+          userAgentType
+        });
+
+        const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
+        await Promise.allSettled(
+          logStreams.map(
+            async ({
+              url,
+              encryptedHeadersTag,
+              encryptedHeadersIV,
+              encryptedHeadersKeyEncoding,
+              encryptedHeadersCiphertext
+            }) => {
+              const streamHeaders =
+                encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
+                  ? (JSON.parse(
+                      infisicalSymmetricDecrypt({
+                        keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
+                        iv: encryptedHeadersIV,
+                        tag: encryptedHeadersTag,
+                        ciphertext: encryptedHeadersCiphertext
+                      })
+                    ) as LogStreamHeaders[])
+                  : [];
+
+              const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
+
+              if (streamHeaders.length)
+                streamHeaders.forEach(({ key, value }) => {
+                  headers[key] = value;
+                });
+
+              return request.post(url, auditLog, {
+                headers,
+                // request timeout
+                timeout: AUDIT_LOG_STREAM_TIMEOUT,
+                // connection timeout
+                signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
+              });
+            }
+          )
+        );
+      },
+      {
+        batchSize: 1,
+        workerCount: 30,
+        pollingIntervalSeconds: 0.5
+      }
+    );
+  }
+
   queueService.start(QueueName.AuditLog, async (job) => {
     const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
     let { orgId } = job.data;
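The new PG-queue worker above derives each audit log's expiresAt by capping the project-level retention at what the plan allows and converting days to milliseconds. The same calculation extracted into a small pure function for clarity; the function and parameter names are illustrative:

// Sketch: the retention/expiry calculation used by the audit log worker above.
const MS_IN_DAY = 24 * 60 * 60 * 1000;

const resolveAuditLogExpiry = (planRetentionDays: number, projectRetentionDays?: number | null): Date | null => {
  if (planRetentionDays === 0) return null; // retention unsupported on this plan; the worker skips inserting

  // A project may shorten retention but can never exceed what the plan allows.
  const ttlInDays =
    projectRetentionDays && projectRetentionDays < planRetentionDays ? projectRetentionDays : planRetentionDays;

  return new Date(Date.now() + ttlInDays * MS_IN_DAY);
};

console.log(resolveAuditLogExpiry(30, 7)); // expires 7 days from now
console.log(resolveAuditLogExpiry(30)); // expires 30 days from now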
@@ -2,9 +2,14 @@ import {
   TCreateProjectTemplateDTO,
   TUpdateProjectTemplateDTO
 } from "@app/ee/services/project-template/project-template-types";
+import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
+import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
 import { SymmetricEncryption } from "@app/lib/crypto/cipher";
 import { TProjectPermission } from "@app/lib/types";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
 import { ActorType } from "@app/services/auth/auth-type";
+import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
 import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
 import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
 import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
@@ -60,6 +65,7 @@
   DELETE_SECRETS = "delete-secrets",
   GET_WORKSPACE_KEY = "get-workspace-key",
   AUTHORIZE_INTEGRATION = "authorize-integration",
+  UPDATE_INTEGRATION_AUTH = "update-integration-auth",
   UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
   CREATE_INTEGRATION = "create-integration",
   DELETE_INTEGRATION = "delete-integration",
@@ -94,6 +100,11 @@
   UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
   GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
   REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
+  LOGIN_IDENTITY_JWT_AUTH = "login-identity-jwt-auth",
+  ADD_IDENTITY_JWT_AUTH = "add-identity-jwt-auth",
+  UPDATE_IDENTITY_JWT_AUTH = "update-identity-jwt-auth",
+  GET_IDENTITY_JWT_AUTH = "get-identity-jwt-auth",
+  REVOKE_IDENTITY_JWT_AUTH = "revoke-identity-jwt-auth",
   CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
   REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
   GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
@@ -137,6 +148,17 @@
   SECRET_APPROVAL_REQUEST = "secret-approval-request",
   SECRET_APPROVAL_CLOSED = "secret-approval-closed",
   SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
+  SIGN_SSH_KEY = "sign-ssh-key",
+  ISSUE_SSH_CREDS = "issue-ssh-creds",
+  CREATE_SSH_CA = "create-ssh-certificate-authority",
+  GET_SSH_CA = "get-ssh-certificate-authority",
+  UPDATE_SSH_CA = "update-ssh-certificate-authority",
+  DELETE_SSH_CA = "delete-ssh-certificate-authority",
+  GET_SSH_CA_CERTIFICATE_TEMPLATES = "get-ssh-certificate-authority-certificate-templates",
+  CREATE_SSH_CERTIFICATE_TEMPLATE = "create-ssh-certificate-template",
+  UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
+  DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
+  GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
   CREATE_CA = "create-certificate-authority",
   GET_CA = "get-certificate-authority",
|
GET_CA = "get-certificate-authority",
|
||||||
UPDATE_CA = "update-certificate-authority",
|
UPDATE_CA = "update-certificate-authority",
|
||||||
@ -202,7 +224,12 @@ export enum EventType {
|
|||||||
CREATE_PROJECT_TEMPLATE = "create-project-template",
|
CREATE_PROJECT_TEMPLATE = "create-project-template",
|
||||||
UPDATE_PROJECT_TEMPLATE = "update-project-template",
|
UPDATE_PROJECT_TEMPLATE = "update-project-template",
|
||||||
DELETE_PROJECT_TEMPLATE = "delete-project-template",
|
DELETE_PROJECT_TEMPLATE = "delete-project-template",
|
||||||
APPLY_PROJECT_TEMPLATE = "apply-project-template"
|
APPLY_PROJECT_TEMPLATE = "apply-project-template",
|
||||||
|
GET_APP_CONNECTIONS = "get-app-connections",
|
||||||
|
GET_APP_CONNECTION = "get-app-connection",
|
||||||
|
CREATE_APP_CONNECTION = "create-app-connection",
|
||||||
|
UPDATE_APP_CONNECTION = "update-app-connection",
|
||||||
|
DELETE_APP_CONNECTION = "delete-app-connection"
|
||||||
}
|
}
|
||||||
|
|
||||||
interface UserActorMetadata {
|
interface UserActorMetadata {
|
@@ -357,6 +384,13 @@ interface AuthorizeIntegrationEvent {
   };
 }

+interface UpdateIntegrationAuthEvent {
+  type: EventType.UPDATE_INTEGRATION_AUTH;
+  metadata: {
+    integration: string;
+  };
+}
+
 interface UnauthorizeIntegrationEvent {
   type: EventType.UNAUTHORIZE_INTEGRATION;
   metadata: {
@@ -895,6 +929,67 @@ interface GetIdentityOidcAuthEvent {
   };
 }

+interface LoginIdentityJwtAuthEvent {
+  type: EventType.LOGIN_IDENTITY_JWT_AUTH;
+  metadata: {
+    identityId: string;
+    identityJwtAuthId: string;
+    identityAccessTokenId: string;
+  };
+}
+
+interface AddIdentityJwtAuthEvent {
+  type: EventType.ADD_IDENTITY_JWT_AUTH;
+  metadata: {
+    identityId: string;
+    configurationType: string;
+    jwksUrl?: string;
+    jwksCaCert: string;
+    publicKeys: string[];
+    boundIssuer: string;
+    boundAudiences: string;
+    boundClaims: Record<string, string>;
+    boundSubject: string;
+    accessTokenTTL: number;
+    accessTokenMaxTTL: number;
+    accessTokenNumUsesLimit: number;
+    accessTokenTrustedIps: Array<TIdentityTrustedIp>;
+  };
+}
+
+interface UpdateIdentityJwtAuthEvent {
+  type: EventType.UPDATE_IDENTITY_JWT_AUTH;
+  metadata: {
+    identityId: string;
+    configurationType?: string;
+    jwksUrl?: string;
+    jwksCaCert?: string;
+    publicKeys?: string[];
+    boundIssuer?: string;
+    boundAudiences?: string;
+    boundClaims?: Record<string, string>;
+    boundSubject?: string;
+    accessTokenTTL?: number;
+    accessTokenMaxTTL?: number;
+    accessTokenNumUsesLimit?: number;
+    accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
+  };
+}
+
+interface DeleteIdentityJwtAuthEvent {
+  type: EventType.REVOKE_IDENTITY_JWT_AUTH;
+  metadata: {
+    identityId: string;
+  };
+}
+
+interface GetIdentityJwtAuthEvent {
+  type: EventType.GET_IDENTITY_JWT_AUTH;
+  metadata: {
+    identityId: string;
+  };
+}
+
 interface CreateEnvironmentEvent {
   type: EventType.CREATE_ENVIRONMENT;
   metadata: {
@@ -1132,6 +1227,117 @@ interface SecretApprovalRequest {
   };
 }

+interface SignSshKey {
+  type: EventType.SIGN_SSH_KEY;
+  metadata: {
+    certificateTemplateId: string;
+    certType: SshCertType;
+    principals: string[];
+    ttl: string;
+    keyId: string;
+  };
+}
+
+interface IssueSshCreds {
+  type: EventType.ISSUE_SSH_CREDS;
+  metadata: {
+    certificateTemplateId: string;
+    keyAlgorithm: CertKeyAlgorithm;
+    certType: SshCertType;
+    principals: string[];
+    ttl: string;
+    keyId: string;
+  };
+}
+
+interface CreateSshCa {
+  type: EventType.CREATE_SSH_CA;
+  metadata: {
+    sshCaId: string;
+    friendlyName: string;
+  };
+}
+
+interface GetSshCa {
+  type: EventType.GET_SSH_CA;
+  metadata: {
+    sshCaId: string;
+    friendlyName: string;
+  };
+}
+
+interface UpdateSshCa {
+  type: EventType.UPDATE_SSH_CA;
+  metadata: {
+    sshCaId: string;
+    friendlyName: string;
+    status: SshCaStatus;
+  };
+}
+
+interface DeleteSshCa {
+  type: EventType.DELETE_SSH_CA;
+  metadata: {
+    sshCaId: string;
+    friendlyName: string;
+  };
+}
+
+interface GetSshCaCertificateTemplates {
+  type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES;
+  metadata: {
+    sshCaId: string;
+    friendlyName: string;
+  };
+}
+
+interface CreateSshCertificateTemplate {
+  type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE;
+  metadata: {
+    certificateTemplateId: string;
+    sshCaId: string;
+    name: string;
+    ttl: string;
+    maxTTL: string;
+    allowedUsers: string[];
+    allowedHosts: string[];
+    allowUserCertificates: boolean;
+    allowHostCertificates: boolean;
+    allowCustomKeyIds: boolean;
+  };
+}
+
+interface GetSshCertificateTemplate {
+  type: EventType.GET_SSH_CERTIFICATE_TEMPLATE;
+  metadata: {
+    certificateTemplateId: string;
+  };
+}
+
+interface UpdateSshCertificateTemplate {
+  type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE;
+  metadata: {
+    certificateTemplateId: string;
+    sshCaId: string;
+    name: string;
+    status: SshCertTemplateStatus;
+    ttl: string;
+    maxTTL: string;
+    allowedUsers: string[];
+    allowedHosts: string[];
+    allowUserCertificates: boolean;
+    allowHostCertificates: boolean;
+    allowCustomKeyIds: boolean;
+  };
+}
+
+interface DeleteSshCertificateTemplate {
+  type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE;
+  metadata: {
+    certificateTemplateId: string;
+  };
+}
+
 interface CreateCa {
   type: EventType.CREATE_CA;
   metadata: {
@@ -1668,6 +1874,39 @@ interface ApplyProjectTemplateEvent {
   };
 }

+interface GetAppConnectionsEvent {
+  type: EventType.GET_APP_CONNECTIONS;
+  metadata: {
+    app?: AppConnection;
+    count: number;
+    connectionIds: string[];
+  };
+}
+
+interface GetAppConnectionEvent {
+  type: EventType.GET_APP_CONNECTION;
+  metadata: {
+    connectionId: string;
+  };
+}
+
+interface CreateAppConnectionEvent {
+  type: EventType.CREATE_APP_CONNECTION;
+  metadata: Omit<TCreateAppConnectionDTO, "credentials"> & { connectionId: string };
+}
+
+interface UpdateAppConnectionEvent {
+  type: EventType.UPDATE_APP_CONNECTION;
+  metadata: Omit<TUpdateAppConnectionDTO, "credentials"> & { connectionId: string; credentialsUpdated: boolean };
+}
+
+interface DeleteAppConnectionEvent {
+  type: EventType.DELETE_APP_CONNECTION;
+  metadata: {
+    connectionId: string;
+  };
+}
+
 export type Event =
   | GetSecretsEvent
   | GetSecretEvent
@@ -1680,6 +1919,7 @@ export type Event =
   | DeleteSecretBatchEvent
   | GetWorkspaceKeyEvent
   | AuthorizeIntegrationEvent
+  | UpdateIntegrationAuthEvent
   | UnauthorizeIntegrationEvent
   | CreateIntegrationEvent
   | DeleteIntegrationEvent
@@ -1733,6 +1973,11 @@ export type Event =
   | DeleteIdentityOidcAuthEvent
   | UpdateIdentityOidcAuthEvent
   | GetIdentityOidcAuthEvent
+  | LoginIdentityJwtAuthEvent
+  | AddIdentityJwtAuthEvent
+  | UpdateIdentityJwtAuthEvent
+  | GetIdentityJwtAuthEvent
+  | DeleteIdentityJwtAuthEvent
   | CreateEnvironmentEvent
   | GetEnvironmentEvent
   | UpdateEnvironmentEvent
@@ -1757,6 +2002,17 @@ export type Event =
   | SecretApprovalClosed
   | SecretApprovalRequest
   | SecretApprovalReopened
+  | SignSshKey
+  | IssueSshCreds
+  | CreateSshCa
+  | GetSshCa
+  | UpdateSshCa
+  | DeleteSshCa
+  | GetSshCaCertificateTemplates
+  | CreateSshCertificateTemplate
+  | UpdateSshCertificateTemplate
+  | GetSshCertificateTemplate
+  | DeleteSshCertificateTemplate
   | CreateCa
   | GetCa
   | UpdateCa
@@ -1822,4 +2078,9 @@ export type Event =
   | CreateProjectTemplateEvent
   | UpdateProjectTemplateEvent
   | DeleteProjectTemplateEvent
-  | ApplyProjectTemplateEvent;
+  | ApplyProjectTemplateEvent
+  | GetAppConnectionsEvent
+  | GetAppConnectionEvent
+  | CreateAppConnectionEvent
+  | UpdateAppConnectionEvent
+  | DeleteAppConnectionEvent;
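As a hedged illustration of the new app-connection audit shapes above: an event value is just an object in the Event union, discriminated by type. The snippet assumes the Event type and EventType enum defined in this file are in scope; the connection ID is invented.

// Illustrative only: constructing an app-connection audit event against the union above.
// Assumes `Event` and `EventType` from this file are in scope; the ID is a made-up example.
const deleteConnectionEvent: Event = {
  type: EventType.DELETE_APP_CONNECTION,
  metadata: {
    connectionId: "00000000-0000-0000-0000-000000000000"
  }
};

Because the union is discriminated by `type`, TypeScript narrows `metadata` here to `{ connectionId: string }`, which is exactly the shape DeleteAppConnectionEvent declares.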
@@ -1,7 +1,7 @@
 import { ForbiddenError, subject } from "@casl/ability";
 import ms from "ms";

-import { SecretKeyEncoding } from "@app/db/schemas";
+import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import {
@@ -67,13 +67,14 @@ export const dynamicSecretLeaseServiceFactory = ({
     if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

     const projectId = project.id;
-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionDynamicSecretActions.Lease,
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -112,7 +113,7 @@ export const dynamicSecretLeaseServiceFactory = ({
       })
     ) as object;

-    const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
+    const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
     const { maxTTL } = dynamicSecretCfg;
     const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
     if (maxTTL) {
@@ -146,13 +147,14 @@ export const dynamicSecretLeaseServiceFactory = ({
     if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

     const projectId = project.id;
-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionDynamicSecretActions.Lease,
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -187,7 +189,7 @@ export const dynamicSecretLeaseServiceFactory = ({
       })
     ) as object;

-    const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
+    const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
     const { maxTTL } = dynamicSecretCfg;
     const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
     if (maxTTL) {
@@ -225,13 +227,14 @@ export const dynamicSecretLeaseServiceFactory = ({
     if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

     const projectId = project.id;
-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionDynamicSecretActions.Lease,
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
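The switch from `ttl ?? dynamicSecretCfg.defaultTTL` to `ttl || dynamicSecretCfg.defaultTTL` in the hunks above is behavioral, not cosmetic: with `||`, an empty-string TTL also falls back to the configured default instead of being handed to `ms()`. A small, self-contained sketch of the difference (plain TypeScript, not taken from the diff):

// Illustrative comparison of the two fallback operators used above.
const defaultTTL = "1h";

const pick = (ttl: string | undefined) => ({
  nullish: ttl ?? defaultTTL, // falls back only for null/undefined
  logicalOr: ttl || defaultTTL // also falls back for "" and other falsy values
});

console.log(pick(undefined)); // { nullish: "1h", logicalOr: "1h" }
console.log(pick(""));        // { nullish: "",   logicalOr: "1h" }
console.log(pick("15m"));     // { nullish: "15m", logicalOr: "15m" }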
@@ -1,6 +1,6 @@
 import { ForbiddenError, subject } from "@casl/ability";

-import { SecretKeyEncoding } from "@app/db/schemas";
+import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import {
@@ -73,13 +73,14 @@ export const dynamicSecretServiceFactory = ({
     if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });

     const projectId = project.id;
-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionDynamicSecretActions.CreateRootCredential,
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -144,13 +145,14 @@ export const dynamicSecretServiceFactory = ({

     const projectId = project.id;

-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionDynamicSecretActions.EditRootCredential,
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -227,13 +229,14 @@ export const dynamicSecretServiceFactory = ({

     const projectId = project.id;

-    const { permission } = await permissionService.getProjectPermission(
+    const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
       actor,
       actorId,
       projectId,
       actorAuthMethod,
       actorOrgId
     );
+    ForbidOnInvalidProjectType(ProjectType.SecretManager);
     ForbiddenError.from(permission).throwUnlessCan(
       ProjectPermissionDynamicSecretActions.DeleteRootCredential,
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
     }
   };

-  const addUserToInfisicalGroup = async (userId: string) => {
+  const $addUserToInfisicalGroup = async (userId: string) => {
     // figure out if the default user is already in the group, if it is, then we shouldn't add it again

     const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
     await ensureInfisicalGroupExists(clusterName);

     await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
-    await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
+    await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()

     return {
       userId: creationInput.UserId,
@@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
 };

 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 64)();
 };

@@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
     return { entityId };
   };

-  const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+  const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
     return { entityId };
   };

@@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
     const client = new IAMClient({
       region: providerInputs.region,
       credentials: {
@@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const isConnected = await client.send(new GetUserCommand({})).then(() => true);
     return isConnected;
@@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = generateUsername();
     const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = entityId;

@@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
   };

   const renew = async (_inputs: unknown, entityId: string) => {
-    // do nothing
-    const username = entityId;
-    return { entityId: username };
+    // No renewal necessary
+    return { entityId };
   };

   return {
@@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
 const MSFT_LOGIN_URL = "https://login.microsoftonline.com";

 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 64)();
 };

@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
     return providerInputs;
   };

-  const getToken = async (
+  const $getToken = async (
     tenantId: string,
     applicationId: string,
     clientSecret: string
@@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
     return data.success;
   };

-  const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
-    return { entityId };
-  };
-
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
     if (!data.success) {
       throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
     }
@@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
   };

   const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
-    const data = await getToken(tenantId, applicationId, clientSecret);
+    const data = await $getToken(tenantId, applicationId, clientSecret);
     if (!data.success) {
       throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
     }
@@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
     return users;
   };

+  const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
+    return { entityId };
+  };
+
   return {
     validateProviderInputs,
     validateConnection,
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";

 const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 48)(size);
 };

@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
     const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
     const client = new cassandra.Client({
       sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
     await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = generateUsername();
     const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = entityId;
     const { keyspace } = providerInputs;
@@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {

   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const client = await $getClient(providerInputs);

-    const username = entityId;
     const expiration = new Date(expireAt).toISOString();
     const { keyspace } = providerInputs;

-    const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
+    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
+      username: entityId,
+      keyspace,
+      expiration
+    });
     const queries = renewStatement.toString().split(";").filter(Boolean);
-    for (const query of queries) {
-      // eslint-disable-next-line
+    for await (const query of queries) {
       await client.execute(query);
     }
     await client.shutdown();
-    return { entityId: username };
+    return { entityId };
   };

   return {
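Hedged sketch of the renew templating above: handlebars.compile turns the user-supplied renewStatement into a function of { username, keyspace, expiration }, and the provider then splits the rendered string on ";" before executing each statement. The template text and table name below are invented examples, not taken from the repository.

// Illustrative only: an example renewStatement template and how the provider expands it.
import handlebars from "handlebars";

const renewStatementTemplate =
  "UPDATE {{keyspace}}.user_expiry SET expires_at = '{{expiration}}' WHERE username = '{{username}}';"; // hypothetical bookkeeping table

const renewStatement = handlebars.compile(renewStatementTemplate)({
  username: "dynamic-user-abc123", // hypothetical generated username
  keyspace: "app_keyspace",        // hypothetical keyspace
  expiration: new Date(Date.now() + 60 * 60 * 1000).toISOString()
});

// Mirrors the provider: split on ";" and keep the non-empty statements.
const queries = renewStatement.split(";").filter(Boolean);
console.log(queries);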
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 64)();
 };

@@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
     const connection = new ElasticSearchClient({
       node: {
         url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

     const infoResponse = await connection
       .info()
@@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

     const username = generateUsername();
     const password = generatePassword();
@@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

     await connection.security.deleteUser({
       username: entityId
@@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     return { entityId };
   };

-  const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+  const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
     return { entityId };
   };

@@ -6,15 +6,17 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
 import { CassandraProvider } from "./cassandra";
 import { ElasticSearchProvider } from "./elastic-search";
 import { LdapProvider } from "./ldap";
-import { DynamicSecretProviders } from "./models";
+import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
 import { MongoAtlasProvider } from "./mongo-atlas";
 import { MongoDBProvider } from "./mongo-db";
 import { RabbitMqProvider } from "./rabbit-mq";
 import { RedisDatabaseProvider } from "./redis";
+import { SapAseProvider } from "./sap-ase";
 import { SapHanaProvider } from "./sap-hana";
 import { SqlDatabaseProvider } from "./sql-database";
+import { TotpProvider } from "./totp";

-export const buildDynamicSecretProviders = () => ({
+export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
   [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
   [DynamicSecretProviders.Cassandra]: CassandraProvider(),
   [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -27,5 +29,7 @@ export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
   [DynamicSecretProviders.Ldap]: LdapProvider(),
   [DynamicSecretProviders.SapHana]: SapHanaProvider(),
-  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
+  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
+  [DynamicSecretProviders.Totp]: TotpProvider(),
+  [DynamicSecretProviders.SapAse]: SapAseProvider()
 });
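A hedged aside on the typed factory return above: annotating the map as Record<DynamicSecretProviders, TDynamicProviderFns> forces an entry for every enum member, so adding Totp or SapAse to the enum without registering its provider becomes a compile-time error rather than a runtime gap. A minimal standalone reproduction of the idea, with invented names:

// Illustrative only: why the explicit Record return type helps.
enum ExampleProviders {
  Postgres = "postgres",
  Totp = "totp"
}

type ExampleProviderFns = { create: () => Promise<{ entityId: string }> };

// Uncommenting this fails to compile: property for ExampleProviders.Totp is missing,
// which is exactly the safety the annotation in the diff adds.
// const incomplete: Record<ExampleProviders, ExampleProviderFns> = {
//   [ExampleProviders.Postgres]: { create: async () => ({ entityId: "pg-1" }) }
// };

const complete: Record<ExampleProviders, ExampleProviderFns> = {
  [ExampleProviders.Postgres]: { create: async () => ({ entityId: "pg-1" }) },
  [ExampleProviders.Totp]: { create: async () => ({ entityId: "totp-1" }) }
};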
@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
+  const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
     return new Promise((resolve, reject) => {
       const client = ldapjs.createClient({
         url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
     return client.connected;
   };

@@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     if (providerInputs.credentialType === LdapCredentialType.Static) {
       const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     if (providerInputs.credentialType === LdapCredentialType.Static) {
       const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
   };

   const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+    // No renewal necessary
     return { entityId };
   };

@@ -4,7 +4,8 @@ export enum SqlProviders {
   Postgres = "postgres",
   MySQL = "mysql2",
   Oracle = "oracledb",
-  MsSQL = "mssql"
+  MsSQL = "mssql",
+  SapAse = "sap-ase"
 }

 export enum ElasticSearchAuthTypes {
@@ -17,6 +18,17 @@ export enum LdapCredentialType {
   Static = "static"
 }

+export enum TotpConfigType {
+  URL = "url",
+  MANUAL = "manual"
+}
+
+export enum TotpAlgorithm {
+  SHA1 = "sha1",
+  SHA256 = "sha256",
+  SHA512 = "sha512"
+}
+
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
@@ -107,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
   ca: z.string().optional()
 });

+export const DynamicSecretSapAseSchema = z.object({
+  host: z.string().trim().toLowerCase(),
+  port: z.number(),
+  database: z.string().trim(),
+  username: z.string().trim(),
+  password: z.string().trim(),
+  creationStatement: z.string().trim(),
+  revocationStatement: z.string().trim()
+});
+
 export const DynamicSecretAwsIamSchema = z.object({
   accessKey: z.string().trim().min(1),
   secretAccessKey: z.string().trim().min(1),
@@ -221,6 +243,34 @@ export const LdapSchema = z.union([
   })
 ]);

+export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
+  z.object({
+    configType: z.literal(TotpConfigType.URL),
+    url: z
+      .string()
+      .url()
+      .trim()
+      .min(1)
+      .refine((val) => {
+        const urlObj = new URL(val);
+        const secret = urlObj.searchParams.get("secret");
+
+        return Boolean(secret);
+      }, "OTP URL must contain secret field")
+  }),
+  z.object({
+    configType: z.literal(TotpConfigType.MANUAL),
+    secret: z
+      .string()
+      .trim()
+      .min(1)
+      .transform((val) => val.replace(/\s+/g, "")),
+    period: z.number().optional(),
+    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
+    digits: z.number().optional()
+  })
+]);
+
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra = "cassandra",
@@ -234,12 +284,15 @@ export enum DynamicSecretProviders {
   AzureEntraID = "azure-entra-id",
   Ldap = "ldap",
   SapHana = "sap-hana",
-  Snowflake = "snowflake"
+  Snowflake = "snowflake",
+  Totp = "totp",
+  SapAse = "sap-ase"
 }

 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
@@ -250,7 +303,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
 ]);

 export type TDynamicProviderFns = {
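Hedged usage sketch for DynamicSecretTotpSchema above (assumes the schema and enums from this file are in scope; sample values are invented): a URL-style config must be an otpauth URL carrying a secret query parameter, while the manual variant strips whitespace from the shared secret.

// Illustrative only: exercising DynamicSecretTotpSchema with made-up values.
const urlConfig = DynamicSecretTotpSchema.parse({
  configType: TotpConfigType.URL,
  url: "otpauth://totp/Example:alice@example.com?secret=JBSWY3DPEHPK3PXP&issuer=Example"
});

const manualConfig = DynamicSecretTotpSchema.parse({
  configType: TotpConfigType.MANUAL,
  secret: "JBSW Y3DP EHPK 3PXP", // whitespace is removed by the transform
  period: 30,
  algorithm: TotpAlgorithm.SHA1,
  digits: 6
});

// Throws: the refine rejects an otpauth URL that has no "secret" parameter.
// DynamicSecretTotpSchema.parse({ configType: TotpConfigType.URL, url: "otpauth://totp/Example" });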
@@ -8,7 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";

 const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 48)(size);
 };

@@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
     const client = axios.create({
       baseURL: "https://cloud.mongodb.com/api/atlas",
       headers: {
@@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const isConnected = await client({
       method: "GET",
@@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = generateUsername();
     const password = generatePassword();
@@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = entityId;
     const isExisting = await client({
@@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
||||||
|
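The charset change above (dropping "$" and "#" from generated credentials) repeats across the providers that follow. A minimal sketch, not part of the diff, of how this shared generatePassword pattern behaves with nanoid's customAlphabet:

// Illustrative sketch; mirrors the helper shown in the hunk above.
import { customAlphabet } from "nanoid";

const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";

// customAlphabet(alphabet, defaultSize) returns a generator; calling it with a size
// overrides the default, which is what generatePassword(size = 48) does.
const generatePassword = (size = 48) => customAlphabet(charset, 48)(size);

console.log(generatePassword());   // 48-character password with no "$" or "#"
console.log(generatePassword(64)); // explicit length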
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
 
 const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 48)(size);
 };
 
@@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
     const isSrv = !providerInputs.port;
     const uri = isSrv
       ? `mongodb+srv://${providerInputs.host}`
@@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const isConnected = await client
       .db(providerInputs.database)
@@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = entityId;
 
@@ -88,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
     return { entityId };
   };
 
@@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
 
 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 64)();
 };
 
@@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
     const axiosInstance = axios.create({
       baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
       auth: {
@@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const infoResponse = await connection.get("/whoami").then(() => true);
 
@@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
 
     return { entityId };
   };
 
-  const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+  const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
     return { entityId };
   };
 
@@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
 
 const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 64)();
 };
 
@@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
     let connection: Redis | null = null;
     try {
       connection = new Redis({
@@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const pingResponse = await connection
       .ping()
@@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = entityId;
 
@@ -141,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const connection = await $getClient(providerInputs);
 
     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
backend/src/ee/services/dynamic-secret/providers/sap-ase.ts (new file, 145 lines)
@@ -0,0 +1,145 @@
+import handlebars from "handlebars";
+import { customAlphabet } from "nanoid";
+import odbc from "odbc";
+import { z } from "zod";
+
+import { BadRequestError } from "@app/lib/errors";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { verifyHostInputValidity } from "../dynamic-secret-fns";
+import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
+
+const generatePassword = (size = 48) => {
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
+  return customAlphabet(charset, 48)(size);
+};
+
+const generateUsername = () => {
+  return alphaNumericNanoId(25);
+};
+
+enum SapCommands {
+  CreateLogin = "sp_addlogin",
+  DropLogin = "sp_droplogin"
+}
+
+export const SapAseProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
+
+    verifyHostInputValidity(providerInputs.host);
+    return providerInputs;
+  };
+
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
+    const connectionString =
+      `DRIVER={FreeTDS};` +
+      `SERVER=${providerInputs.host};` +
+      `PORT=${providerInputs.port};` +
+      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
+      `UID=${providerInputs.username};` +
+      `PWD=${providerInputs.password};` +
+      `TDS_VERSION=5.0`;
+
+    const client = await odbc.connect(connectionString);
+
+    return client;
+  };
+
+  const validateConnection = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+    const masterClient = await $getClient(providerInputs, true);
+    const client = await $getClient(providerInputs);
+
+    const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
+    const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
+
+    if (!resultFromSelectedDatabase.version) {
+      throw new BadRequestError({
+        message: "Failed to validate SAP ASE connection, version query failed"
+      });
+    }
+
+    if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
+      throw new BadRequestError({
+        message: "Failed to validate SAP ASE connection (master), version mismatch"
+      });
+    }
+
+    return true;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const username = `inf_${generateUsername()}`;
+    const password = `${generatePassword()}`;
+
+    const client = await $getClient(providerInputs);
+    const masterClient = await $getClient(providerInputs, true);
+
+    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
+      username,
+      password
+    });
+
+    const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
+
+    for await (const query of queries) {
+      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
+      // If not done, then the newly created user won't be able to authenticate.
+      await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
+    }
+
+    await masterClient.close();
+    await client.close();
+
+    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
+  };
+
+  const revoke = async (inputs: unknown, username: string) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
+      username
+    });
+
+    const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
+
+    const client = await $getClient(providerInputs);
+    const masterClient = await $getClient(providerInputs, true);
+
+    // Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
+    const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);
+
+    if (result && result.length > 0) {
+      for await (const row of result) {
+        if (row.spid) {
+          await masterClient.query(`KILL ${row.spid.trim()}`);
+        }
+      }
+    }
+
+    for await (const query of queries) {
+      await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
+    }
+
+    await masterClient.close();
+    await client.close();
+
+    return { entityId: username };
+  };
+
+  const renew = async (_: unknown, username: string) => {
+    // No need for renewal
+    return { entityId: username };
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
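For orientation, a hedged usage sketch of the new SAP ASE provider's TDynamicProviderFns surface; the inputs object and its SQL statements are hypothetical and only mirror the fields referenced in the file above (the real shape is DynamicSecretSapAseSchema):

// Illustrative sketch, not part of the diff.
// Assumes: import { SapAseProvider } from "./sap-ase";
async function demoSapAseLifecycle() {
  const provider = SapAseProvider();

  const inputs = {
    host: "sap-ase.internal.example.com", // placeholder values
    port: 5000,
    database: "appdb",
    username: "sa",
    password: "********",
    creationStatement: "sp_addlogin '{{username}}', '{{password}}'; sp_adduser '{{username}}';", // hypothetical template
    revocationStatement: "sp_dropuser '{{username}}'; sp_droplogin '{{username}}';" // hypothetical template
  };

  await provider.validateConnection(inputs);     // compares @@VERSION on master vs. the selected database
  const lease = await provider.create(inputs);   // { entityId, data: { DB_USERNAME, DB_PASSWORD } }
  await provider.revoke(inputs, lease.entityId); // kills open spids on master, then runs the revocation statement
}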
@@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
     const client = hdb.createClient({
       host: providerInputs.host,
       port: providerInputs.port,
@@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
-    const testResult: boolean = await new Promise((resolve, reject) => {
+    const testResult = await new Promise<boolean>((resolve, reject) => {
       client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
         if (err) {
           reject();
@@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     const password = generatePassword();
     const expiration = new Date(expireAt).toISOString();
 
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
     const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
       username,
       password,
@@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, username: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
     const queries = revokeStatement.toString().split(";").filter(Boolean);
     for await (const query of queries) {
@@ -135,13 +135,15 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     return { entityId: username };
   };
 
-  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const client = await $getClient(providerInputs);
     try {
       const expiration = new Date(expireAt).toISOString();
 
-      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
+      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
       const queries = renewStatement.toString().split(";").filter(Boolean);
       for await (const query of queries) {
         await new Promise((resolve, reject) => {
@@ -161,7 +163,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
       client.disconnect();
     }
 
-    return { entityId: username };
+    return { entityId };
   };
 
   return {
@@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
 const noop = () => {};
 
 const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 48)(size);
 };
 
@@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
     const client = snowflake.createConnection({
       account: `${providerInputs.orgId}-${providerInputs.accountId}`,
       username: providerInputs.username,
@@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     let isValidConnection: boolean;
 
@@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
 
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
   const revoke = async (inputs: unknown, username: string) => {
     const providerInputs = await validateProviderInputs(inputs);
 
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     try {
       const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@@ -131,17 +131,16 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
     return { entityId: username };
   };
 
-  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
+    if (!providerInputs.renewStatement) return { entityId };
 
-    if (!providerInputs.renewStatement) return { entityId: username };
-
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     try {
       const expiration = getDaysToExpiry(new Date(expireAt));
       const renewStatement = handlebars.compile(providerInputs.renewStatement)({
-        username,
+        username: entityId,
         expiration
       });
 
@@ -161,7 +160,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
       client.destroy(noop);
     }
 
-    return { entityId: username };
+    return { entityId };
   };
 
   return {
@@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
   // oracle has limit of 48 password length
   const size = provider === SqlProviders.Oracle ? 30 : 48;
 
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
   return customAlphabet(charset, 48)(size);
 };
 
@@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
     const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
     const db = knex({
       client: providerInputs.client,
@@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);
     // oracle needs from keyword
     const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
 
@@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);
 
     const username = generateUsername(providerInputs.client);
     const password = generatePassword(providerInputs.client);
@@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);
 
     const username = entityId;
     const { database } = providerInputs;
@@ -110,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const db = await $getClient(providerInputs);
 
-    const username = entityId;
     const expiration = new Date(expireAt).toISOString();
     const { database } = providerInputs;
 
-    const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
+    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
+      username: entityId,
+      expiration,
+      database
+    });
+
     if (renewStatement) {
       const queries = renewStatement.toString().split(";").filter(Boolean);
       await db.transaction(async (tx) => {
@@ -128,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
     }
 
     await db.destroy();
-    return { entityId: username };
+    return { entityId };
   };
 
   return {
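The renew hunks above all follow the same pattern: bail out when no renewStatement is configured, otherwise compile the statement with handlebars (now passing the lease's entityId as "username") and run the resulting queries. A hedged sketch of that templating step; the statement text is a made-up example:

// Illustrative sketch, not part of the diff.
import handlebars from "handlebars";

const template = "ALTER USER {{username}} VALID UNTIL '{{expiration}}'"; // hypothetical renewStatement
const renewStatement = handlebars.compile(template)({
  username: "inf_abc123",                // the lease's entityId
  expiration: new Date().toISOString()
});

// Providers then split on ";" and execute each statement, often inside a transaction.
const queries = renewStatement.toString().split(";").filter(Boolean);
console.log(queries);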
backend/src/ee/services/dynamic-secret/providers/totp.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
+import { authenticator } from "otplib";
+import { HashAlgorithms } from "otplib/core";
+
+import { alphaNumericNanoId } from "@app/lib/nanoid";
+
+import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
+
+export const TotpProvider = (): TDynamicProviderFns => {
+  const validateProviderInputs = async (inputs: unknown) => {
+    const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);
+
+    return providerInputs;
+  };
+
+  const validateConnection = async () => {
+    return true;
+  };
+
+  const create = async (inputs: unknown) => {
+    const providerInputs = await validateProviderInputs(inputs);
+
+    const entityId = alphaNumericNanoId(32);
+    const authenticatorInstance = authenticator.clone();
+
+    let secret: string;
+    let period: number | null | undefined;
+    let digits: number | null | undefined;
+    let algorithm: HashAlgorithms | null | undefined;
+
+    if (providerInputs.configType === TotpConfigType.URL) {
+      const urlObj = new URL(providerInputs.url);
+      secret = urlObj.searchParams.get("secret") as string;
+      const periodFromUrl = urlObj.searchParams.get("period");
+      const digitsFromUrl = urlObj.searchParams.get("digits");
+      const algorithmFromUrl = urlObj.searchParams.get("algorithm");
+
+      if (periodFromUrl) {
+        period = +periodFromUrl;
+      }
+
+      if (digitsFromUrl) {
+        digits = +digitsFromUrl;
+      }
+
+      if (algorithmFromUrl) {
+        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
+      }
+    } else {
+      secret = providerInputs.secret;
+      period = providerInputs.period;
+      digits = providerInputs.digits;
+      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
+    }
+
+    if (digits) {
+      authenticatorInstance.options = { digits };
+    }
+
+    if (algorithm) {
+      authenticatorInstance.options = { algorithm };
+    }
+
+    if (period) {
+      authenticatorInstance.options = { step: period };
+    }
+
+    return {
+      entityId,
+      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
+    };
+  };
+
+  const revoke = async (_inputs: unknown, entityId: string) => {
+    return { entityId };
+  };
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
+    return { entityId };
+  };
+
+  return {
+    validateProviderInputs,
+    validateConnection,
+    create,
+    revoke,
+    renew
+  };
+};
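A small, hedged sketch of what the TOTP provider's create() does with otplib; the secret value is a placeholder:

// Illustrative sketch, not part of the diff; mirrors the otplib calls used in create() above.
import { authenticator } from "otplib";

const instance = authenticator.clone();
instance.options = { digits: 6, step: 30 }; // the provider maps "period" to otplib's "step"

const secret = "JBSWY3DPEHPK3PXP"; // placeholder base32 secret, e.g. the ?secret= param of an otpauth:// URL
const token = instance.generate(secret);      // returned as data.TOTP
const secondsLeft = instance.timeRemaining(); // returned as data.TIME_REMAINING
console.log(token, secondsLeft);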
@@ -20,7 +20,8 @@ import {
   TUpdateExternalKmsDTO
 } from "./external-kms-types";
 import { AwsKmsProviderFactory } from "./providers/aws-kms";
-import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
+import { GcpKmsProviderFactory } from "./providers/gcp-kms";
+import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";
 
 type TExternalKmsServiceFactoryDep = {
   externalKmsDAL: TExternalKmsDALFactory;
@@ -78,6 +79,13 @@ export const externalKmsServiceFactory = ({
           await externalKms.validateConnection();
         }
         break;
+      case KmsProviders.Gcp:
+        {
+          const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
+          await externalKms.validateConnection();
+          sanitizedProviderInput = JSON.stringify(provider.inputs);
+        }
+        break;
       default:
         throw new BadRequestError({ message: "external kms provided is invalid" });
     }
@@ -88,7 +96,7 @@ export const externalKmsServiceFactory = ({
     });
 
     const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
-      plainText: Buffer.from(sanitizedProviderInput, "utf8")
+      plainText: Buffer.from(sanitizedProviderInput)
     });
 
     const externalKms = await externalKmsDAL.transaction(async (tx) => {
@@ -162,7 +170,7 @@ export const externalKmsServiceFactory = ({
       case KmsProviders.Aws:
         {
           const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
-            JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+            JSON.parse(decryptedProviderInputBlob.toString())
           );
           const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
           const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
@@ -170,6 +178,17 @@ export const externalKmsServiceFactory = ({
           sanitizedProviderInput = JSON.stringify(updatedProviderInput);
         }
         break;
+      case KmsProviders.Gcp:
+        {
+          const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
+            JSON.parse(decryptedProviderInputBlob.toString())
+          );
+          const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
+          const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
+          await externalKms.validateConnection();
+          sanitizedProviderInput = JSON.stringify(updatedProviderInput);
+        }
+        break;
       default:
         throw new BadRequestError({ message: "external kms provided is invalid" });
     }
@@ -178,7 +197,7 @@ export const externalKmsServiceFactory = ({
     let encryptedProviderInputs: Buffer | undefined;
     if (sanitizedProviderInput) {
       const { cipherTextBlob } = orgDataKeyEncryptor({
-        plainText: Buffer.from(sanitizedProviderInput, "utf8")
+        plainText: Buffer.from(sanitizedProviderInput)
       });
       encryptedProviderInputs = cipherTextBlob;
     }
@@ -271,10 +290,17 @@ export const externalKmsServiceFactory = ({
     switch (externalKmsDoc.provider) {
       case KmsProviders.Aws: {
         const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
-          JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+          JSON.parse(decryptedProviderInputBlob.toString())
        );
         return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
       }
+      case KmsProviders.Gcp: {
+        const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
+          JSON.parse(decryptedProviderInputBlob.toString())
+        );
+
+        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
+      }
       default:
         throw new BadRequestError({ message: "external kms provided is invalid" });
     }
@@ -312,21 +338,34 @@ export const externalKmsServiceFactory = ({
     switch (externalKmsDoc.provider) {
       case KmsProviders.Aws: {
         const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
-          JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+          JSON.parse(decryptedProviderInputBlob.toString())
        );
         return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
       }
+      case KmsProviders.Gcp: {
+        const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
+          JSON.parse(decryptedProviderInputBlob.toString())
+        );
+
+        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
+      }
       default:
         throw new BadRequestError({ message: "external kms provided is invalid" });
     }
   };
 
+  const fetchGcpKeys = async ({ credential, gcpRegion }: Pick<TExternalKmsGcpSchema, "credential" | "gcpRegion">) => {
+    const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
+    return externalKms.getKeysList();
+  };
+
   return {
     create,
     updateById,
     deleteById,
     list,
     findById,
-    findByName
+    findByName,
+    fetchGcpKeys
   };
 };
backend/src/ee/services/external-kms/providers/gcp-kms.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
+import { KeyManagementServiceClient } from "@google-cloud/kms";
+
+import { BadRequestError } from "@app/lib/errors";
+import { logger } from "@app/lib/logger";
+
+import { ExternalKmsGcpSchema, TExternalKmsGcpClientSchema, TExternalKmsProviderFns } from "./model";
+
+const getGcpKmsClient = async ({ credential, gcpRegion }: TExternalKmsGcpClientSchema) => {
+  const gcpKmsClient = new KeyManagementServiceClient({
+    credentials: credential
+  });
+  const projectId = credential.project_id;
+  const locationName = gcpKmsClient.locationPath(projectId, gcpRegion);
+
+  return {
+    gcpKmsClient,
+    locationName
+  };
+};
+
+type GcpKmsProviderArgs = {
+  inputs: unknown;
+};
+type TGcpKmsProviderFactoryReturn = TExternalKmsProviderFns & {
+  getKeysList: () => Promise<{ keys: string[] }>;
+};
+
+export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Promise<TGcpKmsProviderFactoryReturn> => {
+  const { credential, gcpRegion, keyName } = await ExternalKmsGcpSchema.parseAsync(inputs);
+  const { gcpKmsClient, locationName } = await getGcpKmsClient({
+    credential,
+    gcpRegion
+  });
+
+  const validateConnection = async () => {
+    try {
+      await gcpKmsClient.listKeyRings({
+        parent: locationName
+      });
+      return true;
+    } catch (error) {
+      throw new BadRequestError({
+        message: "Cannot connect to GCP KMS"
+      });
+    }
+  };
+
+  // Used when adding the KMS to fetch the list of keys in specified region
+  const getKeysList = async () => {
+    try {
+      const [keyRings] = await gcpKmsClient.listKeyRings({
+        parent: locationName
+      });
+
+      const validKeyRings = keyRings
+        .filter(
+          (keyRing): keyRing is { name: string } =>
+            keyRing !== null && typeof keyRing === "object" && "name" in keyRing && typeof keyRing.name === "string"
+        )
+        .map((keyRing) => keyRing.name);
+      const keyList: string[] = [];
+      const keyListPromises = validKeyRings.map((keyRingName) =>
+        gcpKmsClient
+          .listCryptoKeys({
+            parent: keyRingName
+          })
+          .then(([cryptoKeys]) =>
+            cryptoKeys
+              .filter(
+                (key): key is { name: string } =>
+                  key !== null && typeof key === "object" && "name" in key && typeof key.name === "string"
+              )
+              .map((key) => key.name)
+          )
+      );
+
+      const cryptoKeyLists = await Promise.all(keyListPromises);
+      keyList.push(...cryptoKeyLists.flat());
+      return { keys: keyList };
+    } catch (error) {
+      logger.error(error, "Could not validate GCP KMS connection and credentials");
+      throw new BadRequestError({
+        message: "Could not validate GCP KMS connection and credentials",
+        error
+      });
+    }
+  };
+
+  const encrypt = async (data: Buffer) => {
+    const encryptedText = await gcpKmsClient.encrypt({
+      name: keyName,
+      plaintext: data
+    });
+    if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
+    return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
+  };
+
+  const decrypt = async (encryptedBlob: Buffer) => {
+    const decryptedText = await gcpKmsClient.decrypt({
+      name: keyName,
+      ciphertext: encryptedBlob
+    });
+    if (!decryptedText[0].plaintext) throw new Error("decryption failed");
+    return { data: Buffer.from(decryptedText[0].plaintext) };
+  };
+
+  return {
+    validateConnection,
+    getKeysList,
+    encrypt,
+    decrypt
+  };
+};
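For orientation, a hedged encrypt/decrypt round trip through the new factory; the credential source and key resource name are placeholders, and the real input shape is validated by ExternalKmsGcpSchema:

// Illustrative sketch, not part of the diff.
// Assumes: import { GcpKmsProviderFactory } from "./gcp-kms";
async function demoGcpKmsRoundTrip() {
  const kms = await GcpKmsProviderFactory({
    inputs: {
      credential: JSON.parse(process.env.GCP_SA_KEY_JSON ?? "{}"), // hypothetical env var holding the service-account JSON
      gcpRegion: "us-central1",
      keyName: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key" // placeholder resource name
    }
  });

  await kms.validateConnection(); // listKeyRings on the region as a reachability check

  const { encryptedBlob } = await kms.encrypt(Buffer.from("super-secret"));
  const { data } = await kms.decrypt(encryptedBlob);
  return data.toString(); // "super-secret"
}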
@@ -1,13 +1,23 @@
 import { z } from "zod";
 
 export enum KmsProviders {
-  Aws = "aws"
+  Aws = "aws",
+  Gcp = "gcp"
 }
 
 export enum KmsAwsCredentialType {
   AssumeRole = "assume-role",
   AccessKey = "access-key"
 }
+// Google uses snake_case for their enum values and we need to match that
+export enum KmsGcpCredentialType {
+  ServiceAccount = "service_account"
+}
+
+export enum KmsGcpKeyFetchAuthType {
+  Credential = "credential",
+  Kms = "kmsId"
+}
 
 export const ExternalKmsAwsSchema = z.object({
   credential: z
@@ -42,14 +52,44 @@ export const ExternalKmsAwsSchema = z.object({
 });
 export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;
 
+export const ExternalKmsGcpCredentialSchema = z.object({
+  type: z.literal(KmsGcpCredentialType.ServiceAccount),
+  project_id: z.string().min(1),
+  private_key_id: z.string().min(1),
+  private_key: z.string().min(1),
+  client_email: z.string().min(1),
+  client_id: z.string().min(1),
+  auth_uri: z.string().min(1),
+  token_uri: z.string().min(1),
+  auth_provider_x509_cert_url: z.string().min(1),
+  client_x509_cert_url: z.string().min(1),
+  universe_domain: z.string().min(1)
+});
+
+export type TExternalKmsGcpCredentialSchema = z.infer<typeof ExternalKmsGcpCredentialSchema>;
+
+export const ExternalKmsGcpSchema = z.object({
+  credential: ExternalKmsGcpCredentialSchema.describe("GCP Service Account JSON credential to connect"),
+  gcpRegion: z.string().trim().describe("GCP region where the KMS key is located"),
+  keyName: z.string().trim().describe("GCP key name")
+});
+export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;
+
+const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
+  credential: ExternalKmsGcpCredentialSchema
+});
+export type TExternalKmsGcpClientSchema = z.infer<typeof ExternalKmsGcpClientSchema>;
+
 // The root schema of the JSON
 export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
-  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
+  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }),
+  z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema })
 ]);
 export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;
 
 export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
-  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
+  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }),
+  z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema.partial() })
 ]);
 export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;
 
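A short, hedged sketch of how the extended discriminated unions narrow provider inputs by the "type" field; the values are placeholders:

// Illustrative sketch, not part of the diff.
// Assumes: import { ExternalKmsInputSchema, KmsProviders } from "./model";
const parsed = ExternalKmsInputSchema.parse({
  type: KmsProviders.Gcp,
  inputs: {
    credential: JSON.parse(process.env.GCP_SA_KEY_JSON ?? "{}"), // hypothetical env var
    gcpRegion: "us-central1",
    keyName: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key" // placeholder
  }
});

if (parsed.type === KmsProviders.Gcp) {
  // parsed.inputs is narrowed to TExternalKmsGcpSchema here
  console.log(parsed.inputs.gcpRegion);
}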
Some files were not shown because too many files have changed in this diff.