Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-13 01:49:57 +00:00)

Compare commits: 502 commits between infisical-... and maidul-212.
@@ -50,6 +50,13 @@ jobs:
  environment:
    name: Gamma
  steps:
    - uses: twingate/github-action@v1
      with:
        # The Twingate Service Key used to connect Twingate to the proper service
        # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
        #
        # Required
        service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
    - name: Checkout code
      uses: actions/checkout@v2
    - name: Setup Node.js environment
@@ -74,21 +81,21 @@ jobs:
      uses: pr-mpt/actions-commit-hash@v2
    - name: Download task definition
      run: |
        aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
        aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
    - name: Render Amazon ECS task definition
      id: render-web-container
      uses: aws-actions/amazon-ecs-render-task-definition@v1
      with:
        task-definition: task-definition.json
        container-name: infisical-core-platform
        container-name: infisical-core
        image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
        environment-variables: "LOG_LEVEL=info"
    - name: Deploy to Amazon ECS service
      uses: aws-actions/amazon-ecs-deploy-task-definition@v1
      with:
        task-definition: ${{ steps.render-web-container.outputs.task-definition }}
        service: infisical-core-platform
        cluster: infisical-core-platform
        service: infisical-core-gamma-stage
        cluster: infisical-gamma-stage
        wait-for-service-stability: true

  production-postgres-deployment:
@@ -98,6 +105,13 @@ jobs:
  environment:
    name: Production
  steps:
    - uses: twingate/github-action@v1
      with:
        # The Twingate Service Key used to connect Twingate to the proper service
        # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
        #
        # Required
        service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
    - name: Checkout code
      uses: actions/checkout@v2
    - name: Setup Node.js environment
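The hunks above add a Twingate connectivity step before checkout and switch the gamma deployment's ECS task definition, container, service, and cluster names, with the deploy step waiting for service stability. Once that step returns, the rollout can be double-checked from the AWS CLI; a hedged sketch, using the gamma-stage names that appear in the workflow above (not part of the committed workflow itself):

# confirm the PRIMARY deployment is running the expected task definition and counts
aws ecs describe-services \
  --cluster infisical-gamma-stage \
  --services infisical-core-gamma-stage \
  --query "services[0].deployments[?status=='PRIMARY'].[taskDefinition,runningCount,desiredCount]"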
@@ -35,7 +35,7 @@ jobs:
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
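The only functional change in this hunk is the extra -e ENCRYPTION_KEY=$ENCRYPTION_KEY passed to the API container. A minimal sketch for supplying that value when running the same container locally, assuming a random 16-byte hex string is acceptable (the format Infisical's self-hosting configuration generally uses for ENCRYPTION_KEY):

# generate a 16-byte (128-bit) hex key and export it so the docker run line above picks it up
export ENCRYPTION_KEY=$(openssl rand -hex 16)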
.github/workflows/check-migration-file-edited.yml (new file, 25 lines, vendored)
@@ -0,0 +1,25 @@
name: Check migration file edited

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - 'backend/src/db/migrations/**'

jobs:
  rename:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check any migration files are modified, renamed or duplicated.
        run: |
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
          if [ -s edited_files.txt ]; then
            echo "Exiting migration files cannot be modified."
            cat edited_files.txt
            exit 1
          fi
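One shell subtlety in the check step: | binds tighter than ||, so the long line parses as (git diff ... | grep ...) || (true | cut ... | xargs ... > edited_files.txt). When grep does find modified, renamed, or copied migrations, the right-hand pipeline never runs, edited_files.txt is never written, and the step still passes; when grep finds nothing, it only produces an empty file. A hedged sketch of a grouping that keeps the never-fail intent while always writing the file (illustrative only, not the committed workflow):

# group the diff+grep so its output always reaches cut/xargs, while only the grep exit status is swallowed
{ git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true; } \
  | cut -f2 | xargs -r -n1 basename > edited_files.txt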
@@ -19,18 +19,16 @@ jobs:

      - name: Get list of newly added files in migration folder
        run: |
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' || true | cut -f2 | xargs -r -n1 basename > added_files.txt
          if [ ! -s added_files.txt ]; then
            echo "No new files added. Skipping"
            echo "SKIP_RENAME=true" >> $GITHUB_ENV
            exit 0
          fi

      - name: Script to rename migrations
        if: env.SKIP_RENAME != 'true'
        run: python .github/resources/rename_migration_files.py

      - name: Commit and push changes
        if: env.SKIP_RENAME != 'true'
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
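The rewritten added-files line has the same || precedence quirk noted above: when grep '^A' does match newly added migrations, the cut/xargs stage is skipped, added_files.txt is never created, and the step falls through to SKIP_RENAME=true. A hedged one-line equivalent with explicit grouping (illustrative only):

{ git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' || true; } | cut -f2 | xargs -r -n1 basename > added_files.txt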
@@ -5,3 +5,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
README.md (31 lines changed)
@@ -48,25 +48,26 @@

## Introduction

**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.

We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.

## Features

- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to managed secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.

And much more.

@@ -74,9 +75,9 @@ And much more.

Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)

| Use Infisical Cloud | Deploy Infisical on premise |
| --- | --- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |

### Run Infisical locally
@@ -3,7 +3,6 @@ import "ts-node/register";

import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";

import { seedData1 } from "@app/db/seed-data";
@@ -15,6 +14,7 @@ import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -23,23 +23,21 @@ export default {
  async setup() {
    const logger = await initLogger();
    const cfg = initEnvConfig(logger);
    const db = knex({
      client: "pg",
      connection: cfg.DB_CONNECTION_URI,
      migrations: {
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      },
      seeds: {
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      }
    const db = initDbConnection({
      dbConnectionUri: cfg.DB_CONNECTION_URI,
      dbRootCert: cfg.DB_ROOT_CERT
    });

    try {
      await db.migrate.latest();
      await db.seed.run();
      await db.migrate.latest({
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });
      await db.seed.run({
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      const smtp = mockSmtpServer();
      const queue = mockQueue();
      const keyStore = mockKeyStore();
@@ -74,7 +72,14 @@ export default {
    // @ts-expect-error type
    delete globalThis.jwtToken;
    // called after all tests with this env have been run
    await db.migrate.rollback({}, true);
    await db.migrate.rollback(
      {
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      },
      true
    );
    await db.destroy();
  }
};
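The test setup now builds the database client through initDbConnection, which does not embed migration or seed configuration, so every migrate.latest, seed.run, and migrate.rollback call has to pass the directory, extension, and tableName explicitly. A minimal sketch of the resulting shape (paths and table name as in the diff, error handling omitted, variable names illustrative):

const db = initDbConnection({ dbConnectionUri: cfg.DB_CONNECTION_URI, dbRootCert: cfg.DB_ROOT_CERT });
const migrationConfig = {
  directory: path.join(__dirname, "../src/db/migrations"),
  extension: "ts",
  tableName: "infisical_migrations"
};
await db.migrate.latest(migrationConfig);          // setup: apply all migrations
// ... run tests ...
await db.migrate.rollback(migrationConfig, true);  // teardown: roll everything back
await db.destroy();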
backend/package-lock.json (generated, 1419 lines changed; diff suppressed because it is too large)
@@ -72,6 +72,7 @@
"dependencies": {
  "@aws-sdk/client-iam": "^3.525.0",
  "@aws-sdk/client-secrets-manager": "^3.504.0",
  "@aws-sdk/client-sts": "^3.600.0",
  "@casl/ability": "^6.5.0",
  "@fastify/cookie": "^9.3.1",
  "@fastify/cors": "^8.5.0",
@@ -86,6 +87,8 @@
  "@node-saml/passport-saml": "^4.0.4",
  "@octokit/rest": "^20.0.2",
  "@octokit/webhooks-types": "^7.3.1",
  "@peculiar/asn1-schema": "^2.3.8",
  "@peculiar/x509": "^1.10.0",
  "@serdnam/pino-cloudwatch-transport": "^1.0.4",
  "@sindresorhus/slugify": "^2.2.1",
  "@ucast/mongo2js": "^1.3.4",
@@ -97,6 +100,8 @@
  "bcrypt": "^5.1.1",
  "bullmq": "^5.4.2",
  "cassandra-driver": "^4.7.2",
  "connect-redis": "^7.1.1",
  "cron": "^3.1.7",
  "dotenv": "^16.4.1",
  "fastify": "^4.26.0",
  "fastify-plugin": "^4.5.1",
@@ -115,6 +120,7 @@
  "mysql2": "^3.9.8",
  "nanoid": "^5.0.4",
  "nodemailer": "^6.9.9",
  "openid-client": "^5.6.5",
  "ora": "^7.0.1",
  "oracledb": "^6.4.0",
  "passport-github": "^1.1.0",
@@ -2,13 +2,14 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for migration: ");

// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = migrationName.replace(/\s+/g, "-");
const formattedMigrationName = slugify(migrationName);

execSync(
  `npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
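Switching from the whitespace-only regex to @sindresorhus/slugify also lowercases the entered name and strips characters that are awkward in file names. A rough before/after sketch (the sample input is illustrative; outputs assume slugify's default options):

import slugify from "@sindresorhus/slugify";

const migrationName = "Add PIT version Limit!";
migrationName.replace(/\s+/g, "-"); // "Add-PIT-version-Limit!" (old behaviour: only spaces become hyphens)
slugify(migrationName);             // "add-pit-version-limit"  (new behaviour: lowercased, punctuation dropped)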
backend/src/@types/fastify.d.ts (vendored, 10 lines changed)
@@ -6,14 +6,17 @@ import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-ap
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -29,6 +32,8 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@@ -98,6 +103,7 @@ declare module "fastify" {
    permission: TPermissionServiceFactory;
    org: TOrgServiceFactory;
    orgRole: TOrgRoleServiceFactory;
    oidc: TOidcConfigServiceFactory;
    superAdmin: TSuperAdminServiceFactory;
    user: TUserServiceFactory;
    group: TGroupServiceFactory;
@@ -137,6 +143,9 @@ declare module "fastify" {
    ldap: TLdapConfigServiceFactory;
    auditLog: TAuditLogServiceFactory;
    auditLogStream: TAuditLogStreamServiceFactory;
    certificate: TCertificateServiceFactory;
    certificateAuthority: TCertificateAuthorityServiceFactory;
    certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
    secretScanning: TSecretScanningServiceFactory;
    license: TLicenseServiceFactory;
    trustedIp: TTrustedIpServiceFactory;
@@ -147,6 +156,7 @@ declare module "fastify" {
    projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
    identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
    secretSharing: TSecretSharingServiceFactory;
    rateLimit: TRateLimitServiceFactory;
  };
  // this is exclusive use for middlewares in which we need to inject data
  // everywhere else access using service layer
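These declaration changes register the new OIDC, certificate, certificate-authority/CRL, and rate-limit service factories on the Fastify instance so route code sees them fully typed. A minimal sketch of what that enables, assuming the keys above live under a services container on FastifyInstance as in Infisical's fastify.d.ts (the route and handler are illustrative, not from the repo):

import type { FastifyInstance } from "fastify";

export const registerExampleRoutes = (server: FastifyInstance) => {
  server.get("/example/services", async () => {
    // typed via the augmentation above (TRateLimitServiceFactory, TOidcConfigServiceFactory, ...)
    const { rateLimit, oidc, certificateAuthority } = server.services;
    return {
      hasRateLimit: Boolean(rateLimit),
      hasOidc: Boolean(oidc),
      hasCa: Boolean(certificateAuthority)
    };
  });
};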
backend/src/@types/knex.d.ts (vendored, 281 lines changed)
@@ -1,4 +1,4 @@
import { Knex } from "knex";
import { Knex as KnexOriginal } from "knex";

import {
  TableName,
@ -32,6 +32,27 @@ import {
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate,
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate,
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate,
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate,
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate,
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate,
|
||||
TCertificates,
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate,
|
||||
TCertificatesInsert,
|
||||
TCertificatesUpdate,
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate,
|
||||
@ -113,6 +134,9 @@ import {
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate,
|
||||
TOidcConfigs,
|
||||
TOidcConfigsInsert,
|
||||
TOidcConfigsUpdate,
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate,
|
||||
@ -149,6 +173,9 @@ import {
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate,
|
||||
TRateLimit,
|
||||
TRateLimitInsert,
|
||||
TRateLimitUpdate,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsInsert,
|
||||
TSamlConfigsUpdate,
|
||||
@ -253,285 +280,371 @@ import {
|
||||
TWebhooksUpdate
|
||||
} from "@app/db/schemas";
|
||||
|
||||
declare module "knex" {
  namespace Knex {
    interface QueryInterface {
      primaryNode(): KnexOriginal;
      replicaNode(): KnexOriginal;
    }
  }
}

declare module "knex/types/tables" {
  interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
|
||||
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
|
||||
[TableName.UserGroupMembership]: Knex.CompositeTableType<
|
||||
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
|
||||
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
|
||||
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorities,
|
||||
TCertificateAuthoritiesInsert,
|
||||
TCertificateAuthoritiesUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorityCerts,
|
||||
TCertificateAuthorityCertsInsert,
|
||||
TCertificateAuthorityCertsUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthoritySecret,
|
||||
TCertificateAuthoritySecretInsert,
|
||||
TCertificateAuthoritySecretUpdate
|
||||
>;
|
||||
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
|
||||
TCertificateAuthorityCrl,
|
||||
TCertificateAuthorityCrlInsert,
|
||||
TCertificateAuthorityCrlUpdate
|
||||
>;
|
||||
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
|
||||
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
TCertificateBodiesUpdate
|
||||
>;
|
||||
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
|
||||
TCertificateSecrets,
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate
|
||||
>;
|
||||
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
TUserGroupMembershipUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
|
||||
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TGroupProjectMemberships,
|
||||
TGroupProjectMembershipsInsert,
|
||||
TGroupProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
|
||||
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TGroupProjectMembershipRoles,
|
||||
TGroupProjectMembershipRolesInsert,
|
||||
TGroupProjectMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
|
||||
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
|
||||
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
|
||||
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
|
||||
TUserEncryptionKeys,
|
||||
TUserEncryptionKeysInsert,
|
||||
TUserEncryptionKeysUpdate
|
||||
>;
|
||||
[TableName.AuthTokens]: Knex.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
|
||||
[TableName.AuthTokenSession]: Knex.CompositeTableType<
|
||||
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
|
||||
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
|
||||
TAuthTokenSessions,
|
||||
TAuthTokenSessionsInsert,
|
||||
TAuthTokenSessionsUpdate
|
||||
>;
|
||||
[TableName.BackupPrivateKey]: Knex.CompositeTableType<
|
||||
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate
|
||||
>;
|
||||
[TableName.Organization]: Knex.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
|
||||
[TableName.OrgMembership]: Knex.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
|
||||
[TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
|
||||
[TableName.IncidentContact]: Knex.CompositeTableType<
|
||||
[TableName.Organization]: KnexOriginal.CompositeTableType<
|
||||
TOrganizations,
|
||||
TOrganizationsInsert,
|
||||
TOrganizationsUpdate
|
||||
>;
|
||||
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate
|
||||
>;
|
||||
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
|
||||
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
|
||||
TIncidentContacts,
|
||||
TIncidentContactsInsert,
|
||||
TIncidentContactsUpdate
|
||||
>;
|
||||
[TableName.UserAction]: Knex.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
|
||||
[TableName.SuperAdmin]: Knex.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
|
||||
[TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
|
||||
[TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
|
||||
[TableName.ProjectMembership]: Knex.CompositeTableType<
|
||||
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
|
||||
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
|
||||
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
|
||||
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
|
||||
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TProjectMemberships,
|
||||
TProjectMembershipsInsert,
|
||||
TProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.Environment]: Knex.CompositeTableType<
|
||||
[TableName.Environment]: KnexOriginal.CompositeTableType<
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate
|
||||
>;
|
||||
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
|
||||
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
|
||||
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
|
||||
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate
|
||||
>;
|
||||
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
|
||||
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
|
||||
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
|
||||
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
|
||||
TProjectUserAdditionalPrivilege,
|
||||
TProjectUserAdditionalPrivilegeInsert,
|
||||
TProjectUserAdditionalPrivilegeUpdate
|
||||
>;
|
||||
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
|
||||
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
|
||||
[TableName.SecretReference]: Knex.CompositeTableType<
|
||||
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
|
||||
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
|
||||
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
|
||||
TSecretReferences,
|
||||
TSecretReferencesInsert,
|
||||
TSecretReferencesUpdate
|
||||
>;
|
||||
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
|
||||
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
|
||||
TSecretBlindIndexes,
|
||||
TSecretBlindIndexesInsert,
|
||||
TSecretBlindIndexesUpdate
|
||||
>;
|
||||
[TableName.SecretVersion]: Knex.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
|
||||
[TableName.SecretFolder]: Knex.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
|
||||
[TableName.SecretFolderVersion]: Knex.CompositeTableType<
|
||||
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
|
||||
TSecretFolders,
|
||||
TSecretFoldersInsert,
|
||||
TSecretFoldersUpdate
|
||||
>;
|
||||
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
|
||||
TSecretFolderVersions,
|
||||
TSecretFolderVersionsInsert,
|
||||
TSecretFolderVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
|
||||
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
|
||||
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
|
||||
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
|
||||
[TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
|
||||
[TableName.ServiceToken]: Knex.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
|
||||
[TableName.IntegrationAuth]: Knex.CompositeTableType<
|
||||
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
|
||||
TSecretSharing,
|
||||
TSecretSharingInsert,
|
||||
TSecretSharingUpdate
|
||||
>;
|
||||
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
|
||||
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
|
||||
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
|
||||
TSecretImports,
|
||||
TSecretImportsInsert,
|
||||
TSecretImportsUpdate
|
||||
>;
|
||||
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
|
||||
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
|
||||
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
|
||||
TServiceTokens,
|
||||
TServiceTokensInsert,
|
||||
TServiceTokensUpdate
|
||||
>;
|
||||
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
|
||||
TIntegrationAuths,
|
||||
TIntegrationAuthsInsert,
|
||||
TIntegrationAuthsUpdate
|
||||
>;
|
||||
[TableName.Identity]: Knex.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
|
||||
[TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
|
||||
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
|
||||
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUniversalAuths,
|
||||
TIdentityUniversalAuthsInsert,
|
||||
TIdentityUniversalAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
|
||||
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityGcpAuth]: Knex.CompositeTableType<
|
||||
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAwsAuth]: Knex.CompositeTableType<
|
||||
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAwsAuths,
|
||||
TIdentityAwsAuthsInsert,
|
||||
TIdentityAwsAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityAzureAuth]: Knex.CompositeTableType<
|
||||
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAzureAuths,
|
||||
TIdentityAzureAuthsInsert,
|
||||
TIdentityAzureAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
|
||||
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
TIdentityUaClientSecretsUpdate
|
||||
>;
|
||||
[TableName.IdentityAccessToken]: Knex.CompositeTableType<
|
||||
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
|
||||
TIdentityAccessTokens,
|
||||
TIdentityAccessTokensInsert,
|
||||
TIdentityAccessTokensUpdate
|
||||
>;
|
||||
[TableName.IdentityOrgMembership]: Knex.CompositeTableType<
|
||||
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembership]: Knex.CompositeTableType<
|
||||
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectMemberships,
|
||||
TIdentityProjectMembershipsInsert,
|
||||
TIdentityProjectMembershipsUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
|
||||
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectMembershipRole,
|
||||
TIdentityProjectMembershipRoleInsert,
|
||||
TIdentityProjectMembershipRoleUpdate
|
||||
>;
|
||||
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
|
||||
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
|
||||
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalPolicies,
|
||||
TAccessApprovalPoliciesInsert,
|
||||
TAccessApprovalPoliciesUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
|
||||
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalPoliciesApprovers,
|
||||
TAccessApprovalPoliciesApproversInsert,
|
||||
TAccessApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequest]: Knex.CompositeTableType<
|
||||
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalRequests,
|
||||
TAccessApprovalRequestsInsert,
|
||||
TAccessApprovalRequestsUpdate
|
||||
>;
|
||||
|
||||
[TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
|
||||
[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
|
||||
TAccessApprovalRequestsReviewers,
|
||||
TAccessApprovalRequestsReviewersInsert,
|
||||
TAccessApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
|
||||
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
|
||||
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
|
||||
[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
|
||||
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalPolicies,
|
||||
TSecretApprovalPoliciesInsert,
|
||||
TSecretApprovalPoliciesUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
|
||||
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalPoliciesApprovers,
|
||||
TSecretApprovalPoliciesApproversInsert,
|
||||
TSecretApprovalPoliciesApproversUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequest]: Knex.CompositeTableType<
|
||||
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequests,
|
||||
TSecretApprovalRequestsInsert,
|
||||
TSecretApprovalRequestsUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
|
||||
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsReviewers,
|
||||
TSecretApprovalRequestsReviewersInsert,
|
||||
TSecretApprovalRequestsReviewersUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
|
||||
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsSecrets,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsUpdate
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
|
||||
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestSecretTags,
|
||||
TSecretApprovalRequestSecretTagsInsert,
|
||||
TSecretApprovalRequestSecretTagsUpdate
|
||||
>;
|
||||
[TableName.SecretRotation]: Knex.CompositeTableType<
|
||||
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate
|
||||
>;
|
||||
[TableName.SecretRotationOutput]: Knex.CompositeTableType<
|
||||
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationOutputs,
|
||||
TSecretRotationOutputsInsert,
|
||||
TSecretRotationOutputsUpdate
|
||||
>;
|
||||
[TableName.Snapshot]: Knex.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
|
||||
[TableName.SnapshotSecret]: Knex.CompositeTableType<
|
||||
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshots,
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate
|
||||
>;
|
||||
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotSecrets,
|
||||
TSecretSnapshotSecretsInsert,
|
||||
TSecretSnapshotSecretsUpdate
|
||||
>;
|
||||
[TableName.SnapshotFolder]: Knex.CompositeTableType<
|
||||
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate
|
||||
>;
|
||||
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
|
||||
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
|
||||
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate
|
||||
>;
|
||||
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate
|
||||
>;
|
||||
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
|
||||
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.AuditLogStream]: Knex.CompositeTableType<
|
||||
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
|
||||
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate
|
||||
>;
|
||||
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate
|
||||
>;
|
||||
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
|
||||
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate
|
||||
>;
|
||||
[TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
|
||||
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate
|
||||
>;
|
||||
[TableName.TrustedIps]: Knex.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
// Junction tables
|
||||
[TableName.JnSecretTag]: Knex.CompositeTableType<
|
||||
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionTag]: Knex.CompositeTableType<
|
||||
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate
|
||||
>;
|
||||
// KMS service
|
||||
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
|
||||
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate
|
||||
>;
|
||||
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
|
||||
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
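The bulk of this file swaps every Knex.CompositeTableType mapping over to KnexOriginal.CompositeTableType (and adds entries for the new certificate, OIDC, and rate-limit tables), which is what keeps plain db(TableName.X) queries typed against the schema row types. A small sketch of the effect, with an illustrative query (TUsers comes from @app/db/schemas, as in the imports above):

import { TableName, TUsers } from "@app/db/schemas";
import { Knex } from "knex";

// thanks to the Tables augmentation, this assignment type-checks without casts
const listUsers = async (db: Knex): Promise<TUsers[]> => {
  const rows: TUsers[] = await db(TableName.Users).select("*");
  return rows;
};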
@@ -1,8 +1,38 @@
import knex from "knex";
import knex, { Knex } from "knex";

export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
  const db = knex({
export const initDbConnection = ({
  dbConnectionUri,
  dbRootCert,
  readReplicas = []
}: {
  dbConnectionUri: string;
  dbRootCert?: string;
  readReplicas?: {
    dbConnectionUri: string;
    dbRootCert?: string;
  }[];
}) => {
  // akhilmhdh: the default Knex is knex.Knex<any, any[]>. but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
  // this was causing issue with files like `snapshot-dal` `findRecursivelySnapshots` this i am explicitly putting the any and unknown[]
  // eslint-disable-next-line
  let db: Knex<any, unknown[]>;
  // eslint-disable-next-line
  let readReplicaDbs: Knex<any, unknown[]>[];
  // @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
  knex.QueryBuilder.extend("primaryNode", () => {
    return db;
  });

  // @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
  knex.QueryBuilder.extend("replicaNode", () => {
    if (!readReplicaDbs.length) return db;

    const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)];
    return selectedReplica;
  });

  db = knex({
    client: "pg",
    connection: {
      connectionString: dbConnectionUri,
@@ -22,5 +52,21 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
    }
  });

  readReplicaDbs = readReplicas.map((el) => {
    const replicaDbCertificate = el.dbRootCert || dbRootCert;
    return knex({
      client: "pg",
      connection: {
        connectionString: el.dbConnectionUri,
        ssl: replicaDbCertificate
          ? {
              rejectUnauthorized: true,
              ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
            }
          : false
      }
    });
  });

  return db;
};
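initDbConnection now builds one knex instance per read replica and extends the query builder with primaryNode()/replicaNode(); replicaNode() picks a replica at random and falls back to the primary when none are configured. A minimal usage sketch under those assumptions (the helper and access pattern are illustrative, not lifted from the repo's DAL code):

import { TableName } from "@app/db/schemas";
import { TDbClient } from "@app/db";

// route a read-only query to a replica; writes keep going through `db` (the primary) directly
const findAllOrgs = async (db: TDbClient) => {
  const replica = db.queryBuilder().replicaNode(); // a Knex instance backed by a replica, or the primary if none exist
  return replica(TableName.Organization).select("*");
};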
@@ -0,0 +1,61 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
  const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKey"
  );
  const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyIV"
  );
  const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyTag"
  );
  const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyEncoding"
  );
  if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
    await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
      if (!doesPasswordFieldExist) t.string("hashedPassword");
      if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
      if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
      if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
      if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
  const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKey"
  );
  const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyIV"
  );
  const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyTag"
  );
  const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyEncoding"
  );
  if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
    await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
      if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
      if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
      if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
      if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
      if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
    });
  }
}
@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
|
||||
await knex.schema.alterTable(TableName.Project, (tb) => {
|
||||
if (!hasPitVersionLimitColumn) {
|
||||
tb.integer("pitVersionLimit").notNullable().defaultTo(10);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
|
||||
await knex.schema.alterTable(TableName.Project, (tb) => {
|
||||
if (hasPitVersionLimitColumn) {
|
||||
tb.dropColumn("pitVersionLimit");
|
||||
}
|
||||
});
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.RateLimit))) {
|
||||
await knex.schema.createTable(TableName.RateLimit, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.integer("readRateLimit").defaultTo(600).notNullable();
|
||||
t.integer("writeRateLimit").defaultTo(200).notNullable();
|
||||
t.integer("secretsRateLimit").defaultTo(60).notNullable();
|
||||
t.integer("authRateLimit").defaultTo(60).notNullable();
|
||||
t.integer("inviteUserRateLimit").defaultTo(30).notNullable();
|
||||
t.integer("mfaRateLimit").defaultTo(20).notNullable();
|
||||
t.integer("creationLimit").defaultTo(30).notNullable();
|
||||
t.integer("publicEndpointLimit").defaultTo(30).notNullable();
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.RateLimit);
|
||||
|
||||
// create init rate limit entry with defaults
|
||||
await knex(TableName.RateLimit).insert({});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.RateLimit);
|
||||
await dropOnUpdateTrigger(knex, TableName.RateLimit);
|
||||
}
|
@ -0,0 +1,25 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
|
||||
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
|
||||
if (!hasCreatedByActorType) {
|
||||
tb.string("createdByActorType").notNullable().defaultTo(ActorType.USER);
|
||||
tb.dropForeign("createdBy");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
|
||||
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
|
||||
if (hasCreatedByActorType) {
|
||||
tb.dropColumn("createdByActorType");
|
||||
tb.foreign("createdBy").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
}
|
||||
});
|
||||
}
|
137
backend/src/db/migrations/20240614154212_certificate-mgmt.ts
Normal file
137
backend/src/db/migrations/20240614154212_certificate-mgmt.ts
Normal file
@ -0,0 +1,137 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Project)) {
|
||||
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
|
||||
await knex.schema.alterTable(TableName.Project, (t) => {
|
||||
if (!doesProjectCertificateKeyIdExist) {
|
||||
t.uuid("kmsCertificateKeyId").nullable();
|
||||
t.foreign("kmsCertificateKeyId").references("id").inTable(TableName.KmsKey);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.CertificateAuthority))) {
|
||||
await knex.schema.createTable(TableName.CertificateAuthority, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("parentCaId").nullable();
|
||||
t.foreign("parentCaId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.string("type").notNullable(); // root / intermediate
|
||||
t.string("status").notNullable(); // active / pending-certificate
|
||||
t.string("friendlyName").notNullable();
|
||||
t.string("organization").notNullable();
|
||||
t.string("ou").notNullable();
|
||||
t.string("country").notNullable();
|
||||
t.string("province").notNullable();
|
||||
t.string("locality").notNullable();
|
||||
t.string("commonName").notNullable();
|
||||
t.string("dn").notNullable();
|
||||
t.string("serialNumber").nullable().unique();
|
||||
t.integer("maxPathLength").nullable();
|
||||
t.string("keyAlgorithm").notNullable();
|
||||
t.datetime("notBefore").nullable();
|
||||
t.datetime("notAfter").nullable();
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCert))) {
|
||||
// table to keep track of certificates belonging to CA
|
||||
await knex.schema.createTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("caId").notNullable().unique();
|
||||
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.binary("encryptedCertificate").notNullable();
|
||||
t.binary("encryptedCertificateChain").notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.CertificateAuthoritySecret))) {
|
||||
await knex.schema.createTable(TableName.CertificateAuthoritySecret, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("caId").notNullable().unique();
|
||||
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.binary("encryptedPrivateKey").notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCrl))) {
|
||||
await knex.schema.createTable(TableName.CertificateAuthorityCrl, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("caId").notNullable().unique();
|
||||
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.binary("encryptedCrl").notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.Certificate))) {
|
||||
await knex.schema.createTable(TableName.Certificate, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("caId").notNullable();
|
||||
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.string("status").notNullable(); // active / pending-certificate
|
||||
t.string("serialNumber").notNullable().unique();
|
||||
t.string("friendlyName").notNullable();
|
||||
t.string("commonName").notNullable();
|
||||
t.datetime("notBefore").notNullable();
|
||||
t.datetime("notAfter").notNullable();
|
||||
t.datetime("revokedAt").nullable();
|
||||
t.integer("revocationReason").nullable(); // integer based on crl reason in RFC 5280
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.CertificateBody))) {
|
||||
await knex.schema.createTable(TableName.CertificateBody, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("certId").notNullable().unique();
|
||||
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
|
||||
t.binary("encryptedCertificate").notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateAuthority);
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
|
||||
await createOnUpdateTrigger(knex, TableName.Certificate);
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateBody);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
// project
|
||||
if (await knex.schema.hasTable(TableName.Project)) {
|
||||
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
|
||||
await knex.schema.alterTable(TableName.Project, (t) => {
|
||||
if (doesProjectCertificateKeyIdExist) t.dropColumn("kmsCertificateKeyId");
|
||||
});
|
||||
}
|
||||
|
||||
// certificates
|
||||
await knex.schema.dropTableIfExists(TableName.CertificateBody);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateBody);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.Certificate);
|
||||
await dropOnUpdateTrigger(knex, TableName.Certificate);
|
||||
|
||||
// certificate authorities
|
||||
await knex.schema.dropTableIfExists(TableName.CertificateAuthoritySecret);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCrl);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCrl);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCert);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.CertificateAuthority);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateAuthority);
|
||||
}
|
@ -0,0 +1,27 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
|
||||
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
if (hasOrgIdColumn) t.uuid("orgId").nullable().alter();
|
||||
if (hasUserIdColumn) t.uuid("userId").nullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
|
||||
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.SecretSharing)) {
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
|
||||
if (hasOrgIdColumn) t.uuid("orgId").notNullable().alter();
|
||||
if (hasUserIdColumn) t.uuid("userId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
49
backend/src/db/migrations/20240624161942_add-oidc-auth.ts
Normal file
49
backend/src/db/migrations/20240624161942_add-oidc-auth.ts
Normal file
@ -0,0 +1,49 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.OidcConfig))) {
|
||||
await knex.schema.createTable(TableName.OidcConfig, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.string("discoveryURL");
|
||||
tb.string("issuer");
|
||||
tb.string("authorizationEndpoint");
|
||||
tb.string("jwksUri");
|
||||
tb.string("tokenEndpoint");
|
||||
tb.string("userinfoEndpoint");
|
||||
tb.text("encryptedClientId").notNullable();
|
||||
tb.string("configurationType").notNullable();
|
||||
tb.string("clientIdIV").notNullable();
|
||||
tb.string("clientIdTag").notNullable();
|
||||
tb.text("encryptedClientSecret").notNullable();
|
||||
tb.string("clientSecretIV").notNullable();
|
||||
tb.string("clientSecretTag").notNullable();
|
||||
tb.string("allowedEmailDomains").nullable();
|
||||
tb.boolean("isActive").notNullable();
|
||||
tb.timestamps(true, true, true);
|
||||
tb.uuid("orgId").notNullable().unique();
|
||||
tb.foreign("orgId").references("id").inTable(TableName.Organization);
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
|
||||
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails"))) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
|
||||
tb.boolean("trustOidcEmails").defaultTo(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.OidcConfig);
|
||||
|
||||
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("trustOidcEmails");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,27 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
const DEFAULT_AUTH_ORG_ID_FIELD = "defaultAuthOrgId";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);
|
||||
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (!hasDefaultOrgColumn) {
|
||||
t.uuid(DEFAULT_AUTH_ORG_ID_FIELD).nullable();
|
||||
t.foreign(DEFAULT_AUTH_ORG_ID_FIELD).references("id").inTable(TableName.Organization).onDelete("SET NULL");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);
|
||||
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (hasDefaultOrgColumn) {
|
||||
t.dropForeign([DEFAULT_AUTH_ORG_ID_FIELD]);
|
||||
t.dropColumn(DEFAULT_AUTH_ORG_ID_FIELD);
|
||||
}
|
||||
});
|
||||
}
|
@ -0,0 +1,24 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Certificate)) {
|
||||
const hasAltNamesColumn = await knex.schema.hasColumn(TableName.Certificate, "altNames");
|
||||
if (!hasAltNamesColumn) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.string("altNames").defaultTo("");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Certificate)) {
|
||||
if (await knex.schema.hasColumn(TableName.Certificate, "altNames")) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.dropColumn("altNames");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,35 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
|
||||
TableName.IntegrationAuth,
|
||||
"awsAssumeIamRoleArnCipherText"
|
||||
);
|
||||
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
|
||||
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
|
||||
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
|
||||
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
|
||||
if (!hasAwsAssumeRoleCipherText) t.text("awsAssumeIamRoleArnCipherText");
|
||||
if (!hasAwsAssumeRoleIV) t.text("awsAssumeIamRoleArnIV");
|
||||
if (!hasAwsAssumeRoleTag) t.text("awsAssumeIamRoleArnTag");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
|
||||
TableName.IntegrationAuth,
|
||||
"awsAssumeIamRoleArnCipherText"
|
||||
);
|
||||
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
|
||||
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
|
||||
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
|
||||
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
|
||||
if (hasAwsAssumeRoleCipherText) t.dropColumn("awsAssumeIamRoleArnCipherText");
|
||||
if (hasAwsAssumeRoleIV) t.dropColumn("awsAssumeIamRoleArnIV");
|
||||
if (hasAwsAssumeRoleTag) t.dropColumn("awsAssumeIamRoleArnTag");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
|
||||
tb.specificType("enabledLoginMethods", "text[]");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("enabledLoginMethods");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute"))) {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (tb) => {
|
||||
tb.string("uniqueUserAttribute").notNullable().defaultTo("");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute")) {
|
||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
||||
t.dropColumn("uniqueUserAttribute");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays"))) {
|
||||
await knex.schema.alterTable(TableName.Project, (tb) => {
|
||||
tb.integer("auditLogsRetentionDays").nullable();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays")) {
|
||||
await knex.schema.alterTable(TableName.Project, (t) => {
|
||||
t.dropColumn("auditLogsRetentionDays");
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
await createOnUpdateTrigger(knex, TableName.OidcConfig);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await dropOnUpdateTrigger(knex, TableName.OidcConfig);
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites"))) {
|
||||
await knex.schema.alterTable(TableName.OrgMembership, (tb) => {
|
||||
tb.specificType("projectFavorites", "text[]");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites")) {
|
||||
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
|
||||
t.dropColumn("projectFavorites");
|
||||
});
|
||||
}
|
||||
}
|
37
backend/src/db/schemas/certificate-authorities.ts
Normal file
37
backend/src/db/schemas/certificate-authorities.ts
Normal file
@ -0,0 +1,37 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateAuthoritiesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
parentCaId: z.string().uuid().nullable().optional(),
|
||||
projectId: z.string(),
|
||||
type: z.string(),
|
||||
status: z.string(),
|
||||
friendlyName: z.string(),
|
||||
organization: z.string(),
|
||||
ou: z.string(),
|
||||
country: z.string(),
|
||||
province: z.string(),
|
||||
locality: z.string(),
|
||||
commonName: z.string(),
|
||||
dn: z.string(),
|
||||
serialNumber: z.string().nullable().optional(),
|
||||
maxPathLength: z.number().nullable().optional(),
|
||||
keyAlgorithm: z.string(),
|
||||
notBefore: z.date().nullable().optional(),
|
||||
notAfter: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
|
||||
export type TCertificateAuthoritiesInsert = Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>;
|
||||
export type TCertificateAuthoritiesUpdate = Partial<
|
||||
Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>
|
||||
>;
|
25
backend/src/db/schemas/certificate-authority-certs.ts
Normal file
25
backend/src/db/schemas/certificate-authority-certs.ts
Normal file
@ -0,0 +1,25 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateAuthorityCertsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
caId: z.string().uuid(),
|
||||
encryptedCertificate: zodBuffer,
|
||||
encryptedCertificateChain: zodBuffer
|
||||
});
|
||||
|
||||
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;
|
||||
export type TCertificateAuthorityCertsInsert = Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>;
|
||||
export type TCertificateAuthorityCertsUpdate = Partial<
|
||||
Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>
|
||||
>;
|
24
backend/src/db/schemas/certificate-authority-crl.ts
Normal file
24
backend/src/db/schemas/certificate-authority-crl.ts
Normal file
@ -0,0 +1,24 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateAuthorityCrlSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
caId: z.string().uuid(),
|
||||
encryptedCrl: zodBuffer
|
||||
});
|
||||
|
||||
export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;
|
||||
export type TCertificateAuthorityCrlInsert = Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>;
|
||||
export type TCertificateAuthorityCrlUpdate = Partial<
|
||||
Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>
|
||||
>;
|
27
backend/src/db/schemas/certificate-authority-secret.ts
Normal file
27
backend/src/db/schemas/certificate-authority-secret.ts
Normal file
@ -0,0 +1,27 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateAuthoritySecretSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
caId: z.string().uuid(),
|
||||
encryptedPrivateKey: zodBuffer
|
||||
});
|
||||
|
||||
export type TCertificateAuthoritySecret = z.infer<typeof CertificateAuthoritySecretSchema>;
|
||||
export type TCertificateAuthoritySecretInsert = Omit<
|
||||
z.input<typeof CertificateAuthoritySecretSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TCertificateAuthoritySecretUpdate = Partial<
|
||||
Omit<z.input<typeof CertificateAuthoritySecretSchema>, TImmutableDBKeys>
|
||||
>;
|
22
backend/src/db/schemas/certificate-bodies.ts
Normal file
22
backend/src/db/schemas/certificate-bodies.ts
Normal file
@ -0,0 +1,22 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateBodiesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
certId: z.string().uuid(),
|
||||
encryptedCertificate: zodBuffer
|
||||
});
|
||||
|
||||
export type TCertificateBodies = z.infer<typeof CertificateBodiesSchema>;
|
||||
export type TCertificateBodiesInsert = Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>;
|
||||
export type TCertificateBodiesUpdate = Partial<Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>>;
|
21
backend/src/db/schemas/certificate-secrets.ts
Normal file
21
backend/src/db/schemas/certificate-secrets.ts
Normal file
@ -0,0 +1,21 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificateSecretsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
certId: z.string().uuid(),
|
||||
pk: z.string(),
|
||||
sk: z.string()
|
||||
});
|
||||
|
||||
export type TCertificateSecrets = z.infer<typeof CertificateSecretsSchema>;
|
||||
export type TCertificateSecretsInsert = Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>;
|
||||
export type TCertificateSecretsUpdate = Partial<Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>>;
|
28
backend/src/db/schemas/certificates.ts
Normal file
28
backend/src/db/schemas/certificates.ts
Normal file
@ -0,0 +1,28 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const CertificatesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
caId: z.string().uuid(),
|
||||
status: z.string(),
|
||||
serialNumber: z.string(),
|
||||
friendlyName: z.string(),
|
||||
commonName: z.string(),
|
||||
notBefore: z.date(),
|
||||
notAfter: z.date(),
|
||||
revokedAt: z.date().nullable().optional(),
|
||||
revocationReason: z.number().nullable().optional(),
|
||||
altNames: z.string().default("").nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificates = z.infer<typeof CertificatesSchema>;
|
||||
export type TCertificatesInsert = Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>;
|
||||
export type TCertificatesUpdate = Partial<Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>>;
|
@ -8,6 +8,13 @@ export * from "./audit-logs";
|
||||
export * from "./auth-token-sessions";
|
||||
export * from "./auth-tokens";
|
||||
export * from "./backup-private-key";
|
||||
export * from "./certificate-authorities";
|
||||
export * from "./certificate-authority-certs";
|
||||
export * from "./certificate-authority-crl";
|
||||
export * from "./certificate-authority-secret";
|
||||
export * from "./certificate-bodies";
|
||||
export * from "./certificate-secrets";
|
||||
export * from "./certificates";
|
||||
export * from "./dynamic-secret-leases";
|
||||
export * from "./dynamic-secrets";
|
||||
export * from "./git-app-install-sessions";
|
||||
@ -36,6 +43,7 @@ export * from "./kms-root-config";
|
||||
export * from "./ldap-configs";
|
||||
export * from "./ldap-group-maps";
|
||||
export * from "./models";
|
||||
export * from "./oidc-configs";
|
||||
export * from "./org-bots";
|
||||
export * from "./org-memberships";
|
||||
export * from "./org-roles";
|
||||
@ -48,6 +56,7 @@ export * from "./project-roles";
|
||||
export * from "./project-user-additional-privilege";
|
||||
export * from "./project-user-membership-roles";
|
||||
export * from "./projects";
|
||||
export * from "./rate-limit";
|
||||
export * from "./saml-configs";
|
||||
export * from "./scim-tokens";
|
||||
export * from "./secret-approval-policies";
|
||||
|
@ -29,7 +29,10 @@ export const IntegrationAuthsSchema = z.object({
|
||||
keyEncoding: z.string(),
|
||||
projectId: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
awsAssumeIamRoleArnCipherText: z.string().nullable().optional(),
|
||||
awsAssumeIamRoleArnIV: z.string().nullable().optional(),
|
||||
awsAssumeIamRoleArnTag: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;
|
||||
|
@ -26,7 +26,8 @@ export const LdapConfigsSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
groupSearchBase: z.string().default(""),
|
||||
groupSearchFilter: z.string().default(""),
|
||||
searchFilter: z.string().default("")
|
||||
searchFilter: z.string().default(""),
|
||||
uniqueUserAttribute: z.string().default("")
|
||||
});
|
||||
|
||||
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
|
||||
|
@ -2,6 +2,13 @@ import { z } from "zod";
|
||||
|
||||
export enum TableName {
|
||||
Users = "users",
|
||||
CertificateAuthority = "certificate_authorities",
|
||||
CertificateAuthorityCert = "certificate_authority_certs",
|
||||
CertificateAuthoritySecret = "certificate_authority_secret",
|
||||
CertificateAuthorityCrl = "certificate_authority_crl",
|
||||
Certificate = "certificates",
|
||||
CertificateBody = "certificate_bodies",
|
||||
CertificateSecret = "certificate_secrets",
|
||||
Groups = "groups",
|
||||
GroupProjectMembership = "group_project_memberships",
|
||||
GroupProjectMembershipRole = "group_project_membership_roles",
|
||||
@ -18,6 +25,7 @@ export enum TableName {
|
||||
IncidentContact = "incident_contacts",
|
||||
UserAction = "user_actions",
|
||||
SuperAdmin = "super_admin",
|
||||
RateLimit = "rate_limit",
|
||||
ApiKey = "api_keys",
|
||||
Project = "projects",
|
||||
ProjectBot = "project_bots",
|
||||
@ -70,6 +78,7 @@ export enum TableName {
|
||||
SecretRotationOutput = "secret_rotation_outputs",
|
||||
SamlConfig = "saml_configs",
|
||||
LdapConfig = "ldap_configs",
|
||||
OidcConfig = "oidc_configs",
|
||||
LdapGroupMap = "ldap_group_maps",
|
||||
AuditLog = "audit_logs",
|
||||
AuditLogStream = "audit_log_streams",
|
||||
|
34
backend/src/db/schemas/oidc-configs.ts
Normal file
34
backend/src/db/schemas/oidc-configs.ts
Normal file
@ -0,0 +1,34 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const OidcConfigsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
discoveryURL: z.string().nullable().optional(),
|
||||
issuer: z.string().nullable().optional(),
|
||||
authorizationEndpoint: z.string().nullable().optional(),
|
||||
jwksUri: z.string().nullable().optional(),
|
||||
tokenEndpoint: z.string().nullable().optional(),
|
||||
userinfoEndpoint: z.string().nullable().optional(),
|
||||
encryptedClientId: z.string(),
|
||||
configurationType: z.string(),
|
||||
clientIdIV: z.string(),
|
||||
clientIdTag: z.string(),
|
||||
encryptedClientSecret: z.string(),
|
||||
clientSecretIV: z.string(),
|
||||
clientSecretTag: z.string(),
|
||||
allowedEmailDomains: z.string().nullable().optional(),
|
||||
isActive: z.boolean(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
|
||||
export type TOidcConfigsInsert = Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>;
|
||||
export type TOidcConfigsUpdate = Partial<Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>>;
|
@ -16,7 +16,8 @@ export const OrgMembershipsSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
orgId: z.string().uuid(),
|
||||
roleId: z.string().uuid().nullable().optional()
|
||||
roleId: z.string().uuid().nullable().optional(),
|
||||
projectFavorites: z.string().array().nullable().optional()
|
||||
});
|
||||
|
||||
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
|
||||
|
@ -16,7 +16,10 @@ export const ProjectsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
version: z.number().default(1),
|
||||
upgradeStatus: z.string().nullable().optional()
|
||||
upgradeStatus: z.string().nullable().optional(),
|
||||
pitVersionLimit: z.number().default(10),
|
||||
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
|
||||
auditLogsRetentionDays: z.number().nullable().optional()
|
||||
});
|
||||
|
||||
export type TProjects = z.infer<typeof ProjectsSchema>;
|
||||
|
26
backend/src/db/schemas/rate-limit.ts
Normal file
26
backend/src/db/schemas/rate-limit.ts
Normal file
@ -0,0 +1,26 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const RateLimitSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
readRateLimit: z.number().default(600),
|
||||
writeRateLimit: z.number().default(200),
|
||||
secretsRateLimit: z.number().default(60),
|
||||
authRateLimit: z.number().default(60),
|
||||
inviteUserRateLimit: z.number().default(30),
|
||||
mfaRateLimit: z.number().default(20),
|
||||
creationLimit: z.number().default(30),
|
||||
publicEndpointLimit: z.number().default(30),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TRateLimit = z.infer<typeof RateLimitSchema>;
|
||||
export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
|
||||
export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;
|
@ -14,8 +14,8 @@ export const SecretSharingSchema = z.object({
|
||||
tag: z.string(),
|
||||
hashedHex: z.string(),
|
||||
expiresAt: z.date(),
|
||||
userId: z.string().uuid(),
|
||||
orgId: z.string().uuid(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
orgId: z.string().uuid().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
expiresAfterViews: z.number().nullable().optional()
|
||||
|
@ -15,7 +15,8 @@ export const SecretTagsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
createdBy: z.string().uuid().nullable().optional(),
|
||||
projectId: z.string()
|
||||
projectId: z.string(),
|
||||
createdByActorType: z.string().default("user")
|
||||
});
|
||||
|
||||
export type TSecretTags = z.infer<typeof SecretTagsSchema>;
|
||||
|
@ -16,7 +16,10 @@ export const SuperAdminSchema = z.object({
|
||||
allowedSignUpDomain: z.string().nullable().optional(),
|
||||
instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
|
||||
trustSamlEmails: z.boolean().default(false).nullable().optional(),
|
||||
trustLdapEmails: z.boolean().default(false).nullable().optional()
|
||||
trustLdapEmails: z.boolean().default(false).nullable().optional(),
|
||||
trustOidcEmails: z.boolean().default(false).nullable().optional(),
|
||||
defaultAuthOrgId: z.string().uuid().nullable().optional(),
|
||||
enabledLoginMethods: z.string().array().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
|
||||
|
@ -21,7 +21,12 @@ export const UserEncryptionKeysSchema = z.object({
|
||||
tag: z.string(),
|
||||
salt: z.string(),
|
||||
verifier: z.string(),
|
||||
userId: z.string().uuid()
|
||||
userId: z.string().uuid(),
|
||||
hashedPassword: z.string().nullable().optional(),
|
||||
serverEncryptedPrivateKey: z.string().nullable().optional(),
|
||||
serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
|
||||
serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
|
||||
serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;
|
||||
|
86
backend/src/ee/routes/v1/certificate-authority-crl-router.ts
Normal file
86
backend/src/ee/routes/v1/certificate-authority-crl-router.ts
Normal file
@ -0,0 +1,86 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/crl",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get CRL of the CA",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRL.caId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRL.crl)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { crl, ca } = await server.services.certificateAuthorityCrl.getCaCrl({
|
||||
caId: req.params.caId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CA_CRL,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
crl
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
// server.route({
|
||||
// method: "GET",
|
||||
// url: "/:caId/crl/rotate",
|
||||
// config: {
|
||||
// rateLimit: writeLimit
|
||||
// },
|
||||
// onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
// schema: {
|
||||
// description: "Rotate CRL of the CA",
|
||||
// params: z.object({
|
||||
// caId: z.string().trim()
|
||||
// }),
|
||||
// response: {
|
||||
// 200: z.object({
|
||||
// message: z.string()
|
||||
// })
|
||||
// }
|
||||
// },
|
||||
// handler: async (req) => {
|
||||
// await server.services.certificateAuthority.rotateCaCrl({
|
||||
// caId: req.params.caId,
|
||||
// actor: req.permission.type,
|
||||
// actorId: req.permission.id,
|
||||
// actorAuthMethod: req.permission.authMethod,
|
||||
// actorOrgId: req.permission.orgId
|
||||
// });
|
||||
// return {
|
||||
// message: "Successfully rotated CA CRL"
|
||||
// };
|
||||
// }
|
||||
// });
|
||||
};
|
@ -1,15 +1,18 @@
|
||||
import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
|
||||
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
|
||||
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
|
||||
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
|
||||
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
|
||||
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
|
||||
import { registerGroupRouter } from "./group-router";
|
||||
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
|
||||
import { registerLdapRouter } from "./ldap-router";
|
||||
import { registerLicenseRouter } from "./license-router";
|
||||
import { registerOidcRouter } from "./oidc-router";
|
||||
import { registerOrgRoleRouter } from "./org-role-router";
|
||||
import { registerProjectRoleRouter } from "./project-role-router";
|
||||
import { registerProjectRouter } from "./project-router";
|
||||
import { registerRateLimitRouter } from "./rate-limit-router";
|
||||
import { registerSamlRouter } from "./saml-router";
|
||||
import { registerScimRouter } from "./scim-router";
|
||||
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
|
||||
@ -45,6 +48,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
|
||||
await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" });
|
||||
await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" });
|
||||
await server.register(registerRateLimitRouter, { prefix: "/rate-limit" });
|
||||
|
||||
await server.register(
|
||||
async (dynamicSecretRouter) => {
|
||||
@ -54,7 +58,21 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
{ prefix: "/dynamic-secrets" }
|
||||
);
|
||||
|
||||
await server.register(registerSamlRouter, { prefix: "/sso" });
|
||||
await server.register(
|
||||
async (pkiRouter) => {
|
||||
await pkiRouter.register(registerCaCrlRouter, { prefix: "/ca" });
|
||||
},
|
||||
{ prefix: "/pki" }
|
||||
);
|
||||
|
||||
await server.register(
|
||||
async (ssoRouter) => {
|
||||
await ssoRouter.register(registerSamlRouter);
|
||||
await ssoRouter.register(registerOidcRouter, { prefix: "/oidc" });
|
||||
},
|
||||
{ prefix: "/sso" }
|
||||
);
|
||||
|
||||
await server.register(registerScimRouter, { prefix: "/scim" });
|
||||
await server.register(registerLdapRouter, { prefix: "/ldap" });
|
||||
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
|
||||
|
@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
// eslint-disable-next-line
|
||||
async (req: IncomingMessage, user, cb) => {
|
||||
try {
|
||||
if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
|
||||
if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
|
||||
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
|
||||
|
||||
let groups: { dn: string; cn: string }[] | undefined;
|
||||
@ -70,10 +70,13 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
|
||||
}
|
||||
|
||||
const externalId = ldapConfig.uniqueUserAttribute ? user[ldapConfig.uniqueUserAttribute] : user.uidNumber;
|
||||
const username = ldapConfig.uniqueUserAttribute ? externalId : user.uid;
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
|
||||
externalId,
|
||||
username,
|
||||
ldapConfigId: ldapConfig.id,
|
||||
externalId: user.uidNumber,
|
||||
username: user.uid,
|
||||
firstName: user.givenName ?? user.cn ?? "",
|
||||
lastName: user.sn ?? "",
|
||||
email: user.mail,
|
||||
@ -138,6 +141,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
url: z.string(),
|
||||
bindDN: z.string(),
|
||||
bindPass: z.string(),
|
||||
uniqueUserAttribute: z.string(),
|
||||
searchBase: z.string(),
|
||||
searchFilter: z.string(),
|
||||
groupSearchBase: z.string(),
|
||||
@ -172,6 +176,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
url: z.string().trim(),
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
uniqueUserAttribute: z.string().trim().default("uidNumber"),
|
||||
searchBase: z.string().trim(),
|
||||
searchFilter: z.string().trim().default("(uid={{username}})"),
|
||||
groupSearchBase: z.string().trim(),
|
||||
@ -213,6 +218,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
url: z.string().trim(),
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
uniqueUserAttribute: z.string().trim(),
|
||||
searchBase: z.string().trim(),
|
||||
searchFilter: z.string().trim(),
|
||||
groupSearchBase: z.string().trim(),
|
||||
|
355
backend/src/ee/routes/v1/oidc-router.ts
Normal file
355
backend/src/ee/routes/v1/oidc-router.ts
Normal file
@ -0,0 +1,355 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-argument */
|
||||
// All the any rules are disabled because passport typesense with fastify is really poor
|
||||
|
||||
import { Authenticator, Strategy } from "@fastify/passport";
|
||||
import fastifySession from "@fastify/session";
|
||||
import RedisStore from "connect-redis";
|
||||
import { Redis } from "ioredis";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
|
||||
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
const redis = new Redis(appCfg.REDIS_URL);
|
||||
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
|
||||
|
||||
/*
|
||||
- OIDC protocol cannot work without sessions: https://github.com/panva/node-openid-client/issues/190
|
||||
- Current redis usage is not ideal and will eventually have to be refactored to use a better structure
|
||||
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
|
||||
*/
|
||||
const redisStore = new RedisStore({
|
||||
client: redis,
|
||||
prefix: "oidc-session:",
|
||||
ttl: 600 // 10 minutes
|
||||
});
|
||||
|
||||
await server.register(fastifySession, {
|
||||
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
|
||||
store: redisStore,
|
||||
cookie: {
|
||||
secure: appCfg.HTTPS_ENABLED,
|
||||
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
|
||||
}
|
||||
});
|
||||
|
||||
await server.register(passport.initialize());
|
||||
await server.register(passport.secureSession());
|
||||
|
||||
// redirect to IDP for login
|
||||
server.route({
|
||||
url: "/login",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: authRateLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgSlug: z.string().trim(),
|
||||
callbackPort: z.string().trim().optional()
|
||||
})
|
||||
},
|
||||
preValidation: [
|
||||
async (req, res) => {
|
||||
const { orgSlug, callbackPort } = req.query;
|
||||
|
||||
// ensure fresh session state per login attempt
|
||||
await req.session.regenerate();
|
||||
|
||||
req.session.set<any>("oidcOrgSlug", orgSlug);
|
||||
|
||||
if (callbackPort) {
|
||||
req.session.set<any>("callbackPort", callbackPort);
|
||||
}
|
||||
|
||||
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(orgSlug, callbackPort);
|
||||
return (
|
||||
passport.authenticate(oidcStrategy as Strategy, {
|
||||
scope: "profile email openid"
|
||||
}) as any
|
||||
)(req, res);
|
||||
}
|
||||
],
|
||||
handler: () => {}
|
||||
});
|
||||
|
||||
// callback route after login from IDP
|
||||
server.route({
|
||||
url: "/callback",
|
||||
method: "GET",
|
||||
preValidation: [
|
||||
async (req, res) => {
|
||||
const oidcOrgSlug = req.session.get<any>("oidcOrgSlug");
|
||||
const callbackPort = req.session.get<any>("callbackPort");
|
||||
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(oidcOrgSlug, callbackPort);
|
||||
|
||||
return (
|
||||
passport.authenticate(oidcStrategy as Strategy, {
|
||||
failureRedirect: "/api/v1/sso/oidc/login/error",
|
||||
session: false,
|
||||
failureMessage: true
|
||||
}) as any
|
||||
)(req, res);
|
||||
}
|
||||
],
|
||||
handler: async (req, res) => {
|
||||
await req.session.destroy();
|
||||
|
||||
if (req.passportUser.isUserCompleted) {
|
||||
return res.redirect(
|
||||
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
|
||||
);
|
||||
}
|
||||
|
||||
// signup
|
||||
return res.redirect(
|
||||
`${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/login/error",
|
||||
method: "GET",
|
||||
handler: async (req, res) => {
|
||||
await req.session.destroy();
|
||||
|
||||
return res.status(500).send({
|
||||
error: "Authentication error",
|
||||
details: req.query
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/config",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgSlug: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
isActive: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true
|
||||
}).extend({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { orgSlug } = req.query;
|
||||
const oidc = await server.services.oidc.getOidc({
|
||||
orgSlug,
|
||||
type: "external",
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod
|
||||
});
|
||||
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
|
||||
  server.route({
    method: "PATCH",
    url: "/config",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    schema: {
      body: z
        .object({
          allowedEmailDomains: z
            .string()
            .trim()
            .optional()
            .default("")
            .transform((data) => {
              if (data === "") return "";
              // Trim each ID and join with ', ' to ensure formatting
              return data
                .split(",")
                .map((id) => id.trim())
                .join(", ");
            }),
          discoveryURL: z.string().trim(),
          configurationType: z.nativeEnum(OIDCConfigurationType),
          issuer: z.string().trim(),
          authorizationEndpoint: z.string().trim(),
          jwksUri: z.string().trim(),
          tokenEndpoint: z.string().trim(),
          userinfoEndpoint: z.string().trim(),
          clientId: z.string().trim(),
          clientSecret: z.string().trim(),
          isActive: z.boolean()
        })
        .partial()
        .merge(z.object({ orgSlug: z.string() })),
      response: {
        200: OidcConfigsSchema.pick({
          id: true,
          issuer: true,
          authorizationEndpoint: true,
          configurationType: true,
          discoveryURL: true,
          jwksUri: true,
          tokenEndpoint: true,
          userinfoEndpoint: true,
          orgId: true,
          allowedEmailDomains: true,
          isActive: true
        })
      }
    },
    handler: async (req) => {
      const oidc = await server.services.oidc.updateOidcCfg({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.body
      });
      return oidc;
    }
  });

  server.route({
    method: "POST",
    url: "/config",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    schema: {
      body: z
        .object({
          allowedEmailDomains: z
            .string()
            .trim()
            .optional()
            .default("")
            .transform((data) => {
              if (data === "") return "";
              // Trim each ID and join with ', ' to ensure formatting
              return data
                .split(",")
                .map((id) => id.trim())
                .join(", ");
            }),
          configurationType: z.nativeEnum(OIDCConfigurationType),
          issuer: z.string().trim().optional().default(""),
          discoveryURL: z.string().trim().optional().default(""),
          authorizationEndpoint: z.string().trim().optional().default(""),
          jwksUri: z.string().trim().optional().default(""),
          tokenEndpoint: z.string().trim().optional().default(""),
          userinfoEndpoint: z.string().trim().optional().default(""),
          clientId: z.string().trim(),
          clientSecret: z.string().trim(),
          isActive: z.boolean(),
          orgSlug: z.string().trim()
        })
        .superRefine((data, ctx) => {
          if (data.configurationType === OIDCConfigurationType.CUSTOM) {
            if (!data.issuer) {
              ctx.addIssue({
                path: ["issuer"],
                message: "Issuer is required",
                code: z.ZodIssueCode.custom
              });
            }
            if (!data.authorizationEndpoint) {
              ctx.addIssue({
                path: ["authorizationEndpoint"],
                message: "Authorization endpoint is required",
                code: z.ZodIssueCode.custom
              });
            }
            if (!data.jwksUri) {
              ctx.addIssue({
                path: ["jwksUri"],
                message: "JWKS URI is required",
                code: z.ZodIssueCode.custom
              });
            }
            if (!data.tokenEndpoint) {
              ctx.addIssue({
                path: ["tokenEndpoint"],
                message: "Token endpoint is required",
                code: z.ZodIssueCode.custom
              });
            }
            if (!data.userinfoEndpoint) {
              ctx.addIssue({
                path: ["userinfoEndpoint"],
                message: "Userinfo endpoint is required",
                code: z.ZodIssueCode.custom
              });
            }
          } else {
            // eslint-disable-next-line no-lonely-if
            if (!data.discoveryURL) {
              ctx.addIssue({
                path: ["discoveryURL"],
                message: "Discovery URL is required",
                code: z.ZodIssueCode.custom
              });
            }
          }
        }),
      response: {
        200: OidcConfigsSchema.pick({
          id: true,
          issuer: true,
          authorizationEndpoint: true,
          configurationType: true,
          discoveryURL: true,
          jwksUri: true,
          tokenEndpoint: true,
          userinfoEndpoint: true,
          orgId: true,
          isActive: true,
          allowedEmailDomains: true
        })
      }
    },

    handler: async (req) => {
      const oidc = await server.services.oidc.createOidcCfg({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.body
      });
      return oidc;
    }
  });
};
@@ -143,7 +143,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        actorAuthMethod: req.permission.authMethod,
        projectId: req.params.workspaceId,
        ...req.query,
-       startDate: req.query.endDate || getLastMidnightDateISO(),
+       endDate: req.query.endDate,
+       startDate: req.query.startDate || getLastMidnightDateISO(),
        auditLogActor: req.query.actor,
        actor: req.permission.type
      });

backend/src/ee/routes/v1/rate-limit-router.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
import { z } from "zod";

import { RateLimitSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          rateLimit: RateLimitSchema
        })
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async () => {
      const rateLimit = await server.services.rateLimit.getRateLimits();
      if (!rateLimit) {
        throw new BadRequestError({
          name: "Get Rate Limit Error",
          message: "Rate limit configuration does not exist."
        });
      }
      return { rateLimit };
    }
  });

  server.route({
    method: "PUT",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },

    schema: {
      body: z.object({
        readRateLimit: z.number(),
        writeRateLimit: z.number(),
        secretsRateLimit: z.number(),
        authRateLimit: z.number(),
        inviteUserRateLimit: z.number(),
        mfaRateLimit: z.number(),
        creationLimit: z.number(),
        publicEndpointLimit: z.number()
      }),
      response: {
        200: z.object({
          rateLimit: RateLimitSchema
        })
      }
    },
    handler: async (req) => {
      const rateLimit = await server.services.rateLimit.updateRateLimit(req.body);
      return { rateLimit };
    }
  });
};
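As a rough, hypothetical usage sketch (not part of this change set): the mount path, instance URL, and limit values below are assumptions for illustration only; the request body simply mirrors the zod schema above, and the caller must hold a super-admin JWT.

```ts
// Hypothetical client-side call against the new rate-limit endpoint.
const BASE_URL = "https://app.infisical.com"; // assumed instance URL
const adminJwt = process.env.INFISICAL_ADMIN_JWT as string; // assumed env variable

async function updateRateLimits() {
  const res = await fetch(`${BASE_URL}/api/v1/rate-limit`, {
    // "/api/v1/rate-limit" is an assumed mount path for registerRateLimitRouter
    method: "PUT",
    headers: {
      Authorization: `Bearer ${adminJwt}`,
      "Content-Type": "application/json"
    },
    // All eight limits are plain numbers, matching the schema in the router above.
    body: JSON.stringify({
      readRateLimit: 600,
      writeRateLimit: 200,
      secretsRateLimit: 60,
      authRateLimit: 60,
      inviteUserRateLimit: 30,
      mfaRateLimit: 20,
      creationLimit: 30,
      publicEndpointLimit: 60
    })
  });
  if (!res.ok) throw new Error(`Failed to update rate limits: ${res.status}`);
  return (await res.json()) as { rateLimit: Record<string, unknown> };
}
```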
@@ -32,7 +32,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {

  const findById = async (id: string, tx?: Knex) => {
    try {
-     const doc = await accessApprovalPolicyFindQuery(tx || db, {
+     const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
        [`${TableName.AccessApprovalPolicy}.id` as "id"]: id
      });
      const formatedDoc = mergeOneToManyRelation(
@@ -54,7 +54,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {

  const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
    try {
-     const docs = await accessApprovalPolicyFindQuery(tx || db, filter);
+     const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
      const formatedDoc = mergeOneToManyRelation(
        docs,
        "id",

@@ -14,7 +14,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {

  const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
    try {
-     const docs = await db(TableName.AccessApprovalRequest)
+     const docs = await db
+       .replicaNode()(TableName.AccessApprovalRequest)
        .whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)

        .leftJoin(
@@ -170,7 +171,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {

  const findById = async (id: string, tx?: Knex) => {
    try {
-     const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db);
+     const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
      const docs = await sql;
      const formatedDoc = sqlNestRelationships({
        data: docs,
@@ -207,7 +208,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {

  const getCount = async ({ projectId }: { projectId: string }) => {
    try {
-     const accessRequests = await db(TableName.AccessApprovalRequest)
+     const accessRequests = await db
+       .replicaNode()(TableName.AccessApprovalRequest)
        .leftJoin(
          TableName.AccessApprovalPolicy,
          `${TableName.AccessApprovalRequest}.policyId`,

@@ -27,7 +27,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
    tx?: Knex
  ) => {
    try {
-     const sqlQuery = (tx || db)(TableName.AuditLog)
+     const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
        .where(
          stripUndefinedInWhere({
            projectId,

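The `tx || db.replicaNode()` change recurs across these DAL hunks: reads that run inside a transaction keep using the primary connection, while standalone reads are routed to a read replica, and writes never touch the replica. A minimal sketch of the idea; the `TDbClient` shape and table name here are assumptions for illustration, not the project's actual definitions:

```ts
import { Knex } from "knex";

// Assumed shape: a Knex instance extended with a replicaNode() accessor
// that returns a Knex instance bound to a read replica.
type TDbClient = Knex & { replicaNode: () => Knex };

// Read path: inside a transaction use `tx` (primary); otherwise query the replica.
const findAuditLogs = async (db: TDbClient, projectId: string, tx?: Knex.Transaction) => {
  return (tx || db.replicaNode())("audit_logs").where({ projectId }).select("*");
};

// Write path: always the primary connection (never the replica).
const insertAuditLog = async (db: TDbClient, row: object, tx?: Knex.Transaction) => {
  return (tx || db)("audit_logs").insert(row);
};
```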
@@ -45,18 +45,29 @@ export const auditLogQueueServiceFactory = ({
    const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
    let { orgId } = job.data;
    const MS_IN_DAY = 24 * 60 * 60 * 1000;
+   let project;

    if (!orgId) {
      // it will never be undefined for both org and project id
      // TODO(akhilmhdh): use caching here in dal to avoid db calls
-     const project = await projectDAL.findById(projectId as string);
+     project = await projectDAL.findById(projectId as string);
      orgId = project.orgId;
    }

    const plan = await licenseService.getPlan(orgId);
-   const ttl = plan.auditLogsRetentionDays * MS_IN_DAY;
-   // skip inserting if audit log retention is 0 meaning its not supported
-   if (ttl === 0) return;
+   if (plan.auditLogsRetentionDays === 0) {
+     // skip inserting if audit log retention is 0 meaning its not supported
+     return;
+   }
+
+   // For project actions, set TTL to project-level audit log retention config
+   // This condition ensures that the plan's audit log retention days cannot be bypassed
+   const ttlInDays =
+     project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
+       ? project.auditLogsRetentionDays
+       : plan.auditLogsRetentionDays;
+
+   const ttl = ttlInDays * MS_IN_DAY;

    const auditLog = await auditLogDAL.create({
      actor: actor.type,

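The retention change above reduces to: skip the insert entirely when the plan's retention is zero, otherwise clamp the effective retention to the smaller of the project-level and plan-level values. A standalone sketch of that calculation, with illustrative names rather than the service's actual signature:

```ts
const MS_IN_DAY = 24 * 60 * 60 * 1000;

// Returns the TTL in milliseconds, or null when audit logs are not retained at all.
const resolveAuditLogTtl = (
  planRetentionDays: number,
  projectRetentionDays?: number | null
): number | null => {
  if (planRetentionDays === 0) return null; // retention not supported on this plan

  // A project may shorten retention, but it can never exceed what the plan allows.
  const ttlInDays =
    projectRetentionDays && projectRetentionDays < planRetentionDays
      ? projectRetentionDays
      : planRetentionDays;

  return ttlInDays * MS_IN_DAY;
};

// e.g. resolveAuditLogTtl(30, 7) === 7 * MS_IN_DAY, while resolveAuditLogTtl(30, 90) === 30 * MS_IN_DAY
```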
@@ -1,5 +1,6 @@
  import { TProjectPermission } from "@app/lib/types";
  import { ActorType } from "@app/services/auth/auth-type";
+ import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
  import { TIdentityTrustedIp } from "@app/services/identity/identity-types";

  export type TListProjectAuditLogDTO = {
@@ -64,25 +65,31 @@ export enum EventType {
  ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
  UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
  GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
  REVOKE_IDENTITY_UNIVERSAL_AUTH = "revoke-identity-universal-auth",
  LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
  ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
  UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
  GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
  REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
  CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
  REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
  GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
  GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
  LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
  ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
  UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
  REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
  GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
  LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
  ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
  UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
  REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
  GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
  LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
  ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
  UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
  GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
  REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
  CREATE_ENVIRONMENT = "create-environment",
  UPDATE_ENVIRONMENT = "update-environment",
  DELETE_ENVIRONMENT = "delete-environment",
@@ -104,7 +111,21 @@ export enum EventType {
  SECRET_APPROVAL_MERGED = "secret-approval-merged",
  SECRET_APPROVAL_REQUEST = "secret-approval-request",
  SECRET_APPROVAL_CLOSED = "secret-approval-closed",
- SECRET_APPROVAL_REOPENED = "secret-approval-reopened"
+ SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
+ CREATE_CA = "create-certificate-authority",
+ GET_CA = "get-certificate-authority",
+ UPDATE_CA = "update-certificate-authority",
+ DELETE_CA = "delete-certificate-authority",
+ GET_CA_CSR = "get-certificate-authority-csr",
+ GET_CA_CERT = "get-certificate-authority-cert",
+ SIGN_INTERMEDIATE = "sign-intermediate",
+ IMPORT_CA_CERT = "import-certificate-authority-cert",
+ GET_CA_CRL = "get-certificate-authority-crl",
+ ISSUE_CERT = "issue-cert",
+ GET_CERT = "get-cert",
+ DELETE_CERT = "delete-cert",
+ REVOKE_CERT = "revoke-cert",
+ GET_CERT_BODY = "get-cert-body"
}

interface UserActorMetadata {
@@ -419,6 +440,13 @@ interface GetIdentityUniversalAuthEvent {
  };
}

+ interface DeleteIdentityUniversalAuthEvent {
+   type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH;
+   metadata: {
+     identityId: string;
+   };
+ }
+
interface LoginIdentityKubernetesAuthEvent {
  type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
  metadata: {
@@ -442,6 +470,13 @@ interface AddIdentityKubernetesAuthEvent {
  };
}

+ interface DeleteIdentityKubernetesAuthEvent {
+   type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH;
+   metadata: {
+     identityId: string;
+   };
+ }
+
interface UpdateIdentityKubernetesAuthEvent {
  type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
  metadata: {
@@ -478,6 +513,14 @@ interface GetIdentityUniversalAuthClientSecretsEvent {
  };
}

+ interface GetIdentityUniversalAuthClientSecretByIdEvent {
+   type: EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID;
+   metadata: {
+     identityId: string;
+     clientSecretId: string;
+   };
+ }
+
interface RevokeIdentityUniversalAuthClientSecretEvent {
  type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
  metadata: {
@@ -510,6 +553,13 @@ interface AddIdentityGcpAuthEvent {
  };
}

+ interface DeleteIdentityGcpAuthEvent {
+   type: EventType.REVOKE_IDENTITY_GCP_AUTH;
+   metadata: {
+     identityId: string;
+   };
+ }
+
interface UpdateIdentityGcpAuthEvent {
  type: EventType.UPDATE_IDENTITY_GCP_AUTH;
  metadata: {
@@ -555,6 +605,13 @@ interface AddIdentityAwsAuthEvent {
  };
}

+ interface DeleteIdentityAwsAuthEvent {
+   type: EventType.REVOKE_IDENTITY_AWS_AUTH;
+   metadata: {
+     identityId: string;
+   };
+ }
+
interface UpdateIdentityAwsAuthEvent {
  type: EventType.UPDATE_IDENTITY_AWS_AUTH;
  metadata: {
@@ -598,6 +655,13 @@ interface AddIdentityAzureAuthEvent {
  };
}

+ interface DeleteIdentityAzureAuthEvent {
+   type: EventType.REVOKE_IDENTITY_AZURE_AUTH;
+   metadata: {
+     identityId: string;
+   };
+ }
+
interface UpdateIdentityAzureAuthEvent {
  type: EventType.UPDATE_IDENTITY_AZURE_AUTH;
  metadata: {
@@ -843,6 +907,125 @@ interface SecretApprovalRequest {
  };
}

interface CreateCa {
  type: EventType.CREATE_CA;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface GetCa {
  type: EventType.GET_CA;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface UpdateCa {
  type: EventType.UPDATE_CA;
  metadata: {
    caId: string;
    dn: string;
    status: CaStatus;
  };
}

interface DeleteCa {
  type: EventType.DELETE_CA;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface GetCaCsr {
  type: EventType.GET_CA_CSR;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface GetCaCert {
  type: EventType.GET_CA_CERT;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface SignIntermediate {
  type: EventType.SIGN_INTERMEDIATE;
  metadata: {
    caId: string;
    dn: string;
    serialNumber: string;
  };
}

interface ImportCaCert {
  type: EventType.IMPORT_CA_CERT;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface GetCaCrl {
  type: EventType.GET_CA_CRL;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface IssueCert {
  type: EventType.ISSUE_CERT;
  metadata: {
    caId: string;
    dn: string;
    serialNumber: string;
  };
}

interface GetCert {
  type: EventType.GET_CERT;
  metadata: {
    certId: string;
    cn: string;
    serialNumber: string;
  };
}

interface DeleteCert {
  type: EventType.DELETE_CERT;
  metadata: {
    certId: string;
    cn: string;
    serialNumber: string;
  };
}

interface RevokeCert {
  type: EventType.REVOKE_CERT;
  metadata: {
    certId: string;
    cn: string;
    serialNumber: string;
  };
}

interface GetCertBody {
  type: EventType.GET_CERT_BODY;
  metadata: {
    certId: string;
    cn: string;
    serialNumber: string;
  };
}

export type Event =
  | GetSecretsEvent
  | GetSecretEvent
@@ -869,24 +1052,30 @@ export type Event =
  | LoginIdentityUniversalAuthEvent
  | AddIdentityUniversalAuthEvent
  | UpdateIdentityUniversalAuthEvent
+ | DeleteIdentityUniversalAuthEvent
  | GetIdentityUniversalAuthEvent
  | LoginIdentityKubernetesAuthEvent
+ | DeleteIdentityKubernetesAuthEvent
  | AddIdentityKubernetesAuthEvent
  | UpdateIdentityKubernetesAuthEvent
  | GetIdentityKubernetesAuthEvent
  | CreateIdentityUniversalAuthClientSecretEvent
  | GetIdentityUniversalAuthClientSecretsEvent
+ | GetIdentityUniversalAuthClientSecretByIdEvent
  | RevokeIdentityUniversalAuthClientSecretEvent
  | LoginIdentityGcpAuthEvent
  | AddIdentityGcpAuthEvent
+ | DeleteIdentityGcpAuthEvent
  | UpdateIdentityGcpAuthEvent
  | GetIdentityGcpAuthEvent
  | LoginIdentityAwsAuthEvent
  | AddIdentityAwsAuthEvent
  | UpdateIdentityAwsAuthEvent
  | GetIdentityAwsAuthEvent
+ | DeleteIdentityAwsAuthEvent
  | LoginIdentityAzureAuthEvent
  | AddIdentityAzureAuthEvent
+ | DeleteIdentityAzureAuthEvent
  | UpdateIdentityAzureAuthEvent
  | GetIdentityAzureAuthEvent
  | CreateEnvironmentEvent
@@ -910,4 +1099,18 @@ export type Event =
  | SecretApprovalMerge
  | SecretApprovalClosed
  | SecretApprovalRequest
- | SecretApprovalReopened;
+ | SecretApprovalReopened
+ | CreateCa
+ | GetCa
+ | UpdateCa
+ | DeleteCa
+ | GetCaCsr
+ | GetCaCert
+ | SignIntermediate
+ | ImportCaCert
+ | GetCaCrl
+ | IssueCert
+ | GetCert
+ | DeleteCert
+ | RevokeCert
+ | GetCertBody;
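Each new event gets its own interface and is folded into the `Event` union, so audit-log payloads stay discriminated on `type`. A hedged sketch of how a producer might build and inspect one of the new CA events; the helper names and the module path are illustrative assumptions, not code from this diff:

```ts
// Assumes EventType and the exported Event union from the audit-log types module above.
import { Event, EventType } from "./audit-log-types";

// Hypothetical helper: construct a CREATE_CA event with its required metadata.
const buildCreateCaEvent = (caId: string, dn: string): Event => ({
  type: EventType.CREATE_CA,
  metadata: { caId, dn }
});

// Because Event is a discriminated union, switching on `type` narrows `metadata` safely.
const describe = (event: Event): string => {
  switch (event.type) {
    case EventType.CREATE_CA:
      return `Created CA ${event.metadata.caId} (${event.metadata.dn})`;
    case EventType.REVOKE_CERT:
      return `Revoked certificate ${event.metadata.serialNumber}`;
    default:
      return event.type;
  }
};
```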
@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TCertificateAuthorityCrlDALFactory = ReturnType<typeof certificateAuthorityCrlDALFactory>;

export const certificateAuthorityCrlDALFactory = (db: TDbClient) => {
  const caCrlOrm = ormify(db, TableName.CertificateAuthorityCrl);
  return caCrlOrm;
};
@ -0,0 +1,172 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
|
||||
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
|
||||
|
||||
import { TGetCrl } from "./certificate-authority-crl-types";
|
||||
|
||||
type TCertificateAuthorityCrlServiceFactoryDep = {
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
|
||||
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
|
||||
kmsService: Pick<TKmsServiceFactory, "decrypt" | "generateKmsKey">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
|
||||
|
||||
export const certificateAuthorityCrlServiceFactory = ({
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCrlDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
permissionService,
|
||||
licenseService
|
||||
}: TCertificateAuthorityCrlServiceFactoryDep) => {
|
||||
/**
|
||||
* Return the Certificate Revocation List (CRL) for CA with id [caId]
|
||||
*/
|
||||
const getCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCrl) => {
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
ca.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSub.CertificateAuthorities
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.caCrl)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to get CA certificate revocation list (CRL) due to plan restriction. Upgrade plan to get the CA CRL."
|
||||
});
|
||||
|
||||
const caCrl = await certificateAuthorityCrlDAL.findOne({ caId: ca.id });
|
||||
if (!caCrl) throw new BadRequestError({ message: "CRL not found" });
|
||||
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const decryptedCrl = await kmsService.decrypt({
|
||||
kmsId: keyId,
|
||||
cipherTextBlob: caCrl.encryptedCrl
|
||||
});
|
||||
|
||||
const crl = new x509.X509Crl(decryptedCrl);
|
||||
|
||||
const base64crl = crl.toString("base64");
|
||||
const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
|
||||
|
||||
return {
|
||||
crl: crlPem,
|
||||
ca
|
||||
};
|
||||
};
|
||||
|
||||
// const rotateCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TRotateCrlDTO) => {
|
||||
// const ca = await certificateAuthorityDAL.findById(caId);
|
||||
// if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
|
||||
// const { permission } = await permissionService.getProjectPermission(
|
||||
// actor,
|
||||
// actorId,
|
||||
// ca.projectId,
|
||||
// actorAuthMethod,
|
||||
// actorOrgId
|
||||
// );
|
||||
|
||||
// ForbiddenError.from(permission).throwUnlessCan(
|
||||
// ProjectPermissionActions.Read,
|
||||
// ProjectPermissionSub.CertificateAuthorities
|
||||
// );
|
||||
|
||||
// const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id });
|
||||
|
||||
// const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
|
||||
|
||||
// const keyId = await getProjectKmsCertificateKeyId({
|
||||
// projectId: ca.projectId,
|
||||
// projectDAL,
|
||||
// kmsService
|
||||
// });
|
||||
|
||||
// const privateKey = await kmsService.decrypt({
|
||||
// kmsId: keyId,
|
||||
// cipherTextBlob: caSecret.encryptedPrivateKey
|
||||
// });
|
||||
|
||||
// const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" });
|
||||
// const sk = await crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [
|
||||
// "sign"
|
||||
// ]);
|
||||
|
||||
// const revokedCerts = await certificateDAL.find({
|
||||
// caId: ca.id,
|
||||
// status: CertStatus.REVOKED
|
||||
// });
|
||||
|
||||
// const crl = await x509.X509CrlGenerator.create({
|
||||
// issuer: ca.dn,
|
||||
// thisUpdate: new Date(),
|
||||
// nextUpdate: new Date("2025/12/12"),
|
||||
// entries: revokedCerts.map((revokedCert) => {
|
||||
// return {
|
||||
// serialNumber: revokedCert.serialNumber,
|
||||
// revocationDate: new Date(revokedCert.revokedAt as Date),
|
||||
// reason: revokedCert.revocationReason as number,
|
||||
// invalidity: new Date("2022/01/01"),
|
||||
// issuer: ca.dn
|
||||
// };
|
||||
// }),
|
||||
// signingAlgorithm: alg,
|
||||
// signingKey: sk
|
||||
// });
|
||||
|
||||
// const { cipherTextBlob: encryptedCrl } = await kmsService.encrypt({
|
||||
// kmsId: keyId,
|
||||
// plainText: Buffer.from(new Uint8Array(crl.rawData))
|
||||
// });
|
||||
|
||||
// await certificateAuthorityCrlDAL.update(
|
||||
// {
|
||||
// caId: ca.id
|
||||
// },
|
||||
// {
|
||||
// encryptedCrl
|
||||
// }
|
||||
// );
|
||||
|
||||
// const base64crl = crl.toString("base64");
|
||||
// const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
|
||||
|
||||
// return {
|
||||
// crl: crlPem
|
||||
// };
|
||||
// };
|
||||
|
||||
return {
|
||||
getCaCrl
|
||||
// rotateCaCrl
|
||||
};
|
||||
};
|
@@ -0,0 +1,5 @@
import { TProjectPermission } from "@app/lib/types";

export type TGetCrl = {
  caId: string;
} & Omit<TProjectPermission, "projectId">;
@@ -12,7 +12,10 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {

  const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
    try {
-     const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
+     const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
+       .count("*")
+       .where({ dynamicSecretId })
+       .first();
      return parseInt(doc || "0", 10);
    } catch (error) {
      throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
@@ -21,7 +24,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {

  const findById = async (id: string, tx?: Knex) => {
    try {
-     const doc = await (tx || db)(TableName.DynamicSecretLease)
+     const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
        .where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
        .first()
        .join(
|
@ -12,7 +12,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
|
||||
try {
|
||||
const query = (tx || db)(TableName.Groups)
|
||||
const query = (tx || db.replicaNode())(TableName.Groups)
|
||||
// eslint-disable-next-line
|
||||
.where(buildFindFilter(filter))
|
||||
.select(selectAllTableCols(TableName.Groups));
|
||||
@ -32,7 +32,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findByOrgId = async (orgId: string, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await (tx || db)(TableName.Groups)
|
||||
const docs = await (tx || db.replicaNode())(TableName.Groups)
|
||||
.where(`${TableName.Groups}.orgId`, orgId)
|
||||
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
|
||||
.select(selectAllTableCols(TableName.Groups))
|
||||
@ -74,11 +74,12 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
username?: string;
|
||||
}) => {
|
||||
try {
|
||||
let query = db(TableName.OrgMembership)
|
||||
let query = db
|
||||
.replicaNode()(TableName.OrgMembership)
|
||||
.where(`${TableName.OrgMembership}.orgId`, orgId)
|
||||
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.UserGroupMembership, function () {
|
||||
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
|
||||
.leftJoin(TableName.UserGroupMembership, (bd) => {
|
||||
bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
"=",
|
||||
db.raw("?", [groupId])
|
||||
|
@ -18,7 +18,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
*/
|
||||
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => {
|
||||
try {
|
||||
const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership)
|
||||
const userProjectMemberships: string[] = await (tx || db.replicaNode())(TableName.ProjectMembership)
|
||||
.where(`${TableName.ProjectMembership}.userId`, userId)
|
||||
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
|
||||
.pluck(`${TableName.ProjectMembership}.projectId`);
|
||||
@ -43,7 +43,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
// special query
|
||||
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
|
||||
try {
|
||||
const usernameDocs: string[] = await db(TableName.UserGroupMembership)
|
||||
const usernameDocs: string[] = await db
|
||||
.replicaNode()(TableName.UserGroupMembership)
|
||||
.join(
|
||||
TableName.GroupProjectMembership,
|
||||
`${TableName.UserGroupMembership}.groupId`,
|
||||
@ -73,7 +74,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
try {
|
||||
// get list of groups in the project with id [projectId]
|
||||
// that that are not the group with id [groupId]
|
||||
const groups: string[] = await (tx || db)(TableName.GroupProjectMembership)
|
||||
const groups: string[] = await (tx || db.replicaNode())(TableName.GroupProjectMembership)
|
||||
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
|
||||
.whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
|
||||
.pluck(`${TableName.GroupProjectMembership}.groupId`);
|
||||
@ -83,8 +84,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
.where(`${TableName.UserGroupMembership}.groupId`, groupId)
|
||||
.where(`${TableName.UserGroupMembership}.isPending`, false)
|
||||
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.ProjectMembership, function () {
|
||||
this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
|
||||
.leftJoin(TableName.ProjectMembership, (bd) => {
|
||||
bd.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
|
||||
`${TableName.ProjectMembership}.projectId`,
|
||||
"=",
|
||||
db.raw("?", [projectId])
|
||||
@ -107,9 +108,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
|
||||
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
|
||||
)
|
||||
.where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
|
||||
.whereNotIn(`${TableName.UserGroupMembership}.userId`, function () {
|
||||
.whereNotIn(`${TableName.UserGroupMembership}.userId`, (bd) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.select(`${TableName.UserGroupMembership}.userId`)
|
||||
bd.select(`${TableName.UserGroupMembership}.userId`)
|
||||
.from(TableName.UserGroupMembership)
|
||||
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups);
|
||||
});
|
||||
|
@ -23,6 +23,8 @@ import {
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
@ -30,7 +32,9 @@ import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membe
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
@ -50,7 +54,7 @@ import {
|
||||
TTestLdapConnectionDTO,
|
||||
TUpdateLdapCfgDTO
|
||||
} from "./ldap-config-types";
|
||||
import { testLDAPConfig } from "./ldap-fns";
|
||||
import { searchGroups, testLDAPConfig } from "./ldap-fns";
|
||||
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
|
||||
|
||||
type TLdapConfigServiceFactoryDep = {
|
||||
@ -73,11 +77,19 @@ type TLdapConfigServiceFactoryDep = {
|
||||
>;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
|
||||
| "create"
|
||||
| "findOne"
|
||||
| "transaction"
|
||||
| "updateById"
|
||||
| "findUserEncKeyByUserIdsBatch"
|
||||
| "find"
|
||||
| "findUserEncKeyByUserId"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
|
||||
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
};
|
||||
|
||||
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
|
||||
@ -97,7 +109,9 @@ export const ldapConfigServiceFactory = ({
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService
|
||||
}: TLdapConfigServiceFactoryDep) => {
|
||||
const createLdapCfg = async ({
|
||||
actor,
|
||||
@ -109,6 +123,7 @@ export const ldapConfigServiceFactory = ({
|
||||
url,
|
||||
bindDN,
|
||||
bindPass,
|
||||
uniqueUserAttribute,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
@ -187,6 +202,7 @@ export const ldapConfigServiceFactory = ({
|
||||
encryptedBindPass,
|
||||
bindPassIV,
|
||||
bindPassTag,
|
||||
uniqueUserAttribute,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
@ -209,6 +225,7 @@ export const ldapConfigServiceFactory = ({
|
||||
url,
|
||||
bindDN,
|
||||
bindPass,
|
||||
uniqueUserAttribute,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
@ -231,7 +248,8 @@ export const ldapConfigServiceFactory = ({
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
groupSearchFilter
|
||||
groupSearchFilter,
|
||||
uniqueUserAttribute
|
||||
};
|
||||
|
||||
const orgBot = await orgBotDAL.findOne({ orgId });
|
||||
@ -269,7 +287,7 @@ export const ldapConfigServiceFactory = ({
|
||||
return ldapConfig;
|
||||
};
|
||||
|
||||
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => {
|
||||
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
|
||||
const ldapConfig = await ldapConfigDAL.findOne(filter);
|
||||
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
|
||||
|
||||
@ -332,6 +350,7 @@ export const ldapConfigServiceFactory = ({
|
||||
url: ldapConfig.url,
|
||||
bindDN,
|
||||
bindPass,
|
||||
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
|
||||
searchBase: ldapConfig.searchBase,
|
||||
searchFilter: ldapConfig.searchFilter,
|
||||
groupSearchBase: ldapConfig.groupSearchBase,
|
||||
@ -368,6 +387,7 @@ export const ldapConfigServiceFactory = ({
|
||||
url: ldapConfig.url,
|
||||
bindDN: ldapConfig.bindDN,
|
||||
bindCredentials: ldapConfig.bindPass,
|
||||
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
|
||||
searchBase: ldapConfig.searchBase,
|
||||
searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
|
||||
// searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
|
||||
@ -398,6 +418,13 @@ export const ldapConfigServiceFactory = ({
|
||||
}: TLdapLoginDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with LDAP is disabled by administrator."
|
||||
});
|
||||
}
|
||||
|
||||
let userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
@ -437,6 +464,21 @@ export const ldapConfigServiceFactory = ({
|
||||
}
|
||||
});
|
||||
} else {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
|
||||
// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
|
||||
});
|
||||
}
|
||||
|
||||
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
|
||||
// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
|
||||
});
|
||||
}
|
||||
|
||||
userAlias = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
if (serverCfg.trustSamlEmails) {
|
||||
@ -488,7 +530,7 @@ export const ldapConfigServiceFactory = ({
|
||||
if (!orgMembership) {
|
||||
await orgMembershipDAL.create(
|
||||
{
|
||||
userId: userAlias.userId,
|
||||
userId: newUser.id,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
@ -592,12 +634,14 @@ export const ldapConfigServiceFactory = ({
|
||||
});
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
@ -619,6 +663,22 @@ export const ldapConfigServiceFactory = ({
|
||||
}
|
||||
);
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.EmailVerification,
|
||||
subjectLine: "Infisical confirmation code",
|
||||
recipients: [user.email],
|
||||
substitutions: {
|
||||
code: token
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return { isUserCompleted, providerAuthToken };
|
||||
};
|
||||
|
||||
@ -664,11 +724,25 @@ export const ldapConfigServiceFactory = ({
|
||||
message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
|
||||
});
|
||||
|
||||
const ldapConfig = await ldapConfigDAL.findOne({
|
||||
id: ldapConfigId,
|
||||
orgId
|
||||
const ldapConfig = await getLdapCfg({
|
||||
orgId,
|
||||
id: ldapConfigId
|
||||
});
|
||||
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
|
||||
|
||||
if (!ldapConfig.groupSearchBase) {
|
||||
throw new BadRequestError({
|
||||
message: "Configure a group search base in your LDAP configuration in order to proceed."
|
||||
});
|
||||
}
|
||||
|
||||
const groupSearchFilter = `(cn=${ldapGroupCN})`;
|
||||
const groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
|
||||
|
||||
if (!groups.some((g) => g.cn === ldapGroupCN)) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to find LDAP Group CN"
|
||||
});
|
||||
}
|
||||
|
||||
const group = await groupDAL.findOne({ slug: groupSlug, orgId });
|
||||
if (!group) throw new BadRequestError({ message: "Failed to find group" });
|
||||
|
@ -7,6 +7,7 @@ export type TLDAPConfig = {
|
||||
url: string;
|
||||
bindDN: string;
|
||||
bindPass: string;
|
||||
uniqueUserAttribute: string;
|
||||
searchBase: string;
|
||||
groupSearchBase: string;
|
||||
groupSearchFilter: string;
|
||||
@ -19,6 +20,7 @@ export type TCreateLdapCfgDTO = {
|
||||
url: string;
|
||||
bindDN: string;
|
||||
bindPass: string;
|
||||
uniqueUserAttribute: string;
|
||||
searchBase: string;
|
||||
searchFilter: string;
|
||||
groupSearchBase: string;
|
||||
@ -33,6 +35,7 @@ export type TUpdateLdapCfgDTO = {
|
||||
url: string;
|
||||
bindDN: string;
|
||||
bindPass: string;
|
||||
uniqueUserAttribute: string;
|
||||
searchBase: string;
|
||||
searchFilter: string;
|
||||
groupSearchBase: string;
|
||||
|
@@ -10,7 +10,8 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => {

  const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
    try {
-     const docs = await db(TableName.LdapGroupMap)
+     const docs = await db
+       .replicaNode()(TableName.LdapGroupMap)
        .where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
        .join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
        .select(selectAllTableCols(TableName.LdapGroupMap))
||||
|
@ -7,6 +7,8 @@ export const getDefaultOnPremFeatures = () => {
|
||||
workspacesUsed: 0,
|
||||
memberLimit: null,
|
||||
membersUsed: 0,
|
||||
identityLimit: null,
|
||||
identitiesUsed: 0,
|
||||
environmentLimit: null,
|
||||
environmentsUsed: 0,
|
||||
secretVersioning: true,
|
||||
@ -25,6 +27,7 @@ export const getDefaultOnPremFeatures = () => {
|
||||
trial_end: null,
|
||||
has_used_trial: true,
|
||||
secretApproval: false,
|
||||
secretRotation: true
|
||||
secretRotation: true,
|
||||
caCrl: false
|
||||
};
|
||||
};
|
||||
|
@ -15,6 +15,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
|
||||
membersUsed: 0,
|
||||
environmentLimit: null,
|
||||
environmentsUsed: 0,
|
||||
identityLimit: null,
|
||||
identitiesUsed: 0,
|
||||
dynamicSecret: false,
|
||||
secretVersioning: true,
|
||||
pitRecovery: false,
|
||||
@ -27,6 +29,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
|
||||
auditLogStreams: false,
|
||||
auditLogStreamLimit: 3,
|
||||
samlSSO: false,
|
||||
oidcSSO: false,
|
||||
scim: false,
|
||||
ldap: false,
|
||||
groups: false,
|
||||
@ -34,7 +37,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
|
||||
trial_end: null,
|
||||
has_used_trial: true,
|
||||
secretApproval: false,
|
||||
secretRotation: true
|
||||
secretRotation: true,
|
||||
caCrl: false
|
||||
});
|
||||
|
||||
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
|
||||
|
@@ -9,7 +9,7 @@ export type TLicenseDALFactory = ReturnType<typeof licenseDALFactory>;
export const licenseDALFactory = (db: TDbClient) => {
  const countOfOrgMembers = async (orgId: string | null, tx?: Knex) => {
    try {
-     const doc = await (tx || db)(TableName.OrgMembership)
+     const doc = await (tx || db.replicaNode())(TableName.OrgMembership)
        .where({ status: OrgMembershipStatus.Accepted })
        .andWhere((bd) => {
          if (orgId) {
@@ -19,11 +19,44 @@ export const licenseDALFactory = (db: TDbClient) => {
        .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
        .where(`${TableName.Users}.isGhost`, false)
        .count();
-     return doc?.[0].count;
+     return Number(doc?.[0].count);
    } catch (error) {
      throw new DatabaseError({ error, name: "Count of Org Members" });
    }
  };

- return { countOfOrgMembers };
+ const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => {
+   try {
+     // count org users
+     const userDoc = await (tx || db)(TableName.OrgMembership)
+       .where({ status: OrgMembershipStatus.Accepted })
+       .andWhere((bd) => {
+         if (orgId) {
+           void bd.where({ orgId });
+         }
+       })
+       .join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
+       .where(`${TableName.Users}.isGhost`, false)
+       .count();
+
+     const userCount = Number(userDoc?.[0].count);
+
+     // count org identities
+     const identityDoc = await (tx || db)(TableName.IdentityOrgMembership)
+       .where((bd) => {
+         if (orgId) {
+           void bd.where({ orgId });
+         }
+       })
+       .count();
+
+     const identityCount = Number(identityDoc?.[0].count);
+
+     return userCount + identityCount;
+   } catch (error) {
+     throw new DatabaseError({ error, name: "Count of Org Users + Identities" });
+   }
+ };
+
+ return { countOfOrgMembers, countOrgUsersAndIdentities };
};

|
@ -155,6 +155,7 @@ export const licenseServiceFactory = ({
|
||||
LICENSE_SERVER_CLOUD_PLAN_TTL,
|
||||
JSON.stringify(currentPlan)
|
||||
);
|
||||
|
||||
return currentPlan;
|
||||
}
|
||||
} catch (error) {
|
||||
@ -204,16 +205,22 @@ export const licenseServiceFactory = ({
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org) throw new BadRequestError({ message: "Org not found" });
|
||||
|
||||
const count = await licenseDAL.countOfOrgMembers(orgId);
|
||||
const quantity = await licenseDAL.countOfOrgMembers(orgId);
|
||||
const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId);
|
||||
if (org?.customerId) {
|
||||
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
|
||||
quantity: count
|
||||
quantity,
|
||||
quantityIdentities
|
||||
});
|
||||
}
|
||||
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
|
||||
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
|
||||
const usedSeats = await licenseDAL.countOfOrgMembers(null);
|
||||
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
|
||||
const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null);
|
||||
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
|
||||
usedSeats,
|
||||
usedIdentitySeats
|
||||
});
|
||||
}
|
||||
await refreshPlan(orgId);
|
||||
};
|
||||
@ -575,6 +582,9 @@ export const licenseServiceFactory = ({
|
||||
getInstanceType() {
|
||||
return instanceType;
|
||||
},
|
||||
get onPremFeatures() {
|
||||
return onPremFeatures;
|
||||
},
|
||||
getPlan,
|
||||
updateSubscriptionOrgMemberCount,
|
||||
refreshPlan,
|
||||
|
@@ -31,6 +31,8 @@ export type TFeatureSet = {
  dynamicSecret: false;
  memberLimit: null;
  membersUsed: 0;
+ identityLimit: null;
+ identitiesUsed: 0;
  environmentLimit: null;
  environmentsUsed: 0;
  secretVersioning: true;
@@ -44,6 +46,7 @@ export type TFeatureSet = {
  auditLogStreams: false;
  auditLogStreamLimit: 3;
  samlSSO: false;
+ oidcSSO: false;
  scim: false;
  ldap: false;
  groups: false;
@@ -52,6 +55,7 @@ export type TFeatureSet = {
  has_used_trial: true;
  secretApproval: false;
  secretRotation: true;
+ caCrl: false;
};

export type TOrgPlansTableDTO = {

||||
|
backend/src/ee/services/oidc/oidc-config-dal.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TOidcConfigDALFactory = ReturnType<typeof oidcConfigDALFactory>;

export const oidcConfigDALFactory = (db: TDbClient) => {
  const oidcCfgOrm = ormify(db, TableName.OidcConfig);

  return { ...oidcCfgOrm };
};
645
backend/src/ee/services/oidc/oidc-config-service.ts
Normal file
645
backend/src/ee/services/oidc/oidc-config-service.ts
Normal file
@ -0,0 +1,645 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
|
||||
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
decryptSymmetric,
|
||||
encryptSymmetric,
|
||||
generateAsymmetricKeyPair,
|
||||
generateSymmetricKey,
|
||||
infisicalSymmetricDecrypt,
|
||||
infisicalSymmetricEncypt
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
|
||||
|
||||
import { TOidcConfigDALFactory } from "./oidc-config-dal";
|
||||
import {
|
||||
OIDCConfigurationType,
|
||||
TCreateOidcCfgDTO,
|
||||
TGetOidcCfgDTO,
|
||||
TOidcLoginDTO,
|
||||
TUpdateOidcCfgDTO
|
||||
} from "./oidc-config-types";

type TOidcConfigServiceFactoryDep = {
  userDAL: Pick<
    TUserDALFactory,
    "create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
  >;
  userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
  orgDAL: Pick<
    TOrgDALFactory,
    "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
  >;
  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
  smtpService: Pick<TSmtpService, "sendMail">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};

export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;

export const oidcConfigServiceFactory = ({
  orgDAL,
  orgMembershipDAL,
  userDAL,
  userAliasDAL,
  licenseService,
  permissionService,
  tokenService,
  orgBotDAL,
  smtpService,
  oidcConfigDAL
}: TOidcConfigServiceFactoryDep) => {
  const getOidc = async (dto: TGetOidcCfgDTO) => {
    const org = await orgDAL.findOne({ slug: dto.orgSlug });
    if (!org) {
      throw new BadRequestError({
        message: "Organization not found",
        name: "OrgNotFound"
      });
    }
    if (dto.type === "external") {
      const { permission } = await permissionService.getOrgPermission(
        dto.actor,
        dto.actorId,
        org.id,
        dto.actorAuthMethod,
        dto.actorOrgId
      );
      ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
    }

    const oidcCfg = await oidcConfigDAL.findOne({
      orgId: org.id
    });

    if (!oidcCfg) {
      throw new BadRequestError({
        message: "Failed to find organization OIDC configuration"
      });
    }

    // decrypt and return cfg
    const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
    if (!orgBot) {
      throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
      tag: orgBot.symmetricKeyTag,
      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
      oidcCfg;

    let clientId = "";
    if (encryptedClientId && clientIdIV && clientIdTag) {
      clientId = decryptSymmetric({
        ciphertext: encryptedClientId,
        key,
        tag: clientIdTag,
        iv: clientIdIV
      });
    }

    let clientSecret = "";
    if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
      clientSecret = decryptSymmetric({
        key,
        tag: clientSecretTag,
        iv: clientSecretIV,
        ciphertext: encryptedClientSecret
      });
    }

    return {
      id: oidcCfg.id,
      issuer: oidcCfg.issuer,
      authorizationEndpoint: oidcCfg.authorizationEndpoint,
      configurationType: oidcCfg.configurationType,
      discoveryURL: oidcCfg.discoveryURL,
      jwksUri: oidcCfg.jwksUri,
      tokenEndpoint: oidcCfg.tokenEndpoint,
      userinfoEndpoint: oidcCfg.userinfoEndpoint,
      orgId: oidcCfg.orgId,
      isActive: oidcCfg.isActive,
      allowedEmailDomains: oidcCfg.allowedEmailDomains,
      clientId,
      clientSecret
    };
  };
  const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
    const serverCfg = await getServerCfg();

    if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
      throw new BadRequestError({
        message: "Login with OIDC is disabled by administrator."
      });
    }

    const appCfg = getConfig();
    const userAlias = await userAliasDAL.findOne({
      externalId,
      orgId,
      aliasType: UserAliasType.OIDC
    });

    const organization = await orgDAL.findOrgById(orgId);
    if (!organization) throw new BadRequestError({ message: "Org not found" });

    let user: TUsers;
    if (userAlias) {
      user = await userDAL.transaction(async (tx) => {
        const foundUser = await userDAL.findById(userAlias.userId, tx);
        const [orgMembership] = await orgDAL.findMembership(
          {
            [`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id,
            [`${TableName.OrgMembership}.orgId` as "id"]: orgId
          },
          { tx }
        );
        if (!orgMembership) {
          await orgMembershipDAL.create(
            {
              userId: userAlias.userId,
              inviteEmail: email,
              orgId,
              role: OrgMembershipRole.Member,
              status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
            },
            tx
          );
          // Only update the membership to Accepted if the user account is already completed.
        } else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) {
          await orgDAL.updateMembershipById(
            orgMembership.id,
            {
              status: OrgMembershipStatus.Accepted
            },
            tx
          );
        }

        return foundUser;
      });
    } else {
      user = await userDAL.transaction(async (tx) => {
        let newUser: TUsers | undefined;

        if (serverCfg.trustOidcEmails) {
          newUser = await userDAL.findOne(
            {
              email,
              isEmailVerified: true
            },
            tx
          );
        }

        if (!newUser) {
          const uniqueUsername = await normalizeUsername(externalId, userDAL);
          newUser = await userDAL.create(
            {
              email,
              firstName,
              isEmailVerified: serverCfg.trustOidcEmails,
              username: serverCfg.trustOidcEmails ? email : uniqueUsername,
              lastName,
              authMethods: [],
              isGhost: false
            },
            tx
          );
        }

        await userAliasDAL.create(
          {
            userId: newUser.id,
            aliasType: UserAliasType.OIDC,
            externalId,
            emails: email ? [email] : [],
            orgId
          },
          tx
        );

        const [orgMembership] = await orgDAL.findMembership(
          {
            [`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
            [`${TableName.OrgMembership}.orgId` as "id"]: orgId
          },
          { tx }
        );

        if (!orgMembership) {
          await orgMembershipDAL.create(
            {
              userId: newUser.id,
              inviteEmail: email,
              orgId,
              role: OrgMembershipRole.Member,
              status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
            },
            tx
          );
          // Only update the membership to Accepted if the user account is already completed.
        } else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
          await orgDAL.updateMembershipById(
            orgMembership.id,
            {
              status: OrgMembershipStatus.Accepted
            },
            tx
          );
        }

        return newUser;
      });
    }

    await licenseService.updateSubscriptionOrgMemberCount(organization.id);

    const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
    const isUserCompleted = Boolean(user.isAccepted);
    const providerAuthToken = jwt.sign(
      {
        authTokenType: AuthTokenType.PROVIDER_TOKEN,
        userId: user.id,
        username: user.username,
        ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
        firstName,
        lastName,
        organizationName: organization.name,
        organizationId: organization.id,
        organizationSlug: organization.slug,
        hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
        authMethod: AuthMethod.OIDC,
        authType: UserAliasType.OIDC,
        isUserCompleted,
        ...(callbackPort && { callbackPort })
      },
      appCfg.AUTH_SECRET,
      {
        expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
      }
    );

    if (user.email && !user.isEmailVerified) {
      const token = await tokenService.createTokenForUser({
        type: TokenType.TOKEN_EMAIL_VERIFICATION,
        userId: user.id
      });

      await smtpService.sendMail({
        template: SmtpTemplates.EmailVerification,
        subjectLine: "Infisical confirmation code",
        recipients: [user.email],
        substitutions: {
          code: token
        }
      });
    }

    return { isUserCompleted, providerAuthToken };
  };
  const updateOidcCfg = async ({
    orgSlug,
    allowedEmailDomains,
    configurationType,
    discoveryURL,
    actor,
    actorOrgId,
    actorAuthMethod,
    actorId,
    issuer,
    isActive,
    authorizationEndpoint,
    jwksUri,
    tokenEndpoint,
    userinfoEndpoint,
    clientId,
    clientSecret
  }: TUpdateOidcCfgDTO) => {
    const org = await orgDAL.findOne({
      slug: orgSlug
    });

    if (!org) {
      throw new BadRequestError({
        message: "Organization not found"
      });
    }

    const plan = await licenseService.getPlan(org.id);
    if (!plan.oidcSSO)
      throw new BadRequestError({
        message:
          "Failed to update OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      org.id,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);

    const orgBot = await orgBotDAL.findOne({ orgId: org.id });
    if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
      tag: orgBot.symmetricKeyTag,
      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const updateQuery: TOidcConfigsUpdate = {
      allowedEmailDomains,
      configurationType,
      discoveryURL,
      issuer,
      authorizationEndpoint,
      tokenEndpoint,
      userinfoEndpoint,
      jwksUri,
      isActive
    };

    if (clientId !== undefined) {
      const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
      updateQuery.encryptedClientId = encryptedClientId;
      updateQuery.clientIdIV = clientIdIV;
      updateQuery.clientIdTag = clientIdTag;
    }

    if (clientSecret !== undefined) {
      const {
        ciphertext: encryptedClientSecret,
        iv: clientSecretIV,
        tag: clientSecretTag
      } = encryptSymmetric(clientSecret, key);

      updateQuery.encryptedClientSecret = encryptedClientSecret;
      updateQuery.clientSecretIV = clientSecretIV;
      updateQuery.clientSecretTag = clientSecretTag;
    }

    const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
    return ssoConfig;
  };
  const createOidcCfg = async ({
    orgSlug,
    allowedEmailDomains,
    configurationType,
    discoveryURL,
    actor,
    actorOrgId,
    actorAuthMethod,
    actorId,
    issuer,
    isActive,
    authorizationEndpoint,
    jwksUri,
    tokenEndpoint,
    userinfoEndpoint,
    clientId,
    clientSecret
  }: TCreateOidcCfgDTO) => {
    const org = await orgDAL.findOne({
      slug: orgSlug
    });
    if (!org) {
      throw new BadRequestError({
        message: "Organization not found"
      });
    }

    const plan = await licenseService.getPlan(org.id);
    if (!plan.oidcSSO)
      throw new BadRequestError({
        message:
          "Failed to create OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      org.id,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);

    const orgBot = await orgBotDAL.transaction(async (tx) => {
      const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
      if (doc) return doc;

      const { privateKey, publicKey } = generateAsymmetricKeyPair();
      const key = generateSymmetricKey();
      const {
        ciphertext: encryptedPrivateKey,
        iv: privateKeyIV,
        tag: privateKeyTag,
        encoding: privateKeyKeyEncoding,
        algorithm: privateKeyAlgorithm
      } = infisicalSymmetricEncypt(privateKey);
      const {
        ciphertext: encryptedSymmetricKey,
        iv: symmetricKeyIV,
        tag: symmetricKeyTag,
        encoding: symmetricKeyKeyEncoding,
        algorithm: symmetricKeyAlgorithm
      } = infisicalSymmetricEncypt(key);

      return orgBotDAL.create(
        {
          name: "Infisical org bot",
          publicKey,
          privateKeyIV,
          encryptedPrivateKey,
          symmetricKeyIV,
          symmetricKeyTag,
          encryptedSymmetricKey,
          symmetricKeyAlgorithm,
          orgId: org.id,
          privateKeyTag,
          privateKeyAlgorithm,
          privateKeyKeyEncoding,
          symmetricKeyKeyEncoding
        },
        tx
      );
    });

    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
      tag: orgBot.symmetricKeyTag,
      keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
    const {
      ciphertext: encryptedClientSecret,
      iv: clientSecretIV,
      tag: clientSecretTag
    } = encryptSymmetric(clientSecret, key);

    const oidcCfg = await oidcConfigDAL.create({
      issuer,
      isActive,
      configurationType,
      discoveryURL,
      authorizationEndpoint,
      allowedEmailDomains,
      jwksUri,
      tokenEndpoint,
      userinfoEndpoint,
      orgId: org.id,
      encryptedClientId,
      clientIdIV,
      clientIdTag,
      encryptedClientSecret,
      clientSecretIV,
      clientSecretTag
    });

    return oidcCfg;
  };
  const getOrgAuthStrategy = async (orgSlug: string, callbackPort?: string) => {
    const appCfg = getConfig();

    const org = await orgDAL.findOne({
      slug: orgSlug
    });

    if (!org) {
      throw new BadRequestError({
        message: "Organization not found."
      });
    }

    const oidcCfg = await getOidc({
      type: "internal",
      orgSlug
    });

    if (!oidcCfg || !oidcCfg.isActive) {
      throw new BadRequestError({
        message: "Failed to authenticate with OIDC SSO"
      });
    }

    let issuer: Issuer;
    if (oidcCfg.configurationType === OIDCConfigurationType.DISCOVERY_URL) {
      if (!oidcCfg.discoveryURL) {
        throw new BadRequestError({
          message: "OIDC not configured correctly"
        });
      }
      issuer = await Issuer.discover(oidcCfg.discoveryURL);
    } else {
      if (
        !oidcCfg.issuer ||
        !oidcCfg.authorizationEndpoint ||
        !oidcCfg.jwksUri ||
        !oidcCfg.tokenEndpoint ||
        !oidcCfg.userinfoEndpoint
      ) {
        throw new BadRequestError({
          message: "OIDC not configured correctly"
        });
      }
      issuer = new OpenIdIssuer({
        issuer: oidcCfg.issuer,
        authorization_endpoint: oidcCfg.authorizationEndpoint,
        jwks_uri: oidcCfg.jwksUri,
        token_endpoint: oidcCfg.tokenEndpoint,
        userinfo_endpoint: oidcCfg.userinfoEndpoint
      });
    }

    const client = new issuer.Client({
      client_id: oidcCfg.clientId,
      client_secret: oidcCfg.clientSecret,
      redirect_uris: [`${appCfg.SITE_URL}/api/v1/sso/oidc/callback`]
    });

    const strategy = new OpenIdStrategy(
      {
        client,
        passReqToCallback: true
      },
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      (_req: any, tokenSet: TokenSet, cb: any) => {
        const claims = tokenSet.claims();
        if (!claims.email || !claims.given_name) {
          throw new BadRequestError({
            message: "Invalid request. Missing email or first name"
          });
        }

        if (oidcCfg.allowedEmailDomains) {
          const allowedDomains = oidcCfg.allowedEmailDomains.split(", ");
          if (!allowedDomains.includes(claims.email.split("@")[1])) {
            throw new BadRequestError({
              message: "Email not allowed."
            });
          }
        }

        oidcLogin({
          email: claims.email,
          externalId: claims.sub,
          firstName: claims.given_name ?? "",
          lastName: claims.family_name ?? "",
          orgId: org.id,
          callbackPort
        })
          .then(({ isUserCompleted, providerAuthToken }) => {
            cb(null, { isUserCompleted, providerAuthToken });
          })
          .catch((error) => {
            cb(error);
          });
      }
    );

    return strategy;
  };

  return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
};
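A minimal consumption sketch, not part of the diff: it wires the factory with the dependencies named in TOidcConfigServiceFactoryDep and reads back the decrypted config for an org. The DAL/service instances and the "example-org" slug are placeholders for illustration.

// Sketch only: assumes the DAL/service instances are already constructed elsewhere,
// and that this runs inside an async bootstrap or request handler.
const oidcConfigService = oidcConfigServiceFactory({
  orgDAL,
  orgMembershipDAL,
  userDAL,
  userAliasDAL,
  licenseService,
  permissionService,
  tokenService,
  orgBotDAL,
  smtpService,
  oidcConfigDAL
});

// "internal" callers skip the org permission check, matching the branch in getOidc above.
const cfg = await oidcConfigService.getOidc({ type: "internal", orgSlug: "example-org" });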
backend/src/ee/services/oidc/oidc-config-types.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { TGenericPermission } from "@app/lib/types";

export enum OIDCConfigurationType {
  CUSTOM = "custom",
  DISCOVERY_URL = "discoveryURL"
}

export type TOidcLoginDTO = {
  externalId: string;
  email: string;
  firstName: string;
  lastName?: string;
  orgId: string;
  callbackPort?: string;
};

export type TGetOidcCfgDTO =
  | ({
      type: "external";
      orgSlug: string;
    } & TGenericPermission)
  | {
      type: "internal";
      orgSlug: string;
    };

export type TCreateOidcCfgDTO = {
  issuer?: string;
  authorizationEndpoint?: string;
  discoveryURL?: string;
  configurationType: OIDCConfigurationType;
  allowedEmailDomains?: string;
  jwksUri?: string;
  tokenEndpoint?: string;
  userinfoEndpoint?: string;
  clientId: string;
  clientSecret: string;
  isActive: boolean;
  orgSlug: string;
} & TGenericPermission;

export type TUpdateOidcCfgDTO = Partial<{
  issuer: string;
  authorizationEndpoint: string;
  allowedEmailDomains: string;
  discoveryURL: string;
  jwksUri: string;
  configurationType: OIDCConfigurationType;
  tokenEndpoint: string;
  userinfoEndpoint: string;
  clientId: string;
  clientSecret: string;
  isActive: boolean;
  orgSlug: string;
}> &
  TGenericPermission;
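For illustration only, a create payload for a provider configured via its discovery document might look like the sketch below. All concrete values are placeholders, and the actor fields required by TGenericPermission are deliberately omitted.

// Sketch of a TCreateOidcCfgDTO body (permission fields elided); note allowedEmailDomains
// is a comma-plus-space separated string, matching the split(", ") in the service above.
const exampleCreatePayload: Omit<TCreateOidcCfgDTO, keyof TGenericPermission> = {
  configurationType: OIDCConfigurationType.DISCOVERY_URL,
  discoveryURL: "https://idp.example.com/.well-known/openid-configuration",
  allowedEmailDomains: "example.com, example.org",
  clientId: "placeholder-client-id",
  clientSecret: "placeholder-client-secret",
  isActive: true,
  orgSlug: "example-org"
};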
@ -116,7 +116,6 @@ const buildMemberPermission = () => {
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.IncidentAccount);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.SecretScanning);
|
||||
|
@ -10,7 +10,8 @@ export type TPermissionDALFactory = ReturnType<typeof permissionDALFactory>;
|
||||
export const permissionDALFactory = (db: TDbClient) => {
|
||||
const getOrgPermission = async (userId: string, orgId: string) => {
|
||||
try {
|
||||
const membership = await db(TableName.OrgMembership)
|
||||
const membership = await db
|
||||
.replicaNode()(TableName.OrgMembership)
|
||||
.leftJoin(TableName.OrgRoles, `${TableName.OrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
|
||||
.join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
|
||||
.where("userId", userId)
|
||||
@ -28,7 +29,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
|
||||
const getOrgIdentityPermission = async (identityId: string, orgId: string) => {
|
||||
try {
|
||||
const membership = await db(TableName.IdentityOrgMembership)
|
||||
const membership = await db
|
||||
.replicaNode()(TableName.IdentityOrgMembership)
|
||||
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
|
||||
.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
|
||||
.where("identityId", identityId)
|
||||
@ -45,11 +47,13 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
|
||||
const getProjectPermission = async (userId: string, projectId: string) => {
|
||||
try {
|
||||
const groups: string[] = await db(TableName.GroupProjectMembership)
|
||||
const groups: string[] = await db
|
||||
.replicaNode()(TableName.GroupProjectMembership)
|
||||
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
|
||||
.pluck(`${TableName.GroupProjectMembership}.groupId`);
|
||||
|
||||
const groupDocs = await db(TableName.UserGroupMembership)
|
||||
const groupDocs = await db
|
||||
.replicaNode()(TableName.UserGroupMembership)
|
||||
.where(`${TableName.UserGroupMembership}.userId`, userId)
|
||||
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups)
|
||||
.join(
|
||||
@ -231,7 +235,8 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
|
||||
const getProjectIdentityPermission = async (identityId: string, projectId: string) => {
|
||||
try {
|
||||
const docs = await db(TableName.IdentityProjectMembership)
|
||||
const docs = await db
|
||||
.replicaNode()(TableName.IdentityProjectMembership)
|
||||
.join(
|
||||
TableName.IdentityProjectMembershipRole,
|
||||
`${TableName.IdentityProjectMembershipRole}.projectMembershipId`,
|
||||
|
@ -26,7 +26,9 @@ export enum ProjectPermissionSub {
|
||||
SecretRollback = "secret-rollback",
|
||||
SecretApproval = "secret-approval",
|
||||
SecretRotation = "secret-rotation",
|
||||
Identity = "identity"
|
||||
Identity = "identity",
|
||||
CertificateAuthorities = "certificate-authorities",
|
||||
Certificates = "certificates"
|
||||
}
|
||||
|
||||
type SubjectFields = {
|
||||
@ -53,6 +55,8 @@ export type ProjectPermissionSet =
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
|
||||
| [ProjectPermissionActions.Delete, ProjectPermissionSub.Project]
|
||||
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
|
||||
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
|
||||
@ -139,6 +143,17 @@ const buildAdminPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Edit, ProjectPermissionSub.IpAllowList);
|
||||
can(ProjectPermissionActions.Delete, ProjectPermissionSub.IpAllowList);
|
||||
|
||||
// double check if all CRUD are needed for CA and Certificates
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
|
||||
can(ProjectPermissionActions.Create, ProjectPermissionSub.CertificateAuthorities);
|
||||
can(ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateAuthorities);
|
||||
can(ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateAuthorities);
|
||||
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
|
||||
can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
|
||||
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
|
||||
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
|
||||
|
||||
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Project);
|
||||
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project);
|
||||
|
||||
@ -205,6 +220,14 @@ const buildMemberPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
|
||||
|
||||
// double check if all CRUD are needed for CA and Certificates
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
|
||||
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
|
||||
can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
|
||||
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
|
||||
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
||||
@ -229,6 +252,8 @@ const buildViewerPermissionRules = () => {
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
backend/src/ee/services/rate-limit/rate-limit-dal.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TRateLimitDALFactory = ReturnType<typeof rateLimitDALFactory>;

export const rateLimitDALFactory = (db: TDbClient) => ormify(db, TableName.RateLimit, {});
backend/src/ee/services/rate-limit/rate-limit-service.ts (new file, 106 lines)
@@ -0,0 +1,106 @@
import { CronJob } from "cron";

import { logger } from "@app/lib/logger";

import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";

let rateLimitMaxConfiguration = {
  readLimit: 60,
  publicEndpointLimit: 30,
  writeLimit: 200,
  secretsLimit: 60,
  authRateLimit: 60,
  inviteUserRateLimit: 30,
  mfaRateLimit: 20,
  creationLimit: 30
};

Object.freeze(rateLimitMaxConfiguration);

export const getRateLimiterConfig = () => {
  return rateLimitMaxConfiguration;
};

type TRateLimitServiceFactoryDep = {
  rateLimitDAL: TRateLimitDALFactory;
  licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
};

export type TRateLimitServiceFactory = ReturnType<typeof rateLimitServiceFactory>;

export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateLimitServiceFactoryDep) => {
  const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000";

  const getRateLimits = async (): Promise<TRateLimit | undefined> => {
    let rateLimit: TRateLimit;

    try {
      rateLimit = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID });
      if (!rateLimit) {
        // rate limit might not exist
        rateLimit = await rateLimitDAL.create({
          // @ts-expect-error id is kept as fixed because there should only be one rate limit config per instance
          id: DEFAULT_RATE_LIMIT_CONFIG_ID
        });
      }
      return rateLimit;
    } catch (err) {
      logger.error("Error fetching rate limits %o", err);
      return undefined;
    }
  };

  const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => {
    return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
  };

  const syncRateLimitConfiguration = async () => {
    try {
      const rateLimit = await getRateLimits();
      if (rateLimit) {
        const newRateLimitMaxConfiguration: typeof rateLimitMaxConfiguration = {
          readLimit: rateLimit.readRateLimit,
          publicEndpointLimit: rateLimit.publicEndpointLimit,
          writeLimit: rateLimit.writeRateLimit,
          secretsLimit: rateLimit.secretsRateLimit,
          authRateLimit: rateLimit.authRateLimit,
          inviteUserRateLimit: rateLimit.inviteUserRateLimit,
          mfaRateLimit: rateLimit.mfaRateLimit,
          creationLimit: rateLimit.creationLimit
        };

        logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
        Object.freeze(newRateLimitMaxConfiguration);
        rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
      }
    } catch (error) {
      logger.error(`Error syncing rate limit configurations: %o`, error);
    }
  };

  const initializeBackgroundSync = async () => {
    if (!licenseService.onPremFeatures.customRateLimits) {
      logger.info("Current license does not support custom rate limit configuration");
      return;
    }

    logger.info("Setting up background sync process for rate limits");
    // initial sync upon startup
    await syncRateLimitConfiguration();

    // sync rate limits configuration every 10 minutes
    const job = new CronJob("*/10 * * * *", syncRateLimitConfiguration);
    job.start();

    return job;
  };

  return {
    getRateLimits,
    updateRateLimit,
    initializeBackgroundSync,
    syncRateLimitConfiguration
  };
};
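A minimal usage sketch, not part of the diff: build the service at server startup, kick off the ten-minute background sync, and read the cached values wherever the HTTP rate limiter is registered. The rateLimitDAL and licenseService instances are assumed to be constructed elsewhere.

// Sketch only, inside an async bootstrap routine.
const rateLimitService = rateLimitServiceFactory({ rateLimitDAL, licenseService });
await rateLimitService.initializeBackgroundSync();

// Cached snapshot of the limits; refreshed by the cron job above, so callers never hit the DB.
const { readLimit, writeLimit } = getRateLimiterConfig();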
backend/src/ee/services/rate-limit/rate-limit-types.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
export type TRateLimitUpdateDTO = {
  readRateLimit: number;
  writeRateLimit: number;
  secretsRateLimit: number;
  authRateLimit: number;
  inviteUserRateLimit: number;
  mfaRateLimit: number;
  creationLimit: number;
  publicEndpointLimit: number;
};

export type TRateLimit = {
  id: string;
  createdAt: Date;
  updatedAt: Date;
} & TRateLimitUpdateDTO;
@ -10,7 +10,8 @@ export const samlConfigDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findEnforceableSamlCfg = async (orgId: string) => {
|
||||
try {
|
||||
const samlCfg = await db(TableName.SamlConfig)
|
||||
const samlCfg = await db
|
||||
.replicaNode()(TableName.SamlConfig)
|
||||
.where({
|
||||
orgId,
|
||||
isActive: true
|
||||
|
@ -28,6 +28,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
@ -41,7 +42,10 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
|
||||
|
||||
type TSamlConfigServiceFactoryDep = {
|
||||
samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
|
||||
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
@ -332,6 +336,13 @@ export const samlConfigServiceFactory = ({
|
||||
}: TSamlLoginDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.SAML)) {
|
||||
throw new BadRequestError({
|
||||
message: "Login with SAML is disabled by administrator."
|
||||
});
|
||||
}
|
||||
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
@ -377,6 +388,21 @@ export const samlConfigServiceFactory = ({
|
||||
return foundUser;
|
||||
});
|
||||
} else {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
|
||||
// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
|
||||
});
|
||||
}
|
||||
|
||||
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
|
||||
// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
|
||||
});
|
||||
}
|
||||
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
let newUser: TUsers | undefined;
|
||||
if (serverCfg.trustSamlEmails) {
|
||||
@ -452,6 +478,7 @@ export const samlConfigServiceFactory = ({
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
@ -464,6 +491,7 @@ export const samlConfigServiceFactory = ({
|
||||
organizationId: organization.id,
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: authProvider,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
authType: UserAliasType.SAML,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
|
@ -30,7 +30,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await sapFindQuery(tx || db, {
|
||||
const doc = await sapFindQuery(tx || db.replicaNode(), {
|
||||
[`${TableName.SecretApprovalPolicy}.id` as "id"]: id
|
||||
});
|
||||
const formatedDoc = mergeOneToManyRelation(
|
||||
@ -52,7 +52,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
|
||||
const find = async (filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await sapFindQuery(tx || db, filter);
|
||||
const docs = await sapFindQuery(tx || db.replicaNode(), filter);
|
||||
const formatedDoc = mergeOneToManyRelation(
|
||||
docs,
|
||||
"id",
|
||||
|
@ -62,7 +62,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db);
|
||||
const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
|
||||
const docs = await sql;
|
||||
const formatedDoc = sqlNestRelationships({
|
||||
data: docs,
|
||||
@ -102,7 +102,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
const docs = await (tx || db)
|
||||
.with(
|
||||
"temp",
|
||||
(tx || db)(TableName.SecretApprovalRequest)
|
||||
(tx || db.replicaNode())(TableName.SecretApprovalRequest)
|
||||
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
|
||||
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
|
||||
.join(
|
||||
@ -148,7 +148,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
try {
|
||||
// akhilmhdh: If ever u wanted a 1 to so many relationship connected with pagination
|
||||
// this is the place u wanna look at.
|
||||
const query = (tx || db)(TableName.SecretApprovalRequest)
|
||||
const query = (tx || db.replicaNode())(TableName.SecretApprovalRequest)
|
||||
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
|
||||
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
|
||||
.join(
|
||||
|
@ -47,7 +47,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findByRequestId = async (requestId: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await (tx || db)({
|
||||
const doc = await (tx || db.replicaNode())({
|
||||
secVerTag: TableName.SecretTag
|
||||
})
|
||||
.from(TableName.SecretApprovalRequestSecret)
|
||||
|
@ -41,7 +41,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {
|
||||
|
||||
const find = async (filter: TFindFilter<TSecretRotations & { projectId: string }>, tx?: Knex) => {
|
||||
try {
|
||||
const data = await findQuery(filter, tx || db);
|
||||
const data = await findQuery(filter, tx || db.replicaNode());
|
||||
return sqlNestRelationships({
|
||||
data,
|
||||
key: "id",
|
||||
@ -93,7 +93,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await (tx || db)(TableName.SecretRotation)
|
||||
const doc = await (tx || db.replicaNode())(TableName.SecretRotation)
|
||||
.join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`)
|
||||
.where({ [`${TableName.SecretRotation}.id` as "id"]: id })
|
||||
.select(selectAllTableCols(TableName.SecretRotation))
|
||||
|
@ -81,8 +81,7 @@ export const secretSnapshotServiceFactory = ({
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id);
|
||||
return count;
|
||||
return snapshotDAL.countOfSnapshotsByFolderId(folder.id);
|
||||
};
|
||||
|
||||
const listSnapshots = async ({
|
||||
|
@ -1,3 +1,4 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
@ -11,6 +12,7 @@ import {
|
||||
} from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
|
||||
|
||||
@ -19,7 +21,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const data = await (tx || db)(TableName.Snapshot)
|
||||
const data = await (tx || db.replicaNode())(TableName.Snapshot)
|
||||
.where(`${TableName.Snapshot}.id`, id)
|
||||
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
|
||||
.select(selectAllTableCols(TableName.Snapshot))
|
||||
@ -41,7 +43,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
|
||||
const countOfSnapshotsByFolderId = async (folderId: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await (tx || db)(TableName.Snapshot)
|
||||
const doc = await (tx || db.replicaNode())(TableName.Snapshot)
|
||||
.where({ folderId })
|
||||
.groupBy(["folderId"])
|
||||
.count("folderId")
|
||||
@ -54,7 +56,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findSecretSnapshotDataById = async (snapshotId: string, tx?: Knex) => {
|
||||
try {
|
||||
const data = await (tx || db)(TableName.Snapshot)
|
||||
const data = await (tx || db.replicaNode())(TableName.Snapshot)
|
||||
.where(`${TableName.Snapshot}.id`, snapshotId)
|
||||
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
|
||||
.leftJoin(TableName.SnapshotSecret, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecret}.snapshotId`)
|
||||
@ -307,7 +309,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
// when we need to rollback we will pull from these snapshots
|
||||
const findLatestSnapshotByFolderId = async (folderId: string, tx?: Knex) => {
|
||||
try {
|
||||
const docs = await (tx || db)(TableName.Snapshot)
|
||||
const docs = await (tx || db.replicaNode())(TableName.Snapshot)
|
||||
.where(`${TableName.Snapshot}.folderId`, folderId)
|
||||
.join<TSecretSnapshots>(
|
||||
(tx || db)(TableName.Snapshot).groupBy("folderId").max("createdAt").select("folderId").as("latestVersion"),
|
||||
@ -325,12 +327,151 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
|
||||
*
|
||||
* This function operates in three main steps:
|
||||
* 1. Pruning snapshots from current folders.
|
||||
* 2. Pruning snapshots from non-current folders (versioned ones).
|
||||
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
|
||||
*
|
||||
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
|
||||
* to manage the large datasets without overwhelming the DB.
|
||||
*
|
||||
* Steps:
|
||||
* - Fetch a batch of folder IDs.
|
||||
* - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date.
|
||||
* - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`).
|
||||
* - Repeat the process for versioned folders.
|
||||
* - Finally, delete orphaned snapshots that do not have an associated folder.
|
||||
*/
|
||||
const pruneExcessSnapshots = async () => {
|
||||
const PRUNE_FOLDER_BATCH_SIZE = 10000;
|
||||
|
||||
try {
|
||||
let uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// cleanup snapshots from current folders
|
||||
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
|
||||
while (true) {
|
||||
const folderBatch = await db(TableName.SecretFolder)
|
||||
.where("id", ">", uuidOffset)
|
||||
.where("isReserved", false)
|
||||
.orderBy("id", "asc")
|
||||
.limit(PRUNE_FOLDER_BATCH_SIZE)
|
||||
.select("id");
|
||||
|
||||
const batchEntries = folderBatch.map((folder) => folder.id);
|
||||
|
||||
if (folderBatch.length) {
|
||||
try {
|
||||
logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`);
|
||||
await db(TableName.Snapshot)
|
||||
.with("snapshot_cte", (qb) => {
|
||||
void qb
|
||||
.from(TableName.Snapshot)
|
||||
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
|
||||
.select(
|
||||
"folderId",
|
||||
`${TableName.Snapshot}.id as id`,
|
||||
db.raw(
|
||||
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
|
||||
)
|
||||
);
|
||||
})
|
||||
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
|
||||
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
|
||||
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
|
||||
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
|
||||
.delete();
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`Failed to prune snapshots from current folders in range ${batchEntries[0]}:${
|
||||
batchEntries[batchEntries.length - 1]
|
||||
}`
|
||||
);
|
||||
} finally {
|
||||
uuidOffset = batchEntries[batchEntries.length - 1];
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// cleanup snapshots from non-current folders
|
||||
uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
const folderBatch = await db(TableName.SecretFolderVersion)
|
||||
.select("folderId")
|
||||
.distinct("folderId")
|
||||
.where("folderId", ">", uuidOffset)
|
||||
.orderBy("folderId", "asc")
|
||||
.limit(PRUNE_FOLDER_BATCH_SIZE);
|
||||
|
||||
const batchEntries = folderBatch.map((folder) => folder.folderId);
|
||||
|
||||
if (folderBatch.length) {
|
||||
try {
|
||||
logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`);
|
||||
await db(TableName.Snapshot)
|
||||
.with("snapshot_cte", (qb) => {
|
||||
void qb
|
||||
.from(TableName.Snapshot)
|
||||
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
|
||||
.select(
|
||||
"folderId",
|
||||
`${TableName.Snapshot}.id as id`,
|
||||
db.raw(
|
||||
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
|
||||
)
|
||||
);
|
||||
})
|
||||
.join(
|
||||
TableName.SecretFolderVersion,
|
||||
`${TableName.SecretFolderVersion}.folderId`,
|
||||
`${TableName.Snapshot}.folderId`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
|
||||
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
|
||||
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
|
||||
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
|
||||
.delete();
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${
|
||||
batchEntries[batchEntries.length - 1]
|
||||
}`
|
||||
);
|
||||
} finally {
|
||||
uuidOffset = batchEntries[batchEntries.length - 1];
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// cleanup orphaned snapshots (those that don't belong to an existing folder and folder version)
|
||||
await db(TableName.Snapshot)
|
||||
.whereNotIn("folderId", (qb) => {
|
||||
void qb
|
||||
.select("folderId")
|
||||
.from(TableName.SecretFolderVersion)
|
||||
.union((qb1) => void qb1.select("id").from(TableName.SecretFolder));
|
||||
})
|
||||
.delete();
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "SnapshotPrune" });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
...secretSnapshotOrm,
|
||||
findById,
|
||||
findLatestSnapshotByFolderId,
|
||||
findRecursivelySnapshots,
|
||||
countOfSnapshotsByFolderId,
|
||||
findSecretSnapshotDataById
|
||||
findSecretSnapshotDataById,
|
||||
pruneExcessSnapshots
|
||||
};
|
||||
};
|
||||
|
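A hedged sketch, not shown in this diff: pruneExcessSnapshots is written to be invoked periodically so each folder keeps at most its project's pitVersionLimit snapshots. The daily schedule and the snapshotDAL instance below are assumptions for illustration; the cron package is the same one used by the rate-limit sync above.

// Sketch only: schedule nightly snapshot pruning (assumed schedule and wiring).
import { CronJob } from "cron";

const pruneJob = new CronJob("0 0 * * *", () => void snapshotDAL.pruneExcessSnapshots());
pruneJob.start();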
@ -42,6 +42,13 @@ export const IDENTITIES = {
|
||||
},
|
||||
DELETE: {
|
||||
identityId: "The ID of the identity to delete."
|
||||
},
|
||||
GET_BY_ID: {
|
||||
identityId: "The ID of the identity to get details.",
|
||||
orgId: "The ID of the org of the identity"
|
||||
},
|
||||
LIST: {
|
||||
orgId: "The ID of the organization to list identities."
|
||||
}
|
||||
} as const;
|
||||
|
||||
@ -65,6 +72,9 @@ export const UNIVERSAL_AUTH = {
|
||||
RETRIEVE: {
|
||||
identityId: "The ID of the identity to retrieve."
|
||||
},
|
||||
REVOKE: {
|
||||
identityId: "The ID of the identity to revoke."
|
||||
},
|
||||
UPDATE: {
|
||||
identityId: "The ID of the identity to update.",
|
||||
clientSecretTrustedIps: "The new list of IPs or CIDR ranges that the Client Secret can be used from.",
|
||||
@ -83,6 +93,10 @@ export const UNIVERSAL_AUTH = {
|
||||
LIST_CLIENT_SECRETS: {
|
||||
identityId: "The ID of the identity to list client secrets for."
|
||||
},
|
||||
GET_CLIENT_SECRET: {
|
||||
identityId: "The ID of the identity to get the client secret from.",
|
||||
clientSecretId: "The ID of the client secret to get details."
|
||||
},
|
||||
REVOKE_CLIENT_SECRET: {
|
||||
identityId: "The ID of the identity to revoke the client secret from.",
|
||||
clientSecretId: "The ID of the client secret to revoke."
|
||||
@ -104,6 +118,27 @@ export const AWS_AUTH = {
|
||||
iamRequestBody:
|
||||
"The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.",
|
||||
iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request."
|
||||
},
|
||||
REVOKE: {
|
||||
identityId: "The ID of the identity to revoke."
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const AZURE_AUTH = {
|
||||
REVOKE: {
|
||||
identityId: "The ID of the identity to revoke."
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const GCP_AUTH = {
|
||||
REVOKE: {
|
||||
identityId: "The ID of the identity to revoke."
|
||||
}
|
||||
} as const;
|
||||
|
||||
export const KUBERNETES_AUTH = {
|
||||
REVOKE: {
|
||||
identityId: "The ID of the identity to revoke."
|
||||
}
|
||||
} as const;
|
||||
|
||||
@ -343,9 +378,11 @@ export const RAW_SECRETS = {
|
||||
secretValue: "The value of the secret to create.",
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to create.",
|
||||
workspaceId: "The ID of the project to create the secret in."
|
||||
workspaceId: "The ID of the project to create the secret in.",
|
||||
tagIds: "The ID of the tags to be attached to the created secret."
|
||||
},
|
||||
GET: {
|
||||
expand: "Whether or not to expand secret references",
|
||||
secretName: "The name of the secret to get.",
|
||||
workspaceId: "The ID of the project to get the secret from.",
|
||||
workspaceSlug: "The slug of the project to get the secret from.",
|
||||
@ -364,7 +401,8 @@ export const RAW_SECRETS = {
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to update.",
|
||||
projectSlug: "The slug of the project to update the secret in.",
|
||||
workspaceId: "The ID of the project to update the secret in."
|
||||
workspaceId: "The ID of the project to update the secret in.",
|
||||
tagIds: "The ID of the tags to be attached to the updated secret."
|
||||
},
|
||||
DELETE: {
|
||||
secretName: "The name of the secret to delete.",
|
||||
@ -506,12 +544,27 @@ export const SECRET_TAGS = {
|
||||
LIST: {
|
||||
projectId: "The ID of the project to list tags from."
|
||||
},
|
||||
GET_TAG_BY_ID: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagId: "The ID of the tag to get details"
|
||||
},
|
||||
GET_TAG_BY_SLUG: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagSlug: "The slug of the tag to get details"
|
||||
},
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the tag in.",
|
||||
name: "The name of the tag to create.",
|
||||
slug: "The slug of the tag to create.",
|
||||
color: "The color of the tag to create."
|
||||
},
|
||||
UPDATE: {
|
||||
projectId: "The ID of the project to update the tag in.",
|
||||
tagId: "The ID of the tag to get details",
|
||||
name: "The name of the tag to update.",
|
||||
slug: "The slug of the tag to update.",
|
||||
color: "The color of the tag to update."
|
||||
},
|
||||
DELETE: {
|
||||
tagId: "The ID of the tag to delete.",
|
||||
projectId: "The ID of the project to delete the tag from."
|
||||
@ -639,6 +692,7 @@ export const INTEGRATION_AUTH = {
|
||||
integration: "The slug of integration for the auth object.",
|
||||
accessId: "The unique authorized access id of the external integration provider.",
|
||||
accessToken: "The unique authorized access token of the external integration provider.",
|
||||
awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical",
|
||||
url: "",
|
||||
namespace: "",
|
||||
refreshToken: "The refresh token for integration authorization."
|
||||
@ -728,6 +782,104 @@ export const AUDIT_LOG_STREAMS = {
|
||||
}
|
||||
};
|
||||
|
||||
export const CERTIFICATE_AUTHORITIES = {
|
||||
CREATE: {
|
||||
projectSlug: "Slug of the project to create the CA in.",
|
||||
type: "The type of CA to create",
|
||||
friendlyName: "A friendly name for the CA",
|
||||
organization: "The organization (O) for the CA",
|
||||
ou: "The organization unit (OU) for the CA",
|
||||
country: "The country name (C) for the CA",
|
||||
province: "The state of province name for the CA",
|
||||
locality: "The locality name for the CA",
|
||||
commonName: "The common name (CN) for the CA",
|
||||
notBefore: "The date and time when the CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
maxPathLength:
|
||||
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
|
||||
keyAlgorithm:
|
||||
"The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA."
|
||||
},
|
||||
GET: {
|
||||
caId: "The ID of the CA to get"
|
||||
},
|
||||
UPDATE: {
|
||||
caId: "The ID of the CA to update",
|
||||
status: "The status of the CA to update to. This can be one of active or disabled"
|
||||
},
|
||||
DELETE: {
|
||||
caId: "The ID of the CA to delete"
|
||||
},
|
||||
GET_CSR: {
|
||||
caId: "The ID of the CA to generate CSR from",
|
||||
csr: "The generated CSR from the CA"
|
||||
},
|
||||
GET_CERT: {
|
||||
caId: "The ID of the CA to get the certificate body and certificate chain from",
|
||||
certificate: "The certificate body of the CA",
|
||||
certificateChain: "The certificate chain of the CA",
|
||||
serialNumber: "The serial number of the CA certificate"
|
||||
},
|
||||
SIGN_INTERMEDIATE: {
|
||||
caId: "The ID of the CA to sign the intermediate certificate with",
|
||||
csr: "The CSR to sign with the CA",
|
||||
notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
maxPathLength:
|
||||
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
|
||||
certificate: "The signed intermediate certificate",
|
||||
certificateChain: "The certificate chain of the intermediate certificate",
|
||||
issuingCaCertificate: "The certificate of the issuing CA",
|
||||
serialNumber: "The serial number of the intermediate certificate"
|
||||
},
|
||||
IMPORT_CERT: {
|
||||
caId: "The ID of the CA to import the certificate for",
|
||||
certificate: "The certificate body to import",
|
||||
certificateChain: "The certificate chain to import"
|
||||
},
|
||||
ISSUE_CERT: {
|
||||
caId: "The ID of the CA to issue the certificate from",
|
||||
friendlyName: "A friendly name for the certificate",
|
||||
commonName: "The common name (CN) for the certificate",
|
||||
altNames:
|
||||
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
|
||||
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
|
||||
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
certificate: "The issued certificate",
|
||||
issuingCaCertificate: "The certificate of the issuing CA",
|
||||
certificateChain: "The certificate chain of the issued certificate",
|
||||
privateKey: "The private key of the issued certificate",
|
||||
serialNumber: "The serial number of the issued certificate"
|
||||
},
|
||||
GET_CRL: {
|
||||
caId: "The ID of the CA to get the certificate revocation list (CRL) for",
|
||||
crl: "The certificate revocation list (CRL) of the CA"
|
||||
}
|
||||
};
|
||||
|
||||
export const CERTIFICATES = {
|
||||
GET: {
|
||||
serialNumber: "The serial number of the certificate to get"
|
||||
},
|
||||
REVOKE: {
|
||||
serialNumber:
|
||||
"The serial number of the certificate to revoke. The revoked certificate will be added to the certificate revocation list (CRL) of the CA.",
|
||||
revocationReason: "The reason for revoking the certificate.",
|
||||
revokedAt: "The date and time when the certificate was revoked",
|
||||
serialNumberRes: "The serial number of the revoked certificate."
|
||||
},
|
||||
DELETE: {
|
||||
serialNumber: "The serial number of the certificate to delete"
|
||||
},
|
||||
GET_CERT: {
|
||||
serialNumber: "The serial number of the certificate to get the certificate body and certificate chain for",
|
||||
certificate: "The certificate body of the certificate",
|
||||
certificateChain: "The certificate chain of the certificate",
|
||||
serialNumberRes: "The serial number of the certificate"
|
||||
}
|
||||
};
|
||||
|
||||
export const PROJECT_ROLE = {
|
||||
CREATE: {
|
||||
projectSlug: "Slug of the project to create the role for.",
|
||||
|
@@ -10,6 +10,14 @@ const zodStrBool = z
   .optional()
   .transform((val) => val === "true");
+
+const databaseReadReplicaSchema = z
+  .object({
+    DB_CONNECTION_URI: z.string().describe("Postgres read replica database connection string"),
+    DB_ROOT_CERT: zpStr(z.string().optional().describe("Postgres read replica database certificate string"))
+  })
+  .array()
+  .optional();

 const envSchema = z
   .object({
     PORT: z.coerce.number().default(4000),
@@ -29,7 +37,8 @@ const envSchema = z
     DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
     DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
     DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
+    DB_READ_REPLICAS: zpStr(z.string().describe("Postgres read replicas").optional()),
     BCRYPT_SALT_ROUND: z.number().default(12),
     NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
     SALT_ROUNDS: z.coerce.number().default(10),
     INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
@@ -101,6 +110,9 @@ const envSchema = z
     // azure
     CLIENT_ID_AZURE: zpStr(z.string().optional()),
     CLIENT_SECRET_AZURE: zpStr(z.string().optional()),
+    // aws
+    CLIENT_ID_AWS_INTEGRATION: zpStr(z.string().optional()),
+    CLIENT_SECRET_AWS_INTEGRATION: zpStr(z.string().optional()),
     // gitlab
     CLIENT_ID_GITLAB: zpStr(z.string().optional()),
     CLIENT_SECRET_GITLAB: zpStr(z.string().optional()),
@@ -127,6 +139,9 @@ const envSchema = z
   })
   .transform((data) => ({
     ...data,
+    DB_READ_REPLICAS: data.DB_READ_REPLICAS
+      ? databaseReadReplicaSchema.parse(JSON.parse(data.DB_READ_REPLICAS))
+      : undefined,
     isCloud: Boolean(data.LICENSE_SERVER_KEY),
     isSmtpConfigured: Boolean(data.SMTP_HOST),
     isRedisConfigured: Boolean(data.REDIS_URL),
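`DB_READ_REPLICAS` arrives as a JSON string and is only validated inside the transform, so the expected value is a JSON array matching `databaseReadReplicaSchema`. A small self-contained sketch of that parse step, with a hypothetical two-replica value and the `zpStr` wrapper left out:

import { z } from "zod";

// Mirrors databaseReadReplicaSchema from the hunk above (zpStr wrapper omitted for brevity).
const databaseReadReplicaSchema = z
  .object({
    DB_CONNECTION_URI: z.string(),
    DB_ROOT_CERT: z.string().optional()
  })
  .array()
  .optional();

// Hypothetical environment value: a JSON-encoded array of replica connection configs.
const rawReadReplicas =
  process.env.DB_READ_REPLICAS ??
  '[{"DB_CONNECTION_URI":"postgres://replica-1:5432/infisical"},{"DB_CONNECTION_URI":"postgres://replica-2:5432/infisical"}]';

// Same shape as the env transform: JSON.parse first, then schema validation.
const readReplicas = rawReadReplicas ? databaseReadReplicaSchema.parse(JSON.parse(rawReadReplicas)) : undefined;
console.log(readReplicas?.map((r) => r.DB_CONNECTION_URI));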
@@ -6,7 +6,7 @@ import tweetnacl from "tweetnacl-util";

 import { TUserEncryptionKeys } from "@app/db/schemas";

-import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
+import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";

 export const generateSrpServerKey = async (salt: string, verifier: string) => {
   // eslint-disable-next-line new-cap
@@ -97,30 +97,55 @@ export const generateUserSrpKeys = async (email: string, password: string) => {
   };
 };

-export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
-  const derivedKey = await argon2.hash(password, {
-    salt: Buffer.from(user.salt),
-    memoryCost: 65536,
-    timeCost: 3,
-    parallelism: 1,
-    hashLength: 32,
-    type: argon2.argon2id,
-    raw: true
-  });
-  if (!derivedKey) throw new Error("Failed to derive key from password");
-  const key = decryptSymmetric({
-    ciphertext: user.protectedKey!,
-    iv: user.protectedKeyIV!,
-    tag: user.protectedKeyTag!,
-    key: derivedKey.toString("base64")
-  });
-  const privateKey = decryptSymmetric({
-    ciphertext: user.encryptedPrivateKey,
-    iv: user.iv,
-    tag: user.tag,
-    key
-  });
-  return privateKey;
+export const getUserPrivateKey = async (
+  password: string,
+  user: Pick<
+    TUserEncryptionKeys,
+    | "protectedKeyTag"
+    | "protectedKey"
+    | "protectedKeyIV"
+    | "encryptedPrivateKey"
+    | "iv"
+    | "salt"
+    | "tag"
+    | "encryptionVersion"
+  >
+) => {
+  if (user.encryptionVersion === 1) {
+    return decryptSymmetric128BitHexKeyUTF8({
+      ciphertext: user.encryptedPrivateKey,
+      iv: user.iv,
+      tag: user.tag,
+      key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0")
+    });
+  }
+  if (user.encryptionVersion === 2 && user.protectedKey && user.protectedKeyIV && user.protectedKeyTag) {
+    const derivedKey = await argon2.hash(password, {
+      salt: Buffer.from(user.salt),
+      memoryCost: 65536,
+      timeCost: 3,
+      parallelism: 1,
+      hashLength: 32,
+      type: argon2.argon2id,
+      raw: true
+    });
+    if (!derivedKey) throw new Error("Failed to derive key from password");
+    const key = decryptSymmetric128BitHexKeyUTF8({
+      ciphertext: user.protectedKey,
+      iv: user.protectedKeyIV,
+      tag: user.protectedKeyTag,
+      key: derivedKey
+    });
+
+    const privateKey = decryptSymmetric128BitHexKeyUTF8({
+      ciphertext: user.encryptedPrivateKey,
+      iv: user.iv,
+      tag: user.tag,
+      key: Buffer.from(key, "hex")
+    });
+    return privateKey;
+  }
+  throw new Error(`GetUserPrivateKey: Encryption version not found`);
 };

 export const buildUserProjectKey = async (privateKey: string, publickey: string) => {
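In the version-1 branch above, the decryption key is the first 32 characters of the password, left-padded with "0"; the pad target starts at 32 and shrinks by one for every extra byte that the UTF-8 encoding adds beyond the character count (`new Blob([password]).size` is the byte length). A standalone sketch of just that normalization, with hypothetical example passwords:

// Standalone sketch of the version-1 key normalization used in getUserPrivateKey above.
// Plain-ASCII passwords are zero-padded up to 32 characters; multi-byte characters lower
// the pad target, since the byte length exceeds the character count.
const deriveV1Key = (password: string): string => {
  const head = password.slice(0, 32);
  return head.padStart(32 + (head.length - new Blob([password]).size), "0");
};

// Hypothetical inputs (Node 18+ exposes Blob globally):
console.log(deriveV1Key("correct horse battery staple")); // zero-padded to 32 characters
console.log(deriveV1Key("pässwörd")); // two multi-byte chars, so the pad target drops to 30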
@@ -59,6 +59,18 @@ export class BadRequestError extends Error {
   }
 }

+export class NotFoundError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message ?? "The requested entity is not found");
+    this.name = name || "NotFound";
+    this.error = error;
+  }
+}
+
 export class DisableRotationErrors extends Error {
   name: string;

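A small usage sketch for the new error class; the lookup data here is hypothetical, and the import path matches the `@app/lib/errors` import in the error-handler hunk further below:

import { NotFoundError } from "@app/lib/errors";

// Hypothetical in-memory lookup standing in for a DAL call.
const templates = new Map([["t1", { id: "t1", name: "default" }]]);

export const getTemplateById = (id: string) => {
  const template = templates.get(id);
  if (!template) {
    // The global error handler (see the last hunk in this diff) maps this to an HTTP 404 response.
    throw new NotFoundError({ message: `Template with ID '${id}' not found` });
  }
  return template;
};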
@@ -50,7 +50,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
   }),
   findById: async (id: string, tx?: Knex) => {
     try {
-      const result = await (tx || db)(tableName)
+      const result = await (tx || db.replicaNode())(tableName)
         .where({ id } as never)
         .first("*");
       return result;
@@ -60,7 +60,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
   },
   findOne: async (filter: Partial<Tables[Tname]["base"]>, tx?: Knex) => {
     try {
-      const res = await (tx || db)(tableName).where(filter).first("*");
+      const res = await (tx || db.replicaNode())(tableName).where(filter).first("*");
       return res;
     } catch (error) {
       throw new DatabaseError({ error, name: "Find one" });
@@ -71,7 +71,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
     { offset, limit, sort, tx }: TFindOpt<Tables[Tname]["base"]> = {}
   ) => {
     try {
-      const query = (tx || db)(tableName).where(buildFindFilter(filter));
+      const query = (tx || db.replicaNode())(tableName).where(buildFindFilter(filter));
       if (limit) void query.limit(limit);
       if (offset) void query.offset(offset);
       if (sort) {
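The read paths (`findById`, `findOne`, `find`) now go through `db.replicaNode()` while writes keep using the primary `db` handle. The actual `replicaNode` wiring lives in `initDbConnection`, which is not part of this hunk; the following is only a rough sketch of one way such a helper could be built, assuming a naive round-robin over Knex replica clients:

import knex, { Knex } from "knex";

// Assumed shape: the primary Knex instance is extended with a replicaNode() helper that
// returns a read replica when one is configured and falls back to the primary otherwise.
type TDbClient = Knex & { replicaNode: () => Knex };

export const buildDbClient = (primaryUri: string, replicaUris: string[] = []): TDbClient => {
  const primary = knex({ client: "pg", connection: primaryUri });
  const replicas = replicaUris.map((uri) => knex({ client: "pg", connection: uri }));

  let cursor = 0;
  const replicaNode = () => {
    if (!replicas.length) return primary; // no replicas configured: read from the primary
    cursor = (cursor + 1) % replicas.length; // naive round-robin across replicas
    return replicas[cursor];
  };

  return Object.assign(primary, { replicaNode });
};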
@@ -58,7 +58,8 @@ const redactedKeys = [
   "decryptedSecret",
   "secrets",
   "key",
-  "password"
+  "password",
+  "config"
 ];

 export const initLogger = async () => {
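`initLogger` itself is not shown in this hunk, but lists like `redactedKeys` are typically fed into pino's `redact` option so that matching fields (now including `config`) are censored before logs are written. A hedged sketch of that pattern, with the key list truncated to what this hunk shows:

import pino from "pino";

// Only the keys visible in the hunk above; the real list is longer.
const redactedKeys = ["decryptedSecret", "secrets", "key", "password", "config"];

// Assumed wiring: censor matching fields at the top level and one level deep.
const logger = pino({
  redact: {
    paths: redactedKeys.flatMap((key) => [key, `*.${key}`]),
    censor: "[REDACTED]"
  }
});

logger.info({ config: { DB_PASSWORD: "hunter2" }, user: "jane" }, "boot");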
@@ -15,7 +15,11 @@ const run = async () => {
   const appCfg = initEnvConfig(logger);
   const db = initDbConnection({
     dbConnectionUri: appCfg.DB_CONNECTION_URI,
-    dbRootCert: appCfg.DB_ROOT_CERT
+    dbRootCert: appCfg.DB_ROOT_CERT,
+    readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({
+      dbRootCert: el.DB_ROOT_CERT,
+      dbConnectionUri: el.DB_CONNECTION_URI
+    }))
   });

   const smtp = smtpServiceFactory(formatSmtpConfig());
@@ -23,6 +23,7 @@ export enum QueueName {
   SecretPushEventScan = "secret-push-event-scan",
   UpgradeProjectToGhost = "upgrade-project-to-ghost",
   DynamicSecretRevocation = "dynamic-secret-revocation",
+  CaCrlRotation = "ca-crl-rotation",
   SecretReplication = "secret-replication",
   SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
 }
@@ -41,6 +42,7 @@ export enum QueueJobs {
   UpgradeProjectToGhost = "upgrade-project-to-ghost-job",
   DynamicSecretRevocation = "dynamic-secret-revocation",
   DynamicSecretPruning = "dynamic-secret-pruning",
+  CaCrlRotation = "ca-crl-rotation-job",
   SecretReplication = "secret-replication",
   SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
 }
@@ -55,7 +57,6 @@ export type TQueueJobTypes = {
     };
     name: QueueJobs.SecretReminder;
   };
-
   [QueueName.SecretRotation]: {
     payload: { rotationId: string };
     name: QueueJobs.SecretRotation;
@@ -121,6 +122,12 @@ export type TQueueJobTypes = {
       dynamicSecretCfgId: string;
     };
   };
+  [QueueName.CaCrlRotation]: {
+    name: QueueJobs.CaCrlRotation;
+    payload: {
+      caId: string;
+    };
+  };
   [QueueName.SecretReplication]: {
     name: QueueJobs.SecretReplication;
     payload: TSyncSecretsDTO;
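The new `CaCrlRotation` queue and job names imply a recurring job keyed by `caId`. The project's queue-service wrapper is not part of this hunk, so this is only a BullMQ-level sketch; the Redis connection and the daily repeat interval are assumptions:

import { Queue, Worker } from "bullmq";

const connection = { host: "localhost", port: 6379 }; // assumed Redis connection

// Producer side: schedule a repeating CRL rotation for one CA, reusing the names added above.
const caCrlQueue = new Queue("ca-crl-rotation", { connection });

export const scheduleCaCrlRotation = async (caId: string) =>
  caCrlQueue.add(
    "ca-crl-rotation-job",
    { caId },
    { repeat: { every: 24 * 60 * 60 * 1000 } } // interval is an assumption
  );

// Consumer side: regenerate and persist the CRL for the given CA (real body omitted).
new Worker<{ caId: string }>(
  "ca-crl-rotation",
  async (job) => {
    console.log(`rotating CRL for CA ${job.data.caId}`);
  },
  { connection }
);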
@@ -71,6 +71,7 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
   if (appCfg.isProductionMode) {
     await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
   }

   await server.register(helmet, { contentSecurityPolicy: false });

   await server.register(maintenanceMode);
@@ -1,6 +1,7 @@
 import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
 import { Redis } from "ioredis";

+import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
 import { getConfig } from "@app/lib/config/env";

 export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
@@ -21,14 +22,14 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
 // GET endpoints
 export const readLimit: RateLimitOptions = {
   timeWindow: 60 * 1000,
-  max: 600,
+  max: () => getRateLimiterConfig().readLimit,
   keyGenerator: (req) => req.realIp
 };

 // POST, PATCH, PUT, DELETE endpoints
 export const writeLimit: RateLimitOptions = {
   timeWindow: 60 * 1000,
-  max: 200, // (too low, FA having issues so increasing it - maidul)
+  max: () => getRateLimiterConfig().writeLimit,
   keyGenerator: (req) => req.realIp
 };

@@ -36,25 +37,25 @@ export const writeLimit: RateLimitOptions = {
 export const secretsLimit: RateLimitOptions = {
   // secrets, folders, secret imports
   timeWindow: 60 * 1000,
-  max: 60,
+  max: () => getRateLimiterConfig().secretsLimit,
   keyGenerator: (req) => req.realIp
 };

 export const authRateLimit: RateLimitOptions = {
   timeWindow: 60 * 1000,
-  max: 60,
+  max: () => getRateLimiterConfig().authRateLimit,
   keyGenerator: (req) => req.realIp
 };

 export const inviteUserRateLimit: RateLimitOptions = {
   timeWindow: 60 * 1000,
-  max: 30,
+  max: () => getRateLimiterConfig().inviteUserRateLimit,
   keyGenerator: (req) => req.realIp
 };

 export const mfaRateLimit: RateLimitOptions = {
   timeWindow: 60 * 1000,
-  max: 20,
+  max: () => getRateLimiterConfig().mfaRateLimit,
   keyGenerator: (req) => {
     return req.headers.authorization?.split(" ")[1] || req.realIp;
   }
@@ -63,14 +64,21 @@ export const mfaRateLimit: RateLimitOptions = {
 export const creationLimit: RateLimitOptions = {
   // identity, project, org
   timeWindow: 60 * 1000,
-  max: 30,
+  max: () => getRateLimiterConfig().creationLimit,
   keyGenerator: (req) => req.realIp
 };

 // Public endpoints to avoid brute force attacks
 export const publicEndpointLimit: RateLimitOptions = {
-  // Shared Secrets
+  // Read Shared Secrets
   timeWindow: 60 * 1000,
-  max: 30,
+  max: () => getRateLimiterConfig().publicEndpointLimit,
   keyGenerator: (req) => req.realIp
 };
+
+export const publicSecretShareCreationLimit: RateLimitOptions = {
+  // Create Shared Secrets
+  timeWindow: 60 * 1000,
+  max: 5,
+  keyGenerator: (req) => req.realIp
+};
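Because `max` is now a function, `@fastify/rate-limit` re-evaluates it against `getRateLimiterConfig()` at request time instead of baking in a constant. The exported option objects are still attached per route the same way; a small sketch, where the route path, handler, and the module path of the options are assumptions:

import type { FastifyInstance } from "fastify";

import { readLimit } from "@app/server/config/rateLimiter"; // assumed path for the options above

// Hypothetical route registration: @fastify/rate-limit reads `config.rateLimit` per route,
// so each request window uses whatever limits getRateLimiterConfig() currently returns.
export const registerExampleRoutes = async (server: FastifyInstance) => {
  server.get("/api/v1/example", { config: { rateLimit: readLimit } }, async () => ({ ok: true }));
};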
@@ -6,6 +6,7 @@ import {
   BadRequestError,
   DatabaseError,
   InternalServerError,
+  NotFoundError,
   ScimRequestError,
   UnauthorizedError
 } from "@app/lib/errors";
@@ -15,6 +16,8 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
     req.log.error(error);
     if (error instanceof BadRequestError) {
       void res.status(400).send({ statusCode: 400, message: error.message, error: error.name });
+    } else if (error instanceof NotFoundError) {
+      void res.status(404).send({ statusCode: 404, message: error.message, error: error.name });
     } else if (error instanceof UnauthorizedError) {
       void res.status(403).send({ statusCode: 403, message: error.message, error: error.name });
     } else if (error instanceof DatabaseError || error instanceof InternalServerError) {
Some files were not shown because too many files have changed in this diff.