Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-02 16:55:02 +00:00)
Compare commits: daniel/upd...databricks (248 commits)
Commits (SHA1), newest first:
30fb60b441 e531390922 e88ce49463 9214c93ece 7a3bfa9e4c 7aa0e8572c 296efa975c b3e72c338f 8c4c969bc2 0d424f332a
f0b6382f92 72780c61b4 c4da0305ba 4fdfdc1a39 d2cf296250 30284e3458 6f90d3befd f44888afa2 9877b0e5c4 b1e35d4b27
25f6947de5 ff8f1d3bfb b1b4cb1823 78f9ae7fab 20bdff0094 ccf19fbcd4 41c526371d 4685132409 3380d3e828 74d01c37de
4de8888843 b644829bb9 6845ac0f5e 4e48ab1eeb a6671b4355 6b3b13e40a 3ec14ca33a 732484d332 2f0b353c4e ddc819dda1
1b15cb4c35 f4e19f8a2e 501022752b 46821ca2ee 9602b864d4 8204e970a8 6671c42d0f b9dee1e6e8 648fde8f37 9eed67c21b
1e4164e1c2 cbbafcfa42 cc28ebd387 5f6870fda8 3e5a58eec4 4c7ae3475a 49797c3c13 7d9c5657aa eda4abb610 e341bbae9d
7286f9a9e6 1c9a9283ae 8d52011173 1b5b937db5 7b8b024654 a67badf660 ba42ea736b 6c7289ebe6 5cd6a66989 4e41e84491
85d71b1085 f27d483be0 9ee9d1c0e7 9d66659f72 70c9761abe 6047c4489b c9d7559983 66251403bf b9c4407507 d9a0cf8dd5
624be80768 8d7b5968d3 b7d4bb0ce2 598dea0dd3 7154b19703 9ce465b3e2 598e5c0be5 72f08a6b89 55d8762351 3c92ec4dc3
f2224262a4 23eac40740 4ae88c0447 7aecaad050 cf61390e52 3f02481e78 7adc103ed2 5bdbf37171 4f874734ab eb6fd8259b
1766a44dd0 624c9ef8da dfd4b13574 22b57b7a74 1ba0b9c204 a903537441 92c4d83714 a6414104ad 071f37666e cd5078d8b7
110d0e95b0 a8c0bbb7ca 6af8a4fab8 407fd8eda7 9d976de19b 43ecd31b74 be99e40050 800d2c0454 6d0534b165 ccee0f5428
14586c7cd0 7090eea716 01d3443139 c4b23a8d4f 90a2a11fff 95d7c2082c ab5eb4c696 65aeb81934 a406511405 61da0db49e
0968893d4b 59666740ca 9cc7edc869 e1b016f76d 1175b9b5af 09521144ec 8759944077 aac3c355e9 2a28a462a5 3328e0850f
216cae9b33 d24a5d96e3 89d4d4bc92 cffcb28bc9 61388753cf a6145120e6 dacffbef08 4db3e5d208 2a84d61862 a5945204ad
55b0dc7f81 ba03fc256b ea28c374a7 e99eb47cf4 cf107c0c0d 9fcb1c2161 70515a1ca2 955cf9303a a24ef46d7d ee49f714b9
657aca516f b5d60398d6 c3d515bb95 7f89a7c860 23cb05c16d d74b819f57 457056b600 7dc9ea4f6a 3b4b520d42 23f605bda7
1c3c8dbdce 317c95384e 7dd959e124 2049e5668f 0a3e99b334 c4ad0aa163 5bb0b7a508 96bcd42753 2c75e23acf 907dd4880a
6af7c5c371 72468d5428 939ee892e0 c7ec9ff816 554e268f88 a8a27c3045 27af943ee1 9b772ad55a 94a1fc2809 10c10642a1
3e0f04273c 91f2d0384e 811dc8dd75 4ee9375a8d 92f697e195 8062f0238b 1181c684db dda436bcd9 89124b18d2 effd88c4bd
27efc908e2 8e4226038b 27425a1a64 18cf3c89c1 49e6d7a861 c4446389b0 7c21dec54d 2ea5710896 f9ac7442df a534a4975c
79a616dc1c a93bfa69c9 598d14fc54 08a0550cd7 d7503573b1 b5a89edeed 860eaae4c8 c7a4b6c4e9 c12c6dcc6e 99c9b644df
d0d5556bd0 753c28a2d3 8741414cfa b8d29793ec 92013dbfbc c5319588fe 9efb8eaf78 dfc973c7f7 3013d1977c f358e8942d
58f51411c0 c3970d1ea2 2dc00a638a 94aed485a5 e382941424 bab9c1f454 2bd4770fb4 31905fab6e 784acf16d0 114b89c952
81420198cb 0ff18e277f e093f70301 8e2ff18f35 3fbfecf7a9 9087def21c 586dbd79b0 645dfafba0
.env.example — 18 lines changed

@@ -26,7 +26,8 @@ SITE_URL=http://localhost:8080
 # Mail/SMTP
 SMTP_HOST=
 SMTP_PORT=
-SMTP_NAME=
+SMTP_FROM_ADDRESS=
+SMTP_FROM_NAME=
 SMTP_USERNAME=
 SMTP_PASSWORD=

@@ -91,17 +92,24 @@ ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

 # App Connections

-# aws assume-role
+# aws assume-role connection
 INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
 INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

-# github oauth
+# github oauth connection
 INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
 INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

-#github app
+#github app connection
 INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
 INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
 INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
 INF_APP_CONNECTION_GITHUB_APP_SLUG=
 INF_APP_CONNECTION_GITHUB_APP_ID=

+#gcp app connection
+INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
+
+# azure app connection
+INF_APP_CONNECTION_AZURE_CLIENT_ID=
+INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
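For orientation only, a minimal TypeScript sketch of how the renamed mail variables might be read on the backend. SMTP_FROM_ADDRESS / SMTP_FROM_NAME replace the old SMTP_NAME; this helper and its defaults are assumptions for illustration, not Infisical's actual config loader.

// Hypothetical helper: reads the renamed SMTP variables from the environment.
import dotenv from "dotenv";

dotenv.config();

export const smtpConfig = {
  host: process.env.SMTP_HOST ?? "",
  port: Number(process.env.SMTP_PORT ?? 587),
  from: {
    // New variables introduced by this change.
    address: process.env.SMTP_FROM_ADDRESS ?? "",
    name: process.env.SMTP_FROM_NAME ?? ""
  },
  auth: {
    user: process.env.SMTP_USERNAME ?? "",
    pass: process.env.SMTP_PASSWORD ?? ""
  }
};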
.github/workflows/deployment-pipeline.yml (vendored) — entire file removed (262 lines)

@@ -1,262 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]

permissions:
  id-token: write
  contents: read

concurrency:
  group: "infisical-core-deployment"
  cancel-in-progress: true

jobs:
  infisical-tests:
    name: Integration tests
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile.standalone-infisical
          tags: |
            infisical/staging_infisical:${{ steps.commit.outputs.short }}
            infisical/staging_infisical:latest
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}

  gamma-deployment:
    name: Deploy to gamma
    runs-on: ubuntu-latest
    needs: [infisical-image]
    environment:
      name: Gamma
    steps:
      - uses: twingate/github-action@v1
        with:
          # The Twingate Service Key used to connect Twingate to the proper service
          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
          #
          # Required
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-gamma-stage
          cluster: infisical-gamma-stage
          wait-for-service-stability: true

  production-us:
    name: US production deploy
    runs-on: ubuntu-latest
    needs: [gamma-deployment]
    environment:
      name: Production
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
          AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
      - name: Post slack message
        uses: slackapi/slack-github-action@v2.0.0
        with:
          webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
          webhook-type: incoming-webhook
          payload: |
            text: "*Deployment Status Update*: ${{ job.status }}"
            blocks:
              - type: "section"
                text:
                  type: "mrkdwn"
                  text: "*Deployment Status Update*: ${{ job.status }}"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Application:*\nInfisical Core"
                  - type: "mrkdwn"
                    text: "*Instance Type:*\nShared Infisical Cloud"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Region:*\nUS"
                  - type: "mrkdwn"
                    text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"

  production-eu:
    name: EU production deploy
    runs-on: ubuntu-latest
    needs: [production-us]
    environment:
      name: production-eu
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
      - name: Post slack message
        uses: slackapi/slack-github-action@v2.0.0
        with:
          webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
          webhook-type: incoming-webhook
          payload: |
            text: "*Deployment Status Update*: ${{ job.status }}"
            blocks:
              - type: "section"
                text:
                  type: "mrkdwn"
                  text: "*Deployment Status Update*: ${{ job.status }}"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Application:*\nInfisical Core"
                  - type: "mrkdwn"
                    text: "*Instance Type:*\nShared Infisical Cloud"
              - type: "section"
                fields:
                  - type: "mrkdwn"
                    text: "*Region:*\nEU"
                  - type: "mrkdwn"
                    text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
@@ -1,4 +1,4 @@
-name: Release Helm Charts
+name: Release Infisical Core Helm chart
 
 on: [workflow_dispatch]
 
@@ -17,6 +17,6 @@ jobs:
       - name: Install Cloudsmith CLI
         run: pip install --upgrade cloudsmith-cli
       - name: Build and push helm package to Cloudsmith
-        run: cd helm-charts && sh upload-to-cloudsmith.sh
+        run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
         env:
           CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,4 +1,4 @@
-name: Release Docker image for K8 operator
+name: Release image + Helm chart K8s Operator
 on:
   push:
     tags:
@@ -35,3 +35,18 @@ jobs:
           tags: |
             infisical/kubernetes-operator:latest
             infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
+
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install Helm
+        uses: azure/setup-helm@v3
+        with:
+          version: v3.10.0
+      - name: Install python
+        uses: actions/setup-python@v4
+      - name: Install Cloudsmith CLI
+        run: pip install --upgrade cloudsmith-cli
+      - name: Build and push helm package to Cloudsmith
+        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
Makefile — 3 lines added

@@ -30,3 +30,6 @@ reviewable-api:
 	npm run type:check
 
 reviewable: reviewable-ui reviewable-api
+
+up-dev-sso:
+	docker compose -f docker-compose.dev.yml --profile sso up --build
@@ -125,7 +125,7 @@ Install pre commit hook to scan each commit before you push to your repository
 infisical scan install --pre-commit-hook
 ```
 
-Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
+Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
 
 ## Open-source vs. paid
 
@@ -23,14 +23,14 @@ export default {
   name: "knex-env",
   transformMode: "ssr",
   async setup() {
-    const logger = await initLogger();
-    const cfg = initEnvConfig(logger);
+    const logger = initLogger();
+    const envConfig = initEnvConfig(logger);
     const db = initDbConnection({
-      dbConnectionUri: cfg.DB_CONNECTION_URI,
-      dbRootCert: cfg.DB_ROOT_CERT
+      dbConnectionUri: envConfig.DB_CONNECTION_URI,
+      dbRootCert: envConfig.DB_ROOT_CERT
     });
 
-    const redis = new Redis(cfg.REDIS_URL);
+    const redis = new Redis(envConfig.REDIS_URL);
     await redis.flushdb("SYNC");
 
     try {
@@ -42,6 +42,7 @@ export default {
         },
         true
       );
 
       await db.migrate.latest({
         directory: path.join(__dirname, "../src/db/migrations"),
         extension: "ts",
@@ -52,14 +53,24 @@ export default {
         directory: path.join(__dirname, "../src/db/seeds"),
         extension: "ts"
       });
-      const smtp = mockSmtpServer();
-      const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
-      const keyStore = keyStoreFactory(cfg.REDIS_URL);
-
-      const hsmModule = initializeHsmModule();
+      const smtp = mockSmtpServer();
+      const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
+      const keyStore = keyStoreFactory(envConfig.REDIS_URL);
+
+      const hsmModule = initializeHsmModule(envConfig);
       hsmModule.initialize();
 
-      const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
+      const server = await main({
+        db,
+        smtp,
+        logger,
+        queue,
+        keyStore,
+        hsmModule: hsmModule.getModule(),
+        redis,
+        envConfig
+      });
 
       // @ts-expect-error type
       globalThis.testServer = server;
@@ -73,8 +84,8 @@ export default {
           organizationId: seedData1.organization.id,
           accessVersion: 1
         },
-        cfg.AUTH_SECRET,
-        { expiresIn: cfg.JWT_AUTH_LIFETIME }
+        envConfig.AUTH_SECRET,
+        { expiresIn: envConfig.JWT_AUTH_LIFETIME }
       );
     } catch (error) {
       // eslint-disable-next-line
@@ -109,3 +120,4 @@ export default {
     };
   }
 };
 
@@ -45,24 +45,24 @@
     "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
     "generate:component": "tsx ./scripts/create-backend-file.ts",
     "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
-    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
-    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
-    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
-    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
-    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
-    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
-    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
+    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
+    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
+    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
+    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
+    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
+    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
-    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
-    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
-    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
-    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
-    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
-    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
+    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
+    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
+    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
+    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
+    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
+    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
-    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
+    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
     "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
   },
   "keywords": [],
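As a rough illustration of what the updated scripts now drive through the knex CLI against the compiled dist/ output, here is a short TypeScript sketch expressed programmatically. The dist/ paths, table name, and .mjs extension mirror the diff above; the surrounding wrapper is an assumption, not Infisical's actual bootstrap code.

// Illustrative only: roughly equivalent to `npm run migration:latest` for the app DB.
import path from "node:path";
import knex from "knex";

const runLatestMigrations = async () => {
  const db = knex({
    client: "pg",
    connection: process.env.DB_CONNECTION_URI,
    migrations: {
      directory: path.join(__dirname, "dist/db/migrations"),
      tableName: "infisical_migrations",
      loadExtensions: [".mjs"]
    }
  });
  try {
    // knex returns the batch number and the list of applied migration files.
    const [batchNo, applied] = await db.migrate.latest();
    console.log(`Applied batch ${batchNo}:`, applied);
  } finally {
    await db.destroy();
  }
};

void runLatestMigrations();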
backend/src/@types/fastify.d.ts (vendored) — 6 lines added

@@ -93,6 +93,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
 import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
 import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
 
+declare module "@fastify/request-context" {
+  interface RequestContextData {
+    reqId: string;
+  }
+}
+
 declare module "fastify" {
   interface Session {
     callbackPort: string;
backend/src/auto-start-migrations.ts — new file, 105 lines

@@ -0,0 +1,105 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
  auditLogDb?: Knex;
  applicationDb: Knex;
  logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
  directory: path.join(__dirname, "./db/migrations"),
  loadExtensions: [".mjs", ".ts"],
  tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
  // happens for first time in which the migration table itself is not created yet
  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
  if (err?.message?.includes("does not exist")) {
    return true;
  }
  throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
  try {
    // akhilmhdh(Feb 10 2025): 2 years from now remove this
    if (isProduction) {
      const migrationTable = migrationConfig.tableName;
      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
      if (hasMigrationTable) {
        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
        if (firstFile?.name?.includes(".ts")) {
          await applicationDb(migrationTable).update({
            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
          });
        }
      }
      if (auditLogDb) {
        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
        if (hasMigrationTableInAuditLog) {
          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
          if (firstFile?.name?.includes(".ts")) {
            await auditLogDb(migrationTable).update({
              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
            });
          }
        }
      }
    }

    const shouldRunMigration = Boolean(
      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
    ); // db.length - code.length
    if (!shouldRunMigration) {
      logger.info("No migrations pending: Skipping migration process.");
      return;
    }

    if (auditLogDb) {
      await auditLogDb.transaction(async (tx) => {
        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
        logger.info("Running audit log migrations.");

        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
          .status(migrationConfig)
          .catch(migrationStatusCheckErrorHandler));
        if (didPreviousInstanceRunMigration) {
          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
          return;
        }

        await auditLogDb.migrate.latest(migrationConfig);
        logger.info("Finished audit log migrations.");
      });
    }

    await applicationDb.transaction(async (tx) => {
      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
      logger.info("Running application migrations.");

      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
        .status(migrationConfig)
        .catch(migrationStatusCheckErrorHandler));
      if (didPreviousInstanceRunMigration) {
        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
        return;
      }

      await applicationDb.migrate.latest(migrationConfig);
      logger.info("Finished application migrations.");
    });
  } catch (err) {
    logger.error(err, "Boot up migration failed");
    process.exit(1);
  }
};
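For context, a hedged TypeScript sketch of a possible call site for runMigrations at server boot. The helper names (initDbConnection, initAuditLogDbConnection, initLogger) appear elsewhere in this diff, but their exact signatures, import paths, and this wiring are assumptions for illustration only.

// Hypothetical boot wiring: run pending migrations before the API starts serving.
import { runMigrations } from "./auto-start-migrations";
import { initAuditLogDbConnection, initDbConnection } from "./db/instance"; // path assumed
import { initLogger } from "./lib/logger";

const bootstrap = async () => {
  const logger = initLogger();
  const applicationDb = initDbConnection({
    dbConnectionUri: process.env.DB_CONNECTION_URI as string
  });
  const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({ dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI })
    : undefined;

  // The advisory lock inside runMigrations keeps concurrent instances from racing.
  await runMigrations({ applicationDb, auditLogDb, logger });
};

void bootstrap();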
@@ -49,6 +49,9 @@ export const initDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
+    },
+    migrations: {
+      tableName: "infisical_migrations"
     }
   });
 
@@ -64,6 +67,9 @@ export const initDbConnection = ({
            ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
          }
        : false
+      },
+      migrations: {
+        tableName: "infisical_migrations"
       }
     });
   });
@@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
+    },
+    migrations: {
+      tableName: "infisical_migrations"
     }
   });
 
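A minimal sketch of what the added migrations block changes in practice: knex records applied migrations in a dedicated "infisical_migrations" table rather than the default "knex_migrations". The helper below assumes a Knex instance produced by initDbConnection and only reads knex's standard bookkeeping columns.

// Illustrative helper (assumed, not from the diff): list migrations knex has applied.
import { Knex } from "knex";

export const listAppliedMigrations = async (db: Knex) =>
  db("infisical_migrations").select("name", "batch", "migration_time").orderBy("batch");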
@@ -38,7 +38,8 @@ export default {
       directory: "./seeds"
     },
     migrations: {
-      tableName: "infisical_migrations"
+      tableName: "infisical_migrations",
+      loadExtensions: [".mjs"]
     }
   },
   production: {
@@ -62,7 +63,8 @@ export default {
       max: 10
     },
     migrations: {
-      tableName: "infisical_migrations"
+      tableName: "infisical_migrations",
+      loadExtensions: [".mjs"]
     }
   }
 } as Knex.Config;
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");

  await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
    if (!hasManageGroupMembershipsCol) {
      tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");

  await knex.schema.alterTable(TableName.OidcConfig, (t) => {
    if (hasManageGroupMembershipsCol) {
      t.dropColumn("manageGroupMemberships");
    }
  });
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.unique(["orgId", "name"]);
  });

  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.unique(["projectId", "name"]);
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.dropUnique(["orgId", "name"]);
  });

  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.dropUnique(["projectId", "name"]);
  });
}
@@ -0,0 +1,37 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
  const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
  const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
    TableName.IdentityGcpAuth,
    "allowedServiceAccounts"
  );
  const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
  if (hasTable) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
      if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
      if (hasAllowedZones) t.string("allowedZones", 2500).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
  const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
  const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
    TableName.IdentityGcpAuth,
    "allowedServiceAccounts"
  );
  const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
  if (hasTable) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
      if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
      if (hasAllowedZones) t.string("allowedZones").alter();
    });
  }
}
@@ -0,0 +1,27 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.dropColumn("slug");
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (!hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.string("slug", 32);
      });
    }
  }
}
@@ -0,0 +1,31 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
      if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
      if (hasLastImportMessage) t.string("lastImportMessage").alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
    });
  }
}
backend/src/db/migrations/20250210101840_webhook-to-kms.ts — new file, 130 lines

@@ -0,0 +1,130 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
  const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedKey) t.binary("encryptedPassKey");
      if (!hasEncryptedUrl) t.binary("encryptedUrl");
      if (hasUrl) t.string("url").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const webhooks = await knex(TableName.Webhook)
    .where({})
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
    .select(
      "url",
      "encryptedSecretKey",
      "iv",
      "tag",
      "keyEncoding",
      "urlCipherText",
      "urlIV",
      "urlTag",
      knex.ref("id").withSchema(TableName.Webhook),
      "envId"
    )
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedWebhooks = await Promise.all(
    webhooks.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId: el.projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      let encryptedSecretKey = null;
      if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
        const decyptedSecretKey = infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.iv,
          tag: el.tag,
          ciphertext: el.encryptedSecretKey
        });
        encryptedSecretKey = projectKmsService.encryptor({
          plainText: Buffer.from(decyptedSecretKey, "utf8")
        }).cipherTextBlob;
      }

      const decryptedUrl =
        el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.urlIV,
              tag: el.urlTag,
              ciphertext: el.urlCipherText
            })
          : null;

      const encryptedUrl = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedUrl || el.url || "")
      }).cipherTextBlob;
      return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
    })
  );

  for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.Webhook)
      .insert(
        updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
          id: el.id,
          envId: el.envId,
          url: "",
          encryptedUrl: el.encryptedUrl,
          encryptedPassKey: el.encryptedSecretKey
        }))
      )
      .onConflict("id")
      .merge();
  }

  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
      if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
    });
  }
}
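The migration above (and the similar ones that follow) reuses a small fixed-size cache so that kmsService.createCipherPairWithDataKey is only invoked once per project while rows are processed ordered by projectId. As a hedged illustration of that caching idea, here is a self-contained TypeScript sketch; this generic helper is an assumption, the real implementation lives in ./utils/ring-buffer as createCircularCache.

// Illustrative fixed-capacity key/value cache with oldest-entry eviction.
export const createSimpleCircularCache = <T>(capacity: number) => {
  const order: string[] = [];
  const store = new Map<string, T>();
  return {
    getItem: (key: string): T | undefined => store.get(key),
    push: (key: string, value: T): void => {
      if (!store.has(key) && order.length >= capacity) {
        // Evict the oldest cached entry to make room.
        const evicted = order.shift();
        if (evicted !== undefined) store.delete(evicted);
      }
      if (!store.has(key)) order.push(key);
      store.set(key, value);
    }
  };
};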
@@ -0,0 +1,111 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
  const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
  const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
  const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput");
      if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
      if (hasInputIVColumn) t.string("inputIV").nullable().alter();
      if (hasInputTagColumn) t.string("inputTag").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
    .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
    .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
    .select(selectAllTableCols(TableName.DynamicSecret))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedDynamicSecrets = await Promise.all(
    dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedInputData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.inputIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.inputTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.inputCiphertext
            })
          : "";

      const encryptedInput = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedInputData)
      }).cipherTextBlob;

      return { ...el, encryptedInput };
    })
  );

  for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.DynamicSecret)
      .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
    });
  }
}
@ -0,0 +1,103 @@
|
|||||||
|
import { Knex } from "knex";
|
||||||
|
|
||||||
|
import { inMemoryKeyStore } from "@app/keystore/memory";
|
||||||
|
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||||
|
import { selectAllTableCols } from "@app/lib/knex";
|
||||||
|
import { initLogger } from "@app/lib/logger";
|
||||||
|
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||||
|
|
||||||
|
import { SecretKeyEncoding, TableName } from "../schemas";
|
||||||
|
import { getMigrationEnvConfig } from "./utils/env-config";
|
||||||
|
import { createCircularCache } from "./utils/ring-buffer";
|
||||||
|
import { getMigrationEncryptionServices } from "./utils/services";
|
||||||
|
|
||||||
|
const BATCH_SIZE = 500;
|
||||||
|
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const secretRotations = await knex(TableName.SecretRotation)
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
    .select(selectAllTableCols(TableName.SecretRotation))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedRotationData = await Promise.all(
    secretRotations.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedRotationData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.encryptedDataIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.encryptedDataTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.encryptedData
            })
          : "";

      const encryptedRotationData = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedRotationData)
      }).cipherTextBlob;
      return { ...el, encryptedRotationData };
    })
  );

  for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SecretRotation)
      .insert(updatedRotationData.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
    });
  }
}
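The write-back step repeated throughout these re-encryption migrations can be summarized as a small helper; a minimal sketch, assuming a table keyed by `id` (the helper name is illustrative and not part of the diff):

import { Knex } from "knex";

const BATCH_SIZE = 500;

// Re-encrypted rows are flushed in chunks and merged on the primary key,
// so re-running the migration does not duplicate rows.
const batchUpsert = async <T extends { id: string }>(knex: Knex, table: string, rows: T[]): Promise<void> => {
  for (let i = 0; i < rows.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(table)
      .insert(rows.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
};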
@@ -0,0 +1,200 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
  const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedTokenReviewerJwt"
  );
  const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtIV"
  );
  const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtTag"
  );

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
      if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
      if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();

      if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityKubernetesAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityKubernetesConfigs = [];

  for await (const {
    encryptedSymmetricKey,
    symmetricKeyKeyEncoding,
    symmetricKeyTag,
    symmetricKeyIV,
    orgId,
    ...el
  } of identityKubernetesConfigs) {
    let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);

    if (!orgKmsService) {
      orgKmsService = await kmsService.createCipherPairWithDataKey(
        {
          type: KmsDataKey.Organization,
          orgId
        },
        knex
      );
      orgEncryptionRingBuffer.push(orgId, orgKmsService);
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const decryptedTokenReviewerJwt =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.tokenReviewerJwtIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.tokenReviewerJwtTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedTokenReviewerJwt
          })
        : "";

    const decryptedCertificate =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedCaCert && el.caCertIV && el.caCertTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.caCertIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.caCertTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedCaCert
          })
        : "";

    const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedTokenReviewerJwt)
    }).cipherTextBlob;
    const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedCertificate)
    }).cipherTextBlob;

    updatedIdentityKubernetesConfigs.push({
      ...el,
      accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
      encryptedKubernetesCaCertificate,
      encryptedKubernetesTokenReviewerJwt
    });
  }

  for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityKubernetesAuth)
      .insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (!hasEncryptedKubernetesTokenReviewerJwt)
        t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityK8sAuth(knex);
}

const dropIdentityK8sColumns = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityK8sColumns(knex);
}
@@ -0,0 +1,141 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");

  if (hasidentityOidcAuthTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();

      if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityOidcAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityOidcAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityOidcConfigs = await Promise.all(
    identityOidcConfig.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCaCert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCaCert
              })
            : "";

        const encryptedCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;

        return {
          ...el,
          accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
          encryptedCaCertificate
        };
      }
    )
  );

  for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityOidcAuth)
      .insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityOidcAuth(knex);
}

const dropIdentityOidcColumns = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  if (hasidentityOidcTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityOidcColumns(knex);
}
@@ -0,0 +1,493 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const samlConfigs = await knex(TableName.SamlConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
    .select(selectAllTableCols(TableName.SamlConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedSamlConfigs = await Promise.all(
    samlConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedEntryPoint =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.entryPointIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.entryPointTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedEntryPoint
              })
            : "";

        const decryptedIssuer =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedIssuer && el.issuerIV && el.issuerTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.issuerIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.issuerTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedIssuer
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCert && el.certIV && el.certTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.certIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.certTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCert
              })
            : "";

        const encryptedSamlIssuer = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedIssuer)
        }).cipherTextBlob;
        const encryptedSamlCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        const encryptedSamlEntryPoint = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedEntryPoint)
        }).cipherTextBlob;
        return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
      }
    )
  );

  for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SamlConfig)
      .insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
    });
  }
};

const reencryptLdapConfig = async (knex: Knex) => {
  const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
  const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
  const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
  const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
  const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
  const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
  const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
      if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
      if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
      if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
      if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
      if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();

      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const ldapConfigs = await knex(TableName.LdapConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
    .select(selectAllTableCols(TableName.LdapConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedLdapConfigs = await Promise.all(
    ldapConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedBindDN =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindDN && el.bindDNIV && el.bindDNTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindDNIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindDNTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindDN
              })
            : "";

        const decryptedBindPass =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindPass && el.bindPassIV && el.bindPassTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindPassIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindPassTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindPass
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCACert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCACert
              })
            : "";

        const encryptedLdapBindDN = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindDN)
        }).cipherTextBlob;
        const encryptedLdapBindPass = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindPass)
        }).cipherTextBlob;
        const encryptedLdapCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
      }
    )
  );

  for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.LdapConfig)
      .insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
    });
  }
};

const reencryptOidcConfig = async (knex: Knex) => {
  const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
    TableName.OidcConfig,
    "encryptedOidcClientSecret"
  );

  const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
  const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
  const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
  const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
  const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
  const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");

  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
      if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
      if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
      if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
      if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
      if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();

      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const oidcConfigs = await knex(TableName.OidcConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
    .select(selectAllTableCols(TableName.OidcConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedOidcConfigs = await Promise.all(
    oidcConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedClientId =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientId && el.clientIdIV && el.clientIdTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientIdIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientIdTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientId
              })
            : "";

        const decryptedClientSecret =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientSecretIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientSecretTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientSecret
              })
            : "";

        const encryptedOidcClientId = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientId)
        }).cipherTextBlob;
        const encryptedOidcClientSecret = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientSecret)
        }).cipherTextBlob;
        return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
      }
    )
  );

  for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.OidcConfig)
      .insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptSamlConfig(knex);
  await reencryptLdapConfig(knex);
  await reencryptOidcConfig(knex);
}

const dropSamlConfigColumns = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
      if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
    });
  }
};

const dropLdapConfigColumns = async (knex: Knex) => {
  const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
      if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
    });
  }
};

const dropOidcConfigColumns = async (knex: Knex) => {
  const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
      if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropSamlConfigColumns(knex);
  await dropLdapConfigColumns(knex);
  await dropOidcConfigColumns(knex);
}
backend/src/db/migrations/utils/env-config.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { z } from "zod";

import { zpStr } from "@app/lib/zod";

const envSchema = z
  .object({
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
    DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
    // TODO(akhilmhdh): will be changed to one
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
  }));

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(
      "Migration is now automatic at startup. Please remove this step from your workflow and start the application as normal."
    );
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
    process.exit(-1);
  }

  return Object.freeze(parsedEnv.data);
};
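A minimal usage sketch for the config helper above, assuming it is called from a migration under backend/src/db/migrations; the placeholder values are illustrative only and not part of the diff:

import { getMigrationEnvConfig } from "./utils/env-config";

// Illustrative placeholders; real deployments provide these via the environment.
process.env.DB_CONNECTION_URI ??= "postgresql://user:pass@localhost:5432/infisical";
process.env.ENCRYPTION_KEY ??= "local-dev-only-key";

const envConfig = getMigrationEnvConfig();
// Parsing exits the process when neither ENCRYPTION_KEY nor ROOT_ENCRYPTION_KEY is set;
// envConfig.isHsmConfigured is true only when HSM_LIB_PATH, HSM_PIN and HSM_KEY_LABEL are all present.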
@@ -1,105 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";

const getInstanceRootKey = async (knex: Knex) => {
  const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
  // if root key its base64 encoded
  const isBase64 = !process.env.ENCRYPTION_KEY;
  if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
  const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");

  const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
  const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  if (kmsRootConfig) {
    const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
    // set the flag so that other instancen nodes can start
    return decryptedRootKey;
  }

  const newRootKey = randomSecureBytes(32);
  const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
  await knex(TableName.KmsServerRootConfig).insert({
    encryptedRootKey,
    // eslint-disable-next-line
    // @ts-ignore id is kept as fixed for idempotence and to avoid race condition
    id: KMS_ROOT_CONFIG_UUID
  });
  return encryptedRootKey;
};

export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
  const KMS_VERSION = "v01";
  const KMS_VERSION_BLOB_LENGTH = 3;
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  const project = await knex(TableName.Project).where({ id: projectId }).first();
  if (!project) throw new Error("Missing project id");

  const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);

  let secretManagerKmsKey;
  const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
  if (projectSecretManagerKmsId) {
    const kmsDoc = await knex(TableName.KmsKey)
      .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
      .where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
      .first();
    if (!kmsDoc) throw new Error("missing kms");
    secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
  } else {
    const [kmsDoc] = await knex(TableName.KmsKey)
      .insert({
        name: slugify(alphaNumericNanoId(8).toLowerCase()),
        orgId: project.orgId,
        isReserved: false
      })
      .returning("*");

    secretManagerKmsKey = randomSecureBytes(32);
    const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
    await knex(TableName.InternalKms).insert({
      version: 1,
      encryptedKey: encryptedKeyMaterial,
      encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
      kmsKeyId: kmsDoc.id
    });
  }

  const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
  let dataKey: Buffer;
  if (!encryptedSecretManagerDataKey) {
    dataKey = randomSecureBytes();
    // the below versioning we do it automatically in kms service
    const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
    const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
    await knex(TableName.Project)
      .where({ id: projectId })
      .update({
        kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
      });
  } else {
    const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
    dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
  }

  return {
    encryptor: ({ plainText }: { plainText: Buffer }) => {
      const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);

      // Buffer#1 encrypted text + Buffer#2 version number
      const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
      const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
      return { cipherTextBlob };
    },
    decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
      const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
      const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
      return decryptedBlob;
    }
  };
};
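The deleted helper's ciphertext framing is small enough to restate on its own; a minimal sketch of the same convention (the function names here are illustrative, not from the diff):

const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;

// Encryption appends the 3-byte version marker to the AES-GCM ciphertext;
// decryption strips it before handing the blob to the cipher.
const addVersionBlob = (encrypted: Buffer): Buffer =>
  Buffer.concat([encrypted, Buffer.from(KMS_VERSION, "utf8")]);
const stripVersionBlob = (versioned: Buffer): Buffer =>
  versioned.subarray(0, -KMS_VERSION_BLOB_LENGTH);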
backend/src/db/migrations/utils/ring-buffer.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
export const createCircularCache = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
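A short usage sketch for the circular cache above, mirroring how the migrations use it to avoid re-deriving a KMS cipher pair for every row; the cached value here is a stand-in:

import { createCircularCache } from "./utils/ring-buffer";

// Keep at most 25 entries; once full, the oldest slot is overwritten.
const cache = createCircularCache<{ label: string }>(25);

cache.push("org-1", { label: "cipher pair for org-1" });
cache.getItem("org-1"); // { label: "cipher pair for org-1" }
cache.getItem("org-2"); // undefined, so the caller derives the value and pushes it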
backend/src/db/migrations/utils/services.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
import { Knex } from "knex";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";

import { TMigrationEnvConfig } from "./env-config";

type TDependencies = {
  envConfig: TMigrationEnvConfig;
  db: Knex;
  keyStore: TKeyStoreFactory;
};

export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
  // eslint-disable-next-line no-param-reassign
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const projectDAL = projectDALFactory(db);

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  return { kmsService };
};
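Putting the three utilities together inside a migration follows the same shape as the files above; a minimal wiring sketch, with the migration body elided:

import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { initLogger } from "@app/lib/logger";

import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";

export async function up(knex: Knex): Promise<void> {
  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

  // kmsService.createCipherPairWithDataKey({ type, orgId | projectId }, knex) then yields the
  // { encryptor, decryptor } pair used by the re-encryption migrations above.
}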
56
backend/src/db/rename-migrations-to-mjs.ts
Normal file
56
backend/src/db/rename-migrations-to-mjs.ts
Normal file
@ -0,0 +1,56 @@
import path from "node:path";

import dotenv from "dotenv";

import { initAuditLogDbConnection, initDbConnection } from "./instance";

const isProduction = process.env.NODE_ENV === "production";

// Update with your config settings. .
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

const runRename = async () => {
  if (!isProduction) return;
  const migrationTable = "infisical_migrations";
  const applicationDb = initDbConnection({
    dbConnectionUri: process.env.DB_CONNECTION_URI as string,
    dbRootCert: process.env.DB_ROOT_CERT
  });

  const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
        dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
        dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

  const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
  if (hasMigrationTable) {
    const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
    if (firstFile?.name?.includes(".ts")) {
      await applicationDb(migrationTable).update({
        name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
      });
    }
  }
  if (auditLogDb) {
    const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
    if (hasMigrationTableInAuditLog) {
      const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
      if (firstFile?.name?.includes(".ts")) {
        await auditLogDb(migrationTable).update({
          name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
        });
      }
    }
  }
  await applicationDb.destroy();
  await auditLogDb?.destroy();
};

void runRename();
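The script above leans on SQL's REPLACE to rewrite the extension recorded in the knex migration table. A standalone sketch of the same transformation, with made-up file names purely for illustration:

// Illustration only: the ".ts" -> ".mjs" rename the script performs via
// applicationDb.raw("REPLACE(name, '.ts', '.mjs')"), applied to sample rows.
const migrationNames = ["20240701000000_kms-keys.ts", "20240702000000_webhooks.ts"]; // hypothetical rows
const renamed = migrationNames.map((name) => name.replaceAll(".ts", ".mjs"));
console.log(renamed); // ["20240701000000_kms-keys.mjs", "20240702000000_webhooks.mjs"]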
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const DynamicSecretsSchema = z.object({
@@ -14,16 +16,17 @@ export const DynamicSecretsSchema = z.object({
   type: z.string(),
   defaultTTL: z.string(),
   maxTTL: z.string().nullable().optional(),
-  inputIV: z.string(),
-  inputCiphertext: z.string(),
-  inputTag: z.string(),
+  inputIV: z.string().nullable().optional(),
+  inputCiphertext: z.string().nullable().optional(),
+  inputTag: z.string().nullable().optional(),
   algorithm: z.string().default("aes-256-gcm"),
   keyEncoding: z.string().default("utf8"),
   folderId: z.string().uuid(),
   status: z.string().nullable().optional(),
   statusDetails: z.string().nullable().optional(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  encryptedInput: zodBuffer
 });
 
 export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
@@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
   updatedAt: z.date(),
   identityId: z.string().uuid(),
   type: z.string(),
-  allowedServiceAccounts: z.string(),
-  allowedProjects: z.string(),
-  allowedZones: z.string()
+  allowedServiceAccounts: z.string().nullable().optional(),
+  allowedProjects: z.string().nullable().optional(),
+  allowedZones: z.string().nullable().optional()
 });
 
 export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const IdentityKubernetesAuthsSchema = z.object({
@@ -17,15 +19,17 @@ export const IdentityKubernetesAuthsSchema = z.object({
   updatedAt: z.date(),
   identityId: z.string().uuid(),
   kubernetesHost: z.string(),
-  encryptedCaCert: z.string(),
-  caCertIV: z.string(),
-  caCertTag: z.string(),
-  encryptedTokenReviewerJwt: z.string(),
-  tokenReviewerJwtIV: z.string(),
-  tokenReviewerJwtTag: z.string(),
+  encryptedCaCert: z.string().nullable().optional(),
+  caCertIV: z.string().nullable().optional(),
+  caCertTag: z.string().nullable().optional(),
+  encryptedTokenReviewerJwt: z.string().nullable().optional(),
+  tokenReviewerJwtIV: z.string().nullable().optional(),
+  tokenReviewerJwtTag: z.string().nullable().optional(),
   allowedNamespaces: z.string(),
   allowedNames: z.string(),
-  allowedAudience: z.string()
+  allowedAudience: z.string(),
+  encryptedKubernetesTokenReviewerJwt: zodBuffer,
+  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
 });
 
 export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const IdentityOidcAuthsSchema = z.object({
@@ -15,15 +17,16 @@ export const IdentityOidcAuthsSchema = z.object({
   accessTokenTrustedIps: z.unknown(),
   identityId: z.string().uuid(),
   oidcDiscoveryUrl: z.string(),
-  encryptedCaCert: z.string(),
-  caCertIV: z.string(),
-  caCertTag: z.string(),
+  encryptedCaCert: z.string().nullable().optional(),
+  caCertIV: z.string().nullable().optional(),
+  caCertTag: z.string().nullable().optional(),
   boundIssuer: z.string(),
   boundAudiences: z.string(),
   boundClaims: z.unknown(),
   boundSubject: z.string().nullable().optional(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  encryptedCaCertificate: zodBuffer.nullable().optional()
 });
 
 export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
@@ -16,8 +16,7 @@ export const KmsKeysSchema = z.object({
   name: z.string(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  projectId: z.string().nullable().optional(),
-  slug: z.string().nullable().optional()
+  projectId: z.string().nullable().optional()
 });
 
 export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
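The zodBuffer helper referenced throughout these schema changes lives in @app/lib/zod and is not shown in this diff. A minimal stand-in with the same intent, validating a Node Buffer for the new encrypted* columns, might look like this sketch (the real helper may differ, e.g. by also accepting serialized buffers):

// Hypothetical stand-in for the zodBuffer validator used above.
import { z } from "zod";

const zodBuffer = z.custom<Buffer>((value) => Buffer.isBuffer(value), {
  message: "Expected a Buffer"
});

// Example: a pared-down schema mirroring the new encryptedInput column shape.
const ExampleSchema = z.object({
  encryptedInput: zodBuffer,
  encryptedCaCertificate: zodBuffer.nullable().optional()
});

console.log(ExampleSchema.safeParse({ encryptedInput: Buffer.from("ciphertext") }).success); // true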
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const LdapConfigsSchema = z.object({
@@ -12,22 +14,25 @@ export const LdapConfigsSchema = z.object({
   orgId: z.string().uuid(),
   isActive: z.boolean(),
   url: z.string(),
-  encryptedBindDN: z.string(),
-  bindDNIV: z.string(),
-  bindDNTag: z.string(),
-  encryptedBindPass: z.string(),
-  bindPassIV: z.string(),
-  bindPassTag: z.string(),
+  encryptedBindDN: z.string().nullable().optional(),
+  bindDNIV: z.string().nullable().optional(),
+  bindDNTag: z.string().nullable().optional(),
+  encryptedBindPass: z.string().nullable().optional(),
+  bindPassIV: z.string().nullable().optional(),
+  bindPassTag: z.string().nullable().optional(),
   searchBase: z.string(),
-  encryptedCACert: z.string(),
-  caCertIV: z.string(),
-  caCertTag: z.string(),
+  encryptedCACert: z.string().nullable().optional(),
+  caCertIV: z.string().nullable().optional(),
+  caCertTag: z.string().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
   groupSearchBase: z.string().default(""),
   groupSearchFilter: z.string().default(""),
   searchFilter: z.string().default(""),
-  uniqueUserAttribute: z.string().default("")
+  uniqueUserAttribute: z.string().default(""),
+  encryptedLdapBindDN: zodBuffer,
+  encryptedLdapBindPass: zodBuffer,
+  encryptedLdapCaCertificate: zodBuffer.nullable().optional()
 });
 
 export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const OidcConfigsSchema = z.object({
@@ -15,19 +17,22 @@ export const OidcConfigsSchema = z.object({
   jwksUri: z.string().nullable().optional(),
   tokenEndpoint: z.string().nullable().optional(),
   userinfoEndpoint: z.string().nullable().optional(),
-  encryptedClientId: z.string(),
+  encryptedClientId: z.string().nullable().optional(),
   configurationType: z.string(),
-  clientIdIV: z.string(),
-  clientIdTag: z.string(),
-  encryptedClientSecret: z.string(),
-  clientSecretIV: z.string(),
-  clientSecretTag: z.string(),
+  clientIdIV: z.string().nullable().optional(),
+  clientIdTag: z.string().nullable().optional(),
+  encryptedClientSecret: z.string().nullable().optional(),
+  clientSecretIV: z.string().nullable().optional(),
+  clientSecretTag: z.string().nullable().optional(),
   allowedEmailDomains: z.string().nullable().optional(),
   isActive: z.boolean(),
   createdAt: z.date(),
   updatedAt: z.date(),
   orgId: z.string().uuid(),
-  lastUsed: z.date().nullable().optional()
+  lastUsed: z.date().nullable().optional(),
+  manageGroupMemberships: z.boolean().default(false),
+  encryptedOidcClientId: zodBuffer,
+  encryptedOidcClientSecret: zodBuffer
 });
 
 export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const SamlConfigsSchema = z.object({
@@ -23,7 +25,10 @@ export const SamlConfigsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   orgId: z.string().uuid(),
-  lastUsed: z.date().nullable().optional()
+  lastUsed: z.date().nullable().optional(),
+  encryptedSamlEntryPoint: zodBuffer,
+  encryptedSamlIssuer: zodBuffer,
+  encryptedSamlCertificate: zodBuffer
 });
 
 export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
@@ -5,6 +5,8 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const SecretRotationsSchema = z.object({
@@ -22,7 +24,8 @@ export const SecretRotationsSchema = z.object({
   keyEncoding: z.string().nullable().optional(),
   envId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  encryptedRotationData: zodBuffer
 });
 
 export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
@@ -5,12 +5,14 @@
 import { z } from "zod";
 
+import { zodBuffer } from "@app/lib/zod";
+
 import { TImmutableDBKeys } from "./models";
 
 export const WebhooksSchema = z.object({
   id: z.string().uuid(),
   secretPath: z.string().default("/"),
-  url: z.string(),
+  url: z.string().nullable().optional(),
   lastStatus: z.string().nullable().optional(),
   lastRunErrorMessage: z.string().nullable().optional(),
   isDisabled: z.boolean().default(false),
@@ -25,7 +27,9 @@ export const WebhooksSchema = z.object({
   urlCipherText: z.string().nullable().optional(),
   urlIV: z.string().nullable().optional(),
   urlTag: z.string().nullable().optional(),
-  type: z.string().default("general").nullable().optional()
+  type: z.string().default("general").nullable().optional(),
+  encryptedPassKey: zodBuffer.nullable().optional(),
+  encryptedUrl: zodBuffer
 });
 
 export type TWebhooks = z.infer<typeof WebhooksSchema>;
@@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
 import LdapStrategy from "passport-ldapauth";
 import { z } from "zod";
 
-import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
+import { LdapGroupMapsSchema } from "@app/db/schemas";
 import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
 import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
 import { getConfig } from "@app/lib/config/env";
@@ -22,6 +22,7 @@ import { BadRequestError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
 import { AuthMode } from "@app/services/auth/auth-type";
 
 export const registerLdapRouter = async (server: FastifyZodProvider) => {
@@ -187,7 +188,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
         caCert: z.string().trim().default("")
       }),
       response: {
-        200: LdapConfigsSchema
+        200: SanitizedLdapConfigSchema
       }
     },
     handler: async (req) => {
@@ -228,7 +229,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
         .partial()
         .merge(z.object({ organizationId: z.string() })),
       response: {
-        200: LdapConfigsSchema
+        200: SanitizedLdapConfigSchema
      }
     },
     handler: async (req) => {
@@ -11,13 +11,28 @@ import fastifySession from "@fastify/session";
 import RedisStore from "connect-redis";
 import { z } from "zod";
 
-import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
+import { OidcConfigsSchema } from "@app/db/schemas";
 import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
 import { getConfig } from "@app/lib/config/env";
 import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
 
+const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
+  id: true,
+  issuer: true,
+  authorizationEndpoint: true,
+  configurationType: true,
+  discoveryURL: true,
+  jwksUri: true,
+  tokenEndpoint: true,
+  userinfoEndpoint: true,
+  orgId: true,
+  isActive: true,
+  allowedEmailDomains: true,
+  manageGroupMemberships: true
+});
+
 export const registerOidcRouter = async (server: FastifyZodProvider) => {
   const appCfg = getConfig();
   const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
@@ -142,7 +157,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
         orgSlug: z.string().trim()
       }),
       response: {
-        200: OidcConfigsSchema.pick({
+        200: SanitizedOidcConfigSchema.pick({
           id: true,
           issuer: true,
           authorizationEndpoint: true,
@@ -153,7 +168,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
           discoveryURL: true,
           isActive: true,
           orgId: true,
-          allowedEmailDomains: true
+          allowedEmailDomains: true,
+          manageGroupMemberships: true
         }).extend({
           clientId: z.string(),
           clientSecret: z.string()
@@ -207,12 +223,13 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
           userinfoEndpoint: z.string().trim(),
           clientId: z.string().trim(),
           clientSecret: z.string().trim(),
-          isActive: z.boolean()
+          isActive: z.boolean(),
+          manageGroupMemberships: z.boolean().optional()
         })
           .partial()
           .merge(z.object({ orgSlug: z.string() })),
       response: {
-        200: OidcConfigsSchema.pick({
+        200: SanitizedOidcConfigSchema.pick({
           id: true,
           issuer: true,
           authorizationEndpoint: true,
@@ -223,7 +240,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
           userinfoEndpoint: true,
           orgId: true,
           allowedEmailDomains: true,
-          isActive: true
+          isActive: true,
+          manageGroupMemberships: true
         })
       }
     },
@@ -272,7 +290,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
           clientId: z.string().trim(),
           clientSecret: z.string().trim(),
           isActive: z.boolean(),
-          orgSlug: z.string().trim()
+          orgSlug: z.string().trim(),
+          manageGroupMemberships: z.boolean().optional().default(false)
         })
           .superRefine((data, ctx) => {
             if (data.configurationType === OIDCConfigurationType.CUSTOM) {
@@ -323,19 +342,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
         }
       }),
       response: {
-        200: OidcConfigsSchema.pick({
-          id: true,
-          issuer: true,
-          authorizationEndpoint: true,
-          configurationType: true,
-          discoveryURL: true,
-          jwksUri: true,
-          tokenEndpoint: true,
-          userinfoEndpoint: true,
-          orgId: true,
-          isActive: true,
-          allowedEmailDomains: true
-        })
+        200: SanitizedOidcConfigSchema
       }
     },
 
@@ -350,4 +357,25 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
       return oidc;
     }
   });
+
+  server.route({
+    method: "GET",
+    url: "/manage-group-memberships",
+    schema: {
+      querystring: z.object({
+        orgId: z.string().trim().min(1, "Org ID is required")
+      }),
+      response: {
+        200: z.object({
+          isEnabled: z.boolean()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const isEnabled = await server.services.oidc.isOidcManageGroupMembershipsEnabled(req.query.orgId, req.permission);
+
+      return { isEnabled };
+    }
+  });
 };
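A hedged sketch of calling the new manage-group-memberships endpoint from a client. The host, route prefix, and token are assumptions; only the "/manage-group-memberships" path, the orgId query parameter, the JWT auth guard, and the { isEnabled } response shape come from the route definition above. Run it from an ESM module so top-level await is available.

// Hypothetical client call (host, route prefix and token are assumptions).
const baseUrl = "https://app.infisical.com/api/v1/sso/oidc"; // assumed mount point for this router
const orgId = "<org-id>";

const res = await fetch(`${baseUrl}/manage-group-memberships?orgId=${encodeURIComponent(orgId)}`, {
  headers: { Authorization: "Bearer <jwt>" } // route is guarded by verifyAuth([AuthMode.JWT])
});
const { isEnabled } = (await res.json()) as { isEnabled: boolean };
console.log(`OIDC-managed group memberships enabled: ${isEnabled}`);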
@@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
+import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
 import { AuthMode } from "@app/services/auth/auth-type";
 
 const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
@@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
 import { FastifyRequest } from "fastify";
 import { z } from "zod";
 
-import { SamlConfigsSchema } from "@app/db/schemas";
 import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
 import { AuthMode } from "@app/services/auth/auth-type";
 
 type TSAMLConfig = {
@@ -298,7 +298,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
         cert: z.string()
       }),
       response: {
-        200: SamlConfigsSchema
+        200: SanitizedSamlConfigSchema
       }
     },
     handler: async (req) => {
@@ -333,7 +333,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
         .partial()
         .merge(z.object({ organizationId: z.string() })),
       response: {
-        200: SamlConfigsSchema
+        200: SanitizedSamlConfigSchema
       }
     },
     handler: async (req) => {
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
+import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
 import { AuthMode } from "@app/services/auth/auth-type";
 
 export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
+import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
 import { AuthMode } from "@app/services/auth/auth-type";
 
 export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -39,11 +39,13 @@ export const auditLogDALFactory = (db: TDbClient) => {
       offset = 0,
       actorId,
       actorType,
+      secretPath,
       eventType,
       eventMetadata
     }: Omit<TFindQuery, "actor" | "eventType"> & {
       actorId?: string;
       actorType?: ActorType;
+      secretPath?: string;
       eventType?: EventType[];
       eventMetadata?: Record<string, string>;
     },
@@ -88,6 +90,10 @@ export const auditLogDALFactory = (db: TDbClient) => {
         });
       }
 
+      if (projectId && secretPath) {
+        void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
+      }
+
       // Filter by actor type
       if (actorType) {
         void sqlQuery.where("actor", actorType);
@@ -46,10 +46,6 @@ export const auditLogServiceFactory = ({
       actorOrgId
     );
 
-    /**
-     * NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
-     * to the organization level ✅
-     */
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
   }
 
@@ -64,6 +60,7 @@ export const auditLogServiceFactory = ({
     actorId: filter.auditLogActorId,
     actorType: filter.actorType,
    eventMetadata: filter.eventMetadata,
+    secretPath: filter.secretPath,
     ...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
   });
 
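The new secretPath filter narrows audit logs with a PostgreSQL JSONB containment check. A small sketch of the SQL it produces, using a disconnected pg-flavoured knex instance purely to print the generated query; the table name here is a placeholder for the real TableName.AuditLog value, and the secret path is made up:

// Sketch: inspect the SQL shape of the new filter without touching a database.
import knex from "knex";

const db = knex({ client: "pg" }); // no connection needed just to build SQL
const query = db("audit_logs") // placeholder table name
  .where("projectId", "<project-id>")
  .whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, ["/api/payments"]);

console.log(query.toString());
// approximately: select * from "audit_logs" where "projectId" = '<project-id>'
//   and "eventMetadata" @> jsonb_build_object('secretPath', '/api/payments'::text)
await db.destroy();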
|
@ -32,6 +32,7 @@ export type TListProjectAuditLogDTO = {
|
|||||||
projectId?: string;
|
projectId?: string;
|
||||||
auditLogActorId?: string;
|
auditLogActorId?: string;
|
||||||
actorType?: ActorType;
|
actorType?: ActorType;
|
||||||
|
secretPath?: string;
|
||||||
eventMetadata?: Record<string, string>;
|
eventMetadata?: Record<string, string>;
|
||||||
};
|
};
|
||||||
} & Omit<TProjectPermission, "projectId">;
|
} & Omit<TProjectPermission, "projectId">;
|
||||||
@ -222,6 +223,7 @@ export enum EventType {
|
|||||||
UPDATE_CMEK = "update-cmek",
|
UPDATE_CMEK = "update-cmek",
|
||||||
DELETE_CMEK = "delete-cmek",
|
DELETE_CMEK = "delete-cmek",
|
||||||
GET_CMEKS = "get-cmeks",
|
GET_CMEKS = "get-cmeks",
|
||||||
|
GET_CMEK = "get-cmek",
|
||||||
CMEK_ENCRYPT = "cmek-encrypt",
|
CMEK_ENCRYPT = "cmek-encrypt",
|
||||||
CMEK_DECRYPT = "cmek-decrypt",
|
CMEK_DECRYPT = "cmek-decrypt",
|
||||||
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
|
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
|
||||||
@ -248,7 +250,9 @@ export enum EventType {
|
|||||||
DELETE_SECRET_SYNC = "delete-secret-sync",
|
DELETE_SECRET_SYNC = "delete-secret-sync",
|
||||||
SECRET_SYNC_SYNC_SECRETS = "secret-sync-sync-secrets",
|
SECRET_SYNC_SYNC_SECRETS = "secret-sync-sync-secrets",
|
||||||
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
|
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
|
||||||
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets"
|
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets",
|
||||||
|
OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER = "oidc-group-membership-mapping-assign-user",
|
||||||
|
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user"
|
||||||
}
|
}
|
||||||
|
|
||||||
interface UserActorMetadata {
|
interface UserActorMetadata {
|
||||||
@ -314,6 +318,8 @@ interface GetSecretsEvent {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type TSecretMetadata = { key: string; value: string }[];
|
||||||
|
|
||||||
interface GetSecretEvent {
|
interface GetSecretEvent {
|
||||||
type: EventType.GET_SECRET;
|
type: EventType.GET_SECRET;
|
||||||
metadata: {
|
metadata: {
|
||||||
@ -322,6 +328,7 @@ interface GetSecretEvent {
|
|||||||
secretId: string;
|
secretId: string;
|
||||||
secretKey: string;
|
secretKey: string;
|
||||||
secretVersion: number;
|
secretVersion: number;
|
||||||
|
secretMetadata?: TSecretMetadata;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -333,6 +340,7 @@ interface CreateSecretEvent {
|
|||||||
secretId: string;
|
secretId: string;
|
||||||
secretKey: string;
|
secretKey: string;
|
||||||
secretVersion: number;
|
secretVersion: number;
|
||||||
|
secretMetadata?: TSecretMetadata;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -341,7 +349,12 @@ interface CreateSecretBatchEvent {
|
|||||||
metadata: {
|
metadata: {
|
||||||
environment: string;
|
environment: string;
|
||||||
secretPath: string;
|
secretPath: string;
|
||||||
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
|
secrets: Array<{
|
||||||
|
secretId: string;
|
||||||
|
secretKey: string;
|
||||||
|
secretVersion: number;
|
||||||
|
secretMetadata?: TSecretMetadata;
|
||||||
|
}>;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -353,6 +366,7 @@ interface UpdateSecretEvent {
|
|||||||
secretId: string;
|
secretId: string;
|
||||||
secretKey: string;
|
secretKey: string;
|
||||||
secretVersion: number;
|
secretVersion: number;
|
||||||
|
secretMetadata?: TSecretMetadata;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -361,7 +375,7 @@ interface UpdateSecretBatchEvent {
|
|||||||
metadata: {
|
metadata: {
|
||||||
environment: string;
|
environment: string;
|
||||||
secretPath: string;
|
secretPath: string;
|
||||||
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
|
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number; secretMetadata?: TSecretMetadata }>;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -759,9 +773,9 @@ interface AddIdentityGcpAuthEvent {
|
|||||||
metadata: {
|
metadata: {
|
||||||
identityId: string;
|
identityId: string;
|
||||||
type: string;
|
type: string;
|
||||||
allowedServiceAccounts: string;
|
allowedServiceAccounts?: string | null;
|
||||||
allowedProjects: string;
|
allowedProjects?: string | null;
|
||||||
allowedZones: string;
|
allowedZones?: string | null;
|
||||||
accessTokenTTL: number;
|
accessTokenTTL: number;
|
||||||
accessTokenMaxTTL: number;
|
accessTokenMaxTTL: number;
|
||||||
accessTokenNumUsesLimit: number;
|
accessTokenNumUsesLimit: number;
|
||||||
@ -781,9 +795,9 @@ interface UpdateIdentityGcpAuthEvent {
|
|||||||
metadata: {
|
metadata: {
|
||||||
identityId: string;
|
identityId: string;
|
||||||
type?: string;
|
type?: string;
|
||||||
allowedServiceAccounts?: string;
|
allowedServiceAccounts?: string | null;
|
||||||
allowedProjects?: string;
|
allowedProjects?: string | null;
|
||||||
allowedZones?: string;
|
allowedZones?: string | null;
|
||||||
accessTokenTTL?: number;
|
accessTokenTTL?: number;
|
||||||
accessTokenMaxTTL?: number;
|
accessTokenMaxTTL?: number;
|
||||||
accessTokenNumUsesLimit?: number;
|
accessTokenNumUsesLimit?: number;
|
||||||
@ -1834,6 +1848,13 @@ interface GetCmeksEvent {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface GetCmekEvent {
|
||||||
|
type: EventType.GET_CMEK;
|
||||||
|
metadata: {
|
||||||
|
keyId: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
interface CmekEncryptEvent {
|
interface CmekEncryptEvent {
|
||||||
type: EventType.CMEK_ENCRYPT;
|
type: EventType.CMEK_ENCRYPT;
|
||||||
metadata: {
|
metadata: {
|
||||||
@ -2043,6 +2064,26 @@ interface SecretSyncRemoveSecretsEvent {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface OidcGroupMembershipMappingAssignUserEvent {
|
||||||
|
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER;
|
||||||
|
metadata: {
|
||||||
|
assignedToGroups: { id: string; name: string }[];
|
||||||
|
userId: string;
|
||||||
|
userEmail: string;
|
||||||
|
userGroupsClaim: string[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
interface OidcGroupMembershipMappingRemoveUserEvent {
|
||||||
|
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER;
|
||||||
|
metadata: {
|
||||||
|
removedFromGroups: { id: string; name: string }[];
|
||||||
|
userId: string;
|
||||||
|
userEmail: string;
|
||||||
|
userGroupsClaim: string[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
export type Event =
|
export type Event =
|
||||||
| GetSecretsEvent
|
| GetSecretsEvent
|
||||||
| GetSecretEvent
|
| GetSecretEvent
|
||||||
@ -2204,6 +2245,7 @@ export type Event =
|
|||||||
| CreateCmekEvent
|
| CreateCmekEvent
|
||||||
| UpdateCmekEvent
|
| UpdateCmekEvent
|
||||||
| DeleteCmekEvent
|
| DeleteCmekEvent
|
||||||
|
| GetCmekEvent
|
||||||
| GetCmeksEvent
|
| GetCmeksEvent
|
||||||
| CmekEncryptEvent
|
| CmekEncryptEvent
|
||||||
| CmekDecryptEvent
|
| CmekDecryptEvent
|
||||||
@ -2231,4 +2273,6 @@ export type Event =
|
|||||||
| DeleteSecretSyncEvent
|
| DeleteSecretSyncEvent
|
||||||
| SecretSyncSyncSecretsEvent
|
| SecretSyncSyncSecretsEvent
|
||||||
| SecretSyncImportSecretsEvent
|
| SecretSyncImportSecretsEvent
|
||||||
| SecretSyncRemoveSecretsEvent;
|
| SecretSyncRemoveSecretsEvent
|
||||||
|
| OidcGroupMembershipMappingAssignUserEvent
|
||||||
|
| OidcGroupMembershipMappingRemoveUserEvent;
|
||||||
|
@ -37,11 +37,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
|
|||||||
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
|
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
|
||||||
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
|
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
|
||||||
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
|
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
|
||||||
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
|
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
|
||||||
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
|
|
||||||
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
|
|
||||||
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
|
|
||||||
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
|
|
||||||
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
|
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
|
||||||
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
|
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
|
||||||
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
|
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
|
||||||
@ -59,11 +55,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
|
|||||||
type: doc.dynType,
|
type: doc.dynType,
|
||||||
defaultTTL: doc.dynDefaultTTL,
|
defaultTTL: doc.dynDefaultTTL,
|
||||||
maxTTL: doc.dynMaxTTL,
|
maxTTL: doc.dynMaxTTL,
|
||||||
inputIV: doc.dynInputIV,
|
encryptedInput: doc.dynEncryptedInput,
|
||||||
inputTag: doc.dynInputTag,
|
|
||||||
inputCiphertext: doc.dynInputCiphertext,
|
|
||||||
algorithm: doc.dynAlgorithm,
|
|
||||||
keyEncoding: doc.dynKeyEncoding,
|
|
||||||
folderId: doc.dynFolderId,
|
folderId: doc.dynFolderId,
|
||||||
status: doc.dynStatus,
|
status: doc.dynStatus,
|
||||||
statusDetails: doc.dynStatusDetails,
|
statusDetails: doc.dynStatusDetails,
|
||||||
|
@@ -1,8 +1,10 @@
-import { SecretKeyEncoding } from "@app/db/schemas";
 import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
-import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
+import { NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
+import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
 
 import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
 import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@@ -14,6 +16,8 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
   dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
   dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
   dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+  folderDAL: Pick<TSecretFolderDALFactory, "findById">;
 };
 
 export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@@ -22,7 +26,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
   queueService,
   dynamicSecretDAL,
   dynamicSecretProviders,
-  dynamicSecretLeaseDAL
+  dynamicSecretLeaseDAL,
+  kmsService,
+  folderDAL
 }: TDynamicSecretLeaseQueueServiceFactoryDep) => {
   const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
     await queueService.queue(
@@ -76,15 +82,21 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
       const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
       if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
 
+      const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
+      if (!folder)
+        throw new NotFoundError({
+          message: `Failed to find folder with ${dynamicSecretLease.dynamicSecret.folderId}`
+        });
+
+      const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+        type: KmsDataKey.SecretManager,
+        projectId: folder.projectId
+      });
+
       const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
       const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
       const decryptedStoredInput = JSON.parse(
-        infisicalSymmetricDecrypt({
-          keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-          ciphertext: dynamicSecretCfg.inputCiphertext,
-          tag: dynamicSecretCfg.inputTag,
-          iv: dynamicSecretCfg.inputIV
-        })
+        secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
       ) as object;
 
       await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
@@ -100,16 +112,22 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
       if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
        throw new DisableRotationErrors({ message: "Document not deleted" });
 
+      const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
+      if (!folder)
+        throw new NotFoundError({
+          message: `Failed to find folder with ${dynamicSecretCfg.folderId}`
+        });
+
+      const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+        type: KmsDataKey.SecretManager,
+        projectId: folder.projectId
+      });
+
       const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
       if (dynamicSecretLeases.length) {
         const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
         const decryptedStoredInput = JSON.parse(
-          infisicalSymmetricDecrypt({
-            keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-            ciphertext: dynamicSecretCfg.inputCiphertext,
-            tag: dynamicSecretCfg.inputTag,
-            iv: dynamicSecretCfg.inputIV
-          })
+          secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
         ) as object;
 
         await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
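The decryption path above now goes through a project-scoped data key from kmsService.createCipherPairWithDataKey instead of infisicalSymmetricDecrypt. A shape-only sketch of the call site with a stand-in decryptor; the real decryptor performs AES decryption with the project data key, while this mock merely echoes the buffer to show the JSON.parse round trip:

// Stand-in decryptor: same ({ cipherTextBlob }) => Buffer call shape as the real
// secretManagerDecryptor, but it only echoes the input for illustration.
const secretManagerDecryptor = ({ cipherTextBlob }: { cipherTextBlob: Buffer }): Buffer => cipherTextBlob;

// A fake encryptedInput column value (in production this is real ciphertext).
const encryptedInput = Buffer.from(JSON.stringify({ host: "db.internal", port: 5432 }));

const decryptedStoredInput = JSON.parse(secretManagerDecryptor({ cipherTextBlob: encryptedInput }).toString()) as object;
console.log(decryptedStoredInput); // { host: 'db.internal', port: 5432 }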
@@ -1,7 +1,7 @@
 import { ForbiddenError, subject } from "@casl/ability";
 import ms from "ms";
 
-import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
+import { ActionProjectType } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import {
@@ -9,9 +9,10 @@ import {
   ProjectPermissionSub
 } from "@app/ee/services/permission/project-permission";
 import { getConfig } from "@app/lib/config/env";
-import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@@ -37,6 +38,7 @@ type TDynamicSecretLeaseServiceFactoryDep = {
   folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
   projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };
 
 export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -49,7 +51,8 @@ export const dynamicSecretLeaseServiceFactory = ({
   permissionService,
   dynamicSecretQueueService,
   projectDAL,
-  licenseService
+  licenseService,
+  kmsService
 }: TDynamicSecretLeaseServiceFactoryDep) => {
   const create = async ({
     environmentSlug,
@@ -104,13 +107,14 @@ export const dynamicSecretLeaseServiceFactory = ({
       throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });
 
     const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
+
+    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
+
     const decryptedStoredInput = JSON.parse(
-      infisicalSymmetricDecrypt({
-        keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-        ciphertext: dynamicSecretCfg.inputCiphertext,
-        tag: dynamicSecretCfg.inputTag,
-        iv: dynamicSecretCfg.inputIV
-      })
+      secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
     ) as object;
 
     const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -160,6 +164,11 @@ export const dynamicSecretLeaseServiceFactory = ({
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
     );
 
+    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
+
     const plan = await licenseService.getPlan(actorOrgId);
     if (!plan?.dynamicSecret) {
       throw new BadRequestError({
@@ -181,12 +190,7 @@ export const dynamicSecretLeaseServiceFactory = ({
     const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
     const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
     const decryptedStoredInput = JSON.parse(
-      infisicalSymmetricDecrypt({
-        keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-        ciphertext: dynamicSecretCfg.inputCiphertext,
-        tag: dynamicSecretCfg.inputTag,
-        iv: dynamicSecretCfg.inputIV
-      })
+      secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
     ) as object;
 
     const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -240,6 +244,11 @@ export const dynamicSecretLeaseServiceFactory = ({
       subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
     );
 
+    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
+
     const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
     if (!folder)
       throw new NotFoundError({
@@ -253,12 +262,7 @@ export const dynamicSecretLeaseServiceFactory = ({
     const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
     const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
     const decryptedStoredInput = JSON.parse(
-      infisicalSymmetricDecrypt({
-        keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
-        ciphertext: dynamicSecretCfg.inputCiphertext,
-        tag: dynamicSecretCfg.inputTag,
-        iv: dynamicSecretCfg.inputIV
-      })
+      secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
     ) as object;
 
     const revokeResponse = await selectedProvider
@@ -1,15 +1,16 @@
 import { ForbiddenError, subject } from "@casl/ability";
 
-import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
+import { ActionProjectType } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import {
   ProjectPermissionDynamicSecretActions,
   ProjectPermissionSub
 } from "@app/ee/services/permission/project-permission";
-import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@@ -42,6 +43,7 @@ type TDynamicSecretServiceFactoryDep = {
   folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
   projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };
 
 export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -54,7 +56,8 @@ export const dynamicSecretServiceFactory = ({
   dynamicSecretProviders,
   permissionService,
   dynamicSecretQueueService,
-  projectDAL
+  projectDAL,
+  kmsService
 }: TDynamicSecretServiceFactoryDep) => {
   const create = async ({
     path,
@@ -108,16 +111,15 @@ export const dynamicSecretServiceFactory = ({
     const isConnected = await selectedProvider.validateConnection(provider.inputs);
     if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
 
-    const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
+    const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
+      type: KmsDataKey.SecretManager,
+      projectId
+    });
 
     const dynamicSecretCfg = await dynamicSecretDAL.create({
|
const dynamicSecretCfg = await dynamicSecretDAL.create({
|
||||||
type: provider.type,
|
type: provider.type,
|
||||||
version: 1,
|
version: 1,
|
||||||
inputIV: encryptedInput.iv,
|
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
|
||||||
inputTag: encryptedInput.tag,
|
|
||||||
inputCiphertext: encryptedInput.ciphertext,
|
|
||||||
algorithm: encryptedInput.algorithm,
|
|
||||||
keyEncoding: encryptedInput.encoding,
|
|
||||||
maxTTL,
|
maxTTL,
|
||||||
defaultTTL,
|
defaultTTL,
|
||||||
folderId: folder.id,
|
folderId: folder.id,
|
||||||
@ -180,15 +182,15 @@ export const dynamicSecretServiceFactory = ({
|
|||||||
if (existingDynamicSecret)
|
if (existingDynamicSecret)
|
||||||
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
|
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
|
||||||
}
|
}
|
||||||
|
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
|
||||||
|
await kmsService.createCipherPairWithDataKey({
|
||||||
|
type: KmsDataKey.SecretManager,
|
||||||
|
projectId
|
||||||
|
});
|
||||||
|
|
||||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||||
const decryptedStoredInput = JSON.parse(
|
const decryptedStoredInput = JSON.parse(
|
||||||
infisicalSymmetricDecrypt({
|
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||||
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
|
|
||||||
ciphertext: dynamicSecretCfg.inputCiphertext,
|
|
||||||
tag: dynamicSecretCfg.inputTag,
|
|
||||||
iv: dynamicSecretCfg.inputIV
|
|
||||||
})
|
|
||||||
) as object;
|
) as object;
|
||||||
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
|
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
|
||||||
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
|
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
|
||||||
@ -196,13 +198,8 @@ export const dynamicSecretServiceFactory = ({
|
|||||||
const isConnected = await selectedProvider.validateConnection(newInput);
|
const isConnected = await selectedProvider.validateConnection(newInput);
|
||||||
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
|
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
|
||||||
|
|
||||||
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
|
|
||||||
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
|
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
|
||||||
inputIV: encryptedInput.iv,
|
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
|
||||||
inputTag: encryptedInput.tag,
|
|
||||||
inputCiphertext: encryptedInput.ciphertext,
|
|
||||||
algorithm: encryptedInput.algorithm,
|
|
||||||
keyEncoding: encryptedInput.encoding,
|
|
||||||
maxTTL,
|
maxTTL,
|
||||||
defaultTTL,
|
defaultTTL,
|
||||||
name: newName ?? name,
|
name: newName ?? name,
|
||||||
@ -315,13 +312,13 @@ export const dynamicSecretServiceFactory = ({
|
|||||||
if (!dynamicSecretCfg) {
|
if (!dynamicSecretCfg) {
|
||||||
throw new NotFoundError({ message: `Dynamic secret with name '${name} in folder '${path}' not found` });
|
throw new NotFoundError({ message: `Dynamic secret with name '${name} in folder '${path}' not found` });
|
||||||
}
|
}
|
||||||
|
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||||
|
type: KmsDataKey.SecretManager,
|
||||||
|
projectId
|
||||||
|
});
|
||||||
|
|
||||||
const decryptedStoredInput = JSON.parse(
|
const decryptedStoredInput = JSON.parse(
|
||||||
infisicalSymmetricDecrypt({
|
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||||
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
|
|
||||||
ciphertext: dynamicSecretCfg.inputCiphertext,
|
|
||||||
tag: dynamicSecretCfg.inputTag,
|
|
||||||
iv: dynamicSecretCfg.inputIV
|
|
||||||
})
|
|
||||||
) as object;
|
) as object;
|
||||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||||
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
|
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
|
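The hunks above replace the legacy `infisicalSymmetricEncypt`/`infisicalSymmetricDecrypt` calls (and the separate IV, tag, ciphertext, and key-encoding columns) with a single `encryptedInput` blob protected by the project's secret-manager KMS data key. A minimal sketch of that round trip, using only the `createCipherPairWithDataKey` API shown in the diff; the helper names are hypothetical and not part of the change:

```typescript
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

type TKmsService = Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

// Encrypt provider inputs into one ciphertext blob before persisting them.
export const encryptInputs = async (kmsService: TKmsService, projectId: string, inputs: object) => {
  const { encryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  return encryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob;
};

// Decrypt the stored blob back into the provider input object.
export const decryptInputs = async (kmsService: TKmsService, projectId: string, encryptedInput: Buffer) => {
  const { decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  return JSON.parse(decryptor({ cipherTextBlob: encryptedInput }).toString()) as object;
};
```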
@@ -111,7 +111,7 @@ export const groupDALFactory = (db: TDbClient) => {
 }

 if (search) {
-void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike '%${search}%'`);
+void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
 } else if (username) {
 void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
 }
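The group DAL change above swaps a string-interpolated `ilike` filter for a bound parameter, which is what stops the search term from being interpreted as SQL. A small sketch of the same idea with knex, where the table and connection details are illustrative rather than taken from the repository:

```typescript
import knex from "knex";

// Hypothetical standalone setup; the real code reuses the application's db client.
const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// The `?` placeholder makes knex send the term as a bound parameter, so input such as
// "x' OR '1'='1" is matched literally instead of being spliced into the SQL text.
export const searchMembers = (search: string) =>
  db("users")
    .select("id", "username")
    .whereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
```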
@@ -2,6 +2,7 @@ import { ForbiddenError } from "@casl/ability";
 import slugify from "@sindresorhus/slugify";

 import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
+import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
 import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

@@ -45,6 +46,7 @@ type TGroupServiceFactoryDep = {
 projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
 permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne">;
 };

 export type TGroupServiceFactory = ReturnType<typeof groupServiceFactory>;

@@ -59,7 +61,8 @@ export const groupServiceFactory = ({
 projectBotDAL,
 projectKeyDAL,
 permissionService,
-licenseService
+licenseService,
+oidcConfigDAL
 }: TGroupServiceFactoryDep) => {
 const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
 if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });

@@ -311,6 +314,18 @@ export const groupServiceFactory = ({
 message: `Failed to find group with ID ${id}`
 });

+const oidcConfig = await oidcConfigDAL.findOne({
+orgId: group.orgId,
+isActive: true
+});
+
+if (oidcConfig?.manageGroupMemberships) {
+throw new BadRequestError({
+message:
+"Cannot add user to group: OIDC group membership mapping is enabled - user must be assigned to this group in your OIDC provider."
+});
+}
+
 const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);

 // check if user has broader or equal to privileges than group

@@ -366,6 +381,18 @@ export const groupServiceFactory = ({
 message: `Failed to find group with ID ${id}`
 });

+const oidcConfig = await oidcConfigDAL.findOne({
+orgId: group.orgId,
+isActive: true
+});
+
+if (oidcConfig?.manageGroupMemberships) {
+throw new BadRequestError({
+message:
+"Cannot remove user from group: OIDC group membership mapping is enabled - user must be removed from this group in your OIDC provider."
+});
+}
+
 const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);

 // check if user has broader or equal to privileges than group
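The two group-service hunks above add the same guard to both the add-user and remove-user paths: when an active OIDC configuration has `manageGroupMemberships` enabled, manual membership changes are rejected so the identity provider stays the source of truth. A condensed sketch of that guard as a reusable helper; the helper itself is hypothetical, while the DAL call and error shape mirror the diff:

```typescript
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { BadRequestError } from "@app/lib/errors";

// Throws when group memberships for this org are managed by the OIDC provider.
export const assertManualGroupManagementAllowed = async (
  oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne">,
  orgId: string,
  action: "add user to group" | "remove user from group"
) => {
  const oidcConfig = await oidcConfigDAL.findOne({ orgId, isActive: true });
  if (oidcConfig?.manageGroupMemberships) {
    throw new BadRequestError({
      message: `Cannot ${action}: OIDC group membership mapping is enabled - manage this membership in your OIDC provider.`
    });
  }
};
```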
@@ -1,25 +1,23 @@
 import * as pkcs11js from "pkcs11js";

-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
 import { logger } from "@app/lib/logger";

 import { HsmModule } from "./hsm-types";

-export const initializeHsmModule = () => {
-const appCfg = getConfig();
-
+export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
 // Create a new instance of PKCS11 module
 const pkcs11 = new pkcs11js.PKCS11();
 let isInitialized = false;

 const initialize = () => {
-if (!appCfg.isHsmConfigured) {
+if (!envConfig.isHsmConfigured) {
 return;
 }

 try {
 // Load the PKCS#11 module
-pkcs11.load(appCfg.HSM_LIB_PATH!);
+pkcs11.load(envConfig.HSM_LIB_PATH!);

 // Initialize the module
 pkcs11.C_Initialize();

@@ -1,12 +1,13 @@
 import pkcs11js from "pkcs11js";

-import { getConfig } from "@app/lib/config/env";
+import { TEnvConfig } from "@app/lib/config/env";
 import { logger } from "@app/lib/logger";

 import { HsmKeyType, HsmModule } from "./hsm-types";

 type THsmServiceFactoryDep = {
 hsmModule: HsmModule;
+envConfig: Pick<TEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
 };

 export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;

@@ -15,9 +16,7 @@ type SyncOrAsync<T> = T | Promise<T>;
 type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

 // eslint-disable-next-line no-empty-pattern
-export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
-const appCfg = getConfig();
-
+export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envConfig }: THsmServiceFactoryDep) => {
 // Constants for buffer structures
 const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
 const BLOCK_SIZE = 16;

@@ -63,11 +62,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 throw new Error("No slots available");
 }

-if (appCfg.HSM_SLOT >= slots.length) {
-throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
+if (envConfig.HSM_SLOT >= slots.length) {
+throw new Error(`HSM slot ${envConfig.HSM_SLOT} not found or not initialized`);
 }

-const slotId = slots[appCfg.HSM_SLOT];
+const slotId = slots[envConfig.HSM_SLOT];

 const startTime = Date.now();
 while (Date.now() - startTime < MAX_TIMEOUT) {

@@ -78,7 +77,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm

 // Login
 try {
-pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
+pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, envConfig.HSM_PIN);
 logger.info("HSM: Successfully authenticated");
 break;
 } catch (error) {

@@ -86,7 +85,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 if (error instanceof pkcs11js.Pkcs11Error) {
 if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
 // We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
-logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
+logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${envConfig.HSM_SLOT}`);
 throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
 }
 if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {

@@ -133,7 +132,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 };

 const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
-const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
+const label = type === HsmKeyType.HMAC ? `${envConfig.HSM_KEY_LABEL}_HMAC` : envConfig.HSM_KEY_LABEL;
 const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

 const template = [

@@ -360,7 +359,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 };

 const isActive = async () => {
-if (!isInitialized || !appCfg.isHsmConfigured) {
+if (!isInitialized || !envConfig.isHsmConfigured) {
 return false;
 }

@@ -372,11 +371,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 logger.error(err, "HSM: Error testing PKCS#11 module");
 }

-return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
+return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
 };

 const startService = async () => {
-if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
+if (!envConfig.isHsmConfigured || !pkcs11 || !isInitialized) return;

 try {
 await $withSession(async (sessionHandle) => {

@@ -395,7 +394,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
 { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
 { type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
-{ type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
+{ type: pkcs11js.CKA_LABEL, value: envConfig.HSM_KEY_LABEL! },
 { type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
 { type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
 ...genericAttributes

@@ -410,7 +409,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 keyTemplate
 );

-logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
+logger.info(`HSM: Master key created successfully with label: ${envConfig.HSM_KEY_LABEL}`);
 }

 // Check if HMAC key exists, create if not

@@ -419,7 +418,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
 { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
 { type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
-{ type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
+{ type: pkcs11js.CKA_LABEL, value: `${envConfig.HSM_KEY_LABEL!}_HMAC` },
 { type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
 { type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
 ...genericAttributes

@@ -434,7 +433,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
 hmacKeyTemplate
 );

-logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
+logger.info(`HSM: HMAC key created successfully with label: ${envConfig.HSM_KEY_LABEL}_HMAC`);
 }

 // Get slot info to check supported mechanisms
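The HSM hunks above stop calling `getConfig()` inside the module loader and service factory and instead accept an injected `envConfig`, which keeps the HSM code independent of global import order and easier to test. A rough sketch of how a caller might wire this together; the bootstrap function is hypothetical, and it assumes `initializeHsmModule()` exposes `initialize()` and `getModule()` as in the server bootstrap:

```typescript
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { getConfig } from "@app/lib/config/env";

// Hypothetical composition-root wiring: resolve env once and inject it downward.
export const bootstrapHsm = async () => {
  const envConfig = getConfig();

  // The config is now passed in explicitly instead of being read from a global inside the module.
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  // The service factory receives the same injected config (assumed return shape of getModule()).
  const hsmService = hsmServiceFactory({ hsmModule: hsmModule.getModule(), envConfig });
  await hsmService.startService();
  return hsmService;
};
```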
@@ -5,7 +5,7 @@ import ms from "ms";
 import { ActionProjectType, TableName } from "@app/db/schemas";
 import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
-import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
+import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
 import { ActorType } from "@app/services/auth/auth-type";
 import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
 import { TProjectDALFactory } from "@app/services/project/project-dal";

@@ -5,7 +5,7 @@ import ms from "ms";
 import { ActionProjectType } from "@app/db/schemas";
 import { isAtLeastAsPrivileged } from "@app/lib/casl";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
-import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
+import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
 import { ActorType } from "@app/services/auth/auth-type";
 import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -1,25 +1,18 @@
 import { ForbiddenError } from "@casl/ability";
 import jwt from "jsonwebtoken";

-import { OrgMembershipStatus, SecretKeyEncoding, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
+import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
 import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
 import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
 import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
 import { getConfig } from "@app/lib/config/env";
-import {
-decryptSymmetric,
-encryptSymmetric,
-generateAsymmetricKeyPair,
-generateSymmetricKey,
-infisicalSymmetricDecrypt,
-infisicalSymmetricEncypt
-} from "@app/lib/crypto/encryption";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
 import { TokenType } from "@app/services/auth-token/auth-token-types";
 import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
-import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
 import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
 import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";

@@ -59,7 +52,6 @@ type TLdapConfigServiceFactoryDep = {
 TOrgDALFactory,
 "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
 >;
-orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
 groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
 groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
 projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;

@@ -84,6 +76,7 @@ type TLdapConfigServiceFactoryDep = {
 licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
 tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
 smtpService: Pick<TSmtpService, "sendMail">;
+kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };

 export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;

@@ -93,7 +86,6 @@ export const ldapConfigServiceFactory = ({
 ldapGroupMapDAL,
 orgDAL,
 orgMembershipDAL,
-orgBotDAL,
 groupDAL,
 groupProjectDAL,
 projectKeyDAL,

@@ -105,7 +97,8 @@ export const ldapConfigServiceFactory = ({
 permissionService,
 licenseService,
 tokenService,
-smtpService
+smtpService,
+kmsService
 }: TLdapConfigServiceFactoryDep) => {
 const createLdapCfg = async ({
 actor,

@@ -133,77 +126,23 @@ export const ldapConfigServiceFactory = ({
 message:
 "Failed to create LDAP configuration due to plan restriction. Upgrade plan to create LDAP configuration."
 });
-const orgBot = await orgBotDAL.transaction(async (tx) => {
-const doc = await orgBotDAL.findOne({ orgId }, tx);
-if (doc) return doc;
-
-const { privateKey, publicKey } = generateAsymmetricKeyPair();
-const key = generateSymmetricKey();
-const {
-ciphertext: encryptedPrivateKey,
-iv: privateKeyIV,
-tag: privateKeyTag,
-encoding: privateKeyKeyEncoding,
-algorithm: privateKeyAlgorithm
-} = infisicalSymmetricEncypt(privateKey);
-const {
-ciphertext: encryptedSymmetricKey,
-iv: symmetricKeyIV,
-tag: symmetricKeyTag,
-encoding: symmetricKeyKeyEncoding,
-algorithm: symmetricKeyAlgorithm
-} = infisicalSymmetricEncypt(key);
-
-return orgBotDAL.create(
-{
-name: "Infisical org bot",
-publicKey,
-privateKeyIV,
-encryptedPrivateKey,
-symmetricKeyIV,
-symmetricKeyTag,
-encryptedSymmetricKey,
-symmetricKeyAlgorithm,
-orgId,
-privateKeyTag,
-privateKeyAlgorithm,
-privateKeyKeyEncoding,
-symmetricKeyKeyEncoding
-},
-tx
-);
+const { encryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.Organization,
+orgId
 });

-const key = infisicalSymmetricDecrypt({
-ciphertext: orgBot.encryptedSymmetricKey,
-iv: orgBot.symmetricKeyIV,
-tag: orgBot.symmetricKeyTag,
-keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
-});
-
-const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
-const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
-const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
-
 const ldapConfig = await ldapConfigDAL.create({
 orgId,
 isActive,
 url,
-encryptedBindDN,
-bindDNIV,
-bindDNTag,
-encryptedBindPass,
-bindPassIV,
-bindPassTag,
 uniqueUserAttribute,
 searchBase,
 searchFilter,
 groupSearchBase,
 groupSearchFilter,
-encryptedCACert,
-caCertIV,
-caCertTag
+encryptedLdapCaCertificate: encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob,
+encryptedLdapBindDN: encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob,
+encryptedLdapBindPass: encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob
 });

 return ldapConfig;

@@ -246,38 +185,21 @@ export const ldapConfigServiceFactory = ({
 uniqueUserAttribute
 };

-const orgBot = await orgBotDAL.findOne({ orgId });
-if (!orgBot)
-throw new NotFoundError({
-message: `Organization bot in organization with ID '${orgId}' not found`,
-name: "OrgBotNotFound"
-});
-const key = infisicalSymmetricDecrypt({
-ciphertext: orgBot.encryptedSymmetricKey,
-iv: orgBot.symmetricKeyIV,
-tag: orgBot.symmetricKeyTag,
-keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+const { encryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.Organization,
+orgId
 });

 if (bindDN !== undefined) {
-const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
-updateQuery.encryptedBindDN = encryptedBindDN;
-updateQuery.bindDNIV = bindDNIV;
-updateQuery.bindDNTag = bindDNTag;
+updateQuery.encryptedLdapBindDN = encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob;
 }

 if (bindPass !== undefined) {
-const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
-updateQuery.encryptedBindPass = encryptedBindPass;
-updateQuery.bindPassIV = bindPassIV;
-updateQuery.bindPassTag = bindPassTag;
+updateQuery.encryptedLdapBindPass = encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob;
 }

 if (caCert !== undefined) {
-const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
-updateQuery.encryptedCACert = encryptedCACert;
-updateQuery.caCertIV = caCertIV;
-updateQuery.caCertTag = caCertTag;
+updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
 }

 const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);

@@ -293,61 +215,24 @@ export const ldapConfigServiceFactory = ({
 });
 }

-const orgBot = await orgBotDAL.findOne({ orgId: ldapConfig.orgId });
-if (!orgBot) {
-throw new NotFoundError({
-message: `Organization bot not found in organization with ID ${ldapConfig.orgId}`,
-name: "OrgBotNotFound"
-});
-}
-
-const key = infisicalSymmetricDecrypt({
-ciphertext: orgBot.encryptedSymmetricKey,
-iv: orgBot.symmetricKeyIV,
-tag: orgBot.symmetricKeyTag,
-keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+const { decryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.Organization,
+orgId: ldapConfig.orgId
 });

-const {
-encryptedBindDN,
-bindDNIV,
-bindDNTag,
-encryptedBindPass,
-bindPassIV,
-bindPassTag,
-encryptedCACert,
-caCertIV,
-caCertTag
-} = ldapConfig;
-
 let bindDN = "";
-if (encryptedBindDN && bindDNIV && bindDNTag) {
-bindDN = decryptSymmetric({
-ciphertext: encryptedBindDN,
-key,
-tag: bindDNTag,
-iv: bindDNIV
-});
+if (ldapConfig.encryptedLdapBindDN) {
+bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
 }

 let bindPass = "";
-if (encryptedBindPass && bindPassIV && bindPassTag) {
-bindPass = decryptSymmetric({
-ciphertext: encryptedBindPass,
-key,
-tag: bindPassTag,
-iv: bindPassIV
-});
+if (ldapConfig.encryptedLdapBindPass) {
+bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
 }

 let caCert = "";
-if (encryptedCACert && caCertIV && caCertTag) {
-caCert = decryptSymmetric({
-ciphertext: encryptedCACert,
-key,
-tag: caCertTag,
-iv: caCertIV
-});
+if (ldapConfig.encryptedLdapCaCertificate) {
+caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
 }

 return {
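The LDAP hunks above drop the per-organization "org bot" keypair entirely: bind DN, bind password, and CA certificate are now sealed with the organization's KMS data key and stored as single `encryptedLdap*` blobs. A hedged sketch of reading such a configuration back; the field names follow the diff, while the helper and its input type are illustrative:

```typescript
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

// Illustrative shape covering only the encrypted LDAP fields used below.
type TLdapSecrets = {
  encryptedLdapBindDN?: Buffer | null;
  encryptedLdapBindPass?: Buffer | null;
  encryptedLdapCaCertificate?: Buffer | null;
};

// Decrypt whichever LDAP fields are present for an organization.
export const decryptLdapSecrets = async (
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  orgId: string,
  cfg: TLdapSecrets
) => {
  const { decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.Organization,
    orgId
  });
  const decode = (blob?: Buffer | null) => (blob ? decryptor({ cipherTextBlob: blob }).toString() : "");
  return {
    bindDN: decode(cfg.encryptedLdapBindDN),
    bindPass: decode(cfg.encryptedLdapBindPass),
    caCert: decode(cfg.encryptedLdapCaCertificate)
  };
};
```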
@@ -3,28 +3,31 @@ import { ForbiddenError } from "@casl/ability";
 import jwt from "jsonwebtoken";
 import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";

-import { OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
+import { OrgMembershipStatus, TableName, TUsers } from "@app/db/schemas";
 import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
+import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
+import { EventType } from "@app/ee/services/audit-log/audit-log-types";
+import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
+import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
+import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
 import { getConfig } from "@app/lib/config/env";
-import {
-decryptSymmetric,
-encryptSymmetric,
-generateAsymmetricKeyPair,
-generateSymmetricKey,
-infisicalSymmetricDecrypt,
-infisicalSymmetricEncypt
-} from "@app/lib/crypto/encryption";
 import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
-import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
+import { OrgServiceActor } from "@app/lib/types";
+import { ActorType, AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
 import { TokenType } from "@app/services/auth-token/auth-token-types";
-import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
+import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
 import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
 import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
+import { TProjectDALFactory } from "@app/services/project/project-dal";
+import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
+import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
 import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
 import { getServerCfg } from "@app/services/super-admin/super-admin-service";
 import { LoginMethod } from "@app/services/super-admin/super-admin-types";

@@ -45,7 +48,14 @@ import {
 type TOidcConfigServiceFactoryDep = {
 userDAL: Pick<
 TUserDALFactory,
-"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
+| "create"
+| "findOne"
+| "updateById"
+| "findById"
+| "findUserEncKeyByUserId"
+| "findUserEncKeyByUserIdsBatch"
+| "find"
+| "transaction"
 >;
 userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
 orgDAL: Pick<

@@ -53,12 +63,27 @@ type TOidcConfigServiceFactoryDep = {
 "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
 >;
 orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
-orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
 licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
 tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
 smtpService: Pick<TSmtpService, "sendMail" | "verify">;
-permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
+permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getUserOrgPermission">;
 oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
+groupDAL: Pick<TGroupDALFactory, "findByOrgId">;
+userGroupMembershipDAL: Pick<
+TUserGroupMembershipDALFactory,
+| "find"
+| "transaction"
+| "insertMany"
+| "findGroupMembershipsByUserIdInOrg"
+| "delete"
+| "filterProjectsByUserMembership"
+>;
+groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
+projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
+projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
+projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
+auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
+kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
 };

 export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;

@@ -71,9 +96,16 @@ export const oidcConfigServiceFactory = ({
 licenseService,
 permissionService,
 tokenService,
-orgBotDAL,
 smtpService,
-oidcConfigDAL
+oidcConfigDAL,
+userGroupMembershipDAL,
+groupDAL,
+groupProjectDAL,
+projectKeyDAL,
+projectDAL,
+projectBotDAL,
+auditLogService,
+kmsService
 }: TOidcConfigServiceFactoryDep) => {
 const getOidc = async (dto: TGetOidcCfgDTO) => {
 const org = await orgDAL.findOne({ slug: dto.orgSlug });

@@ -104,43 +136,19 @@ export const oidcConfigServiceFactory = ({
 });
 }

-// decrypt and return cfg
-const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
-if (!orgBot) {
-throw new NotFoundError({
-message: `Organization bot for organization with ID '${oidcCfg.orgId}' not found`,
-name: "OrgBotNotFound"
-});
-}
-
-const key = infisicalSymmetricDecrypt({
-ciphertext: orgBot.encryptedSymmetricKey,
-iv: orgBot.symmetricKeyIV,
-tag: orgBot.symmetricKeyTag,
-keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
+const { decryptor } = await kmsService.createCipherPairWithDataKey({
+type: KmsDataKey.Organization,
+orgId: oidcCfg.orgId
 });

-const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
-oidcCfg;
-
 let clientId = "";
-if (encryptedClientId && clientIdIV && clientIdTag) {
-clientId = decryptSymmetric({
-ciphertext: encryptedClientId,
-key,
-tag: clientIdTag,
-iv: clientIdIV
-});
+if (oidcCfg.encryptedOidcClientId) {
+clientId = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientId }).toString();
 }

 let clientSecret = "";
-if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
-clientSecret = decryptSymmetric({
-key,
-tag: clientSecretTag,
-iv: clientSecretIV,
-ciphertext: encryptedClientSecret
-});
+if (oidcCfg.encryptedOidcClientSecret) {
+clientSecret = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientSecret }).toString();
 }

 return {

@@ -156,11 +164,21 @@ export const oidcConfigServiceFactory = ({
 isActive: oidcCfg.isActive,
 allowedEmailDomains: oidcCfg.allowedEmailDomains,
 clientId,
-clientSecret
+clientSecret,
+manageGroupMemberships: oidcCfg.manageGroupMemberships
 };
 };

-const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
+const oidcLogin = async ({
+externalId,
+email,
+firstName,
+lastName,
+orgId,
+callbackPort,
+groups = [],
+manageGroupMemberships
+}: TOidcLoginDTO) => {
 const serverCfg = await getServerCfg();

 if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {

@@ -315,6 +333,83 @@ export const oidcConfigServiceFactory = ({
 });
 }

+if (manageGroupMemberships) {
+const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(user.id, orgId);
+const orgGroups = await groupDAL.findByOrgId(orgId);
+
+const userGroupsNames = userGroups.map((membership) => membership.groupName);
+const missingGroupsMemberships = groups.filter((groupName) => !userGroupsNames.includes(groupName));
+const groupsToAddUserTo = orgGroups.filter((group) => missingGroupsMemberships.includes(group.name));
+
+for await (const group of groupsToAddUserTo) {
+await addUsersToGroupByUserIds({
+userIds: [user.id],
+group,
+userDAL,
+userGroupMembershipDAL,
+orgDAL,
+groupProjectDAL,
+projectKeyDAL,
+projectDAL,
+projectBotDAL
+});
+}
+
+if (groupsToAddUserTo.length) {
+await auditLogService.createAuditLog({
+actor: {
+type: ActorType.PLATFORM,
+metadata: {}
+},
+orgId,
+event: {
+type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER,
+metadata: {
+userId: user.id,
+userEmail: user.email ?? user.username,
+assignedToGroups: groupsToAddUserTo.map(({ id, name }) => ({ id, name })),
+userGroupsClaim: groups
+}
+}
+});
+}
+
+const membershipsToRemove = userGroups
+.filter((membership) => !groups.includes(membership.groupName))
+.map((membership) => membership.groupId);
+const groupsToRemoveUserFrom = orgGroups.filter((group) => membershipsToRemove.includes(group.id));
+
+for await (const group of groupsToRemoveUserFrom) {
+await removeUsersFromGroupByUserIds({
+userIds: [user.id],
+group,
+userDAL,
+userGroupMembershipDAL,
+groupProjectDAL,
+projectKeyDAL
+});
+}
+
+if (groupsToRemoveUserFrom.length) {
+await auditLogService.createAuditLog({
+actor: {
+type: ActorType.PLATFORM,
+metadata: {}
+},
+orgId,
+event: {
+type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER,
+metadata: {
+userId: user.id,
+userEmail: user.email ?? user.username,
+removedFromGroups: groupsToRemoveUserFrom.map(({ id, name }) => ({ id, name })),
+userGroupsClaim: groups
+}
+}
+});
+}
+}
+
 await licenseService.updateSubscriptionOrgMemberCount(organization.id);

 const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
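The oidcLogin hunk above reconciles the user's memberships against the incoming `groups` claim: groups named in the claim but missing locally are added, local memberships absent from the claim are removed, and each direction emits a platform-actor audit log. A condensed sketch of just the reconciliation step in isolation; the function is hypothetical and assumes group records expose `id` and `name` as they do in the diff:

```typescript
type TOrgGroup = { id: string; name: string };
type TMembership = { groupId: string; groupName: string };

// Given the IdP's group claim and the user's current memberships, compute what to add and remove.
export const planGroupSync = (claimGroups: string[], memberships: TMembership[], orgGroups: TOrgGroup[]) => {
  const currentNames = memberships.map((m) => m.groupName);

  // Groups named in the claim that the user is not yet a member of.
  const toAdd = orgGroups.filter((g) => claimGroups.includes(g.name) && !currentNames.includes(g.name));

  // Memberships the user holds that the claim no longer mentions.
  const staleGroupIds = memberships.filter((m) => !claimGroups.includes(m.groupName)).map((m) => m.groupId);
  const toRemove = orgGroups.filter((g) => staleGroupIds.includes(g.id));

  return { toAdd, toRemove };
};
```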
||||||
@ -385,7 +480,8 @@ export const oidcConfigServiceFactory = ({
|
|||||||
tokenEndpoint,
|
tokenEndpoint,
|
||||||
userinfoEndpoint,
|
userinfoEndpoint,
|
||||||
clientId,
|
clientId,
|
||||||
clientSecret
|
clientSecret,
|
||||||
|
manageGroupMemberships
|
||||||
}: TUpdateOidcCfgDTO) => {
|
}: TUpdateOidcCfgDTO) => {
|
||||||
const org = await orgDAL.findOne({
|
const org = await orgDAL.findOne({
|
||||||
slug: orgSlug
|
slug: orgSlug
|
||||||
@ -413,12 +509,10 @@ export const oidcConfigServiceFactory = ({
|
|||||||
);
|
);
|
||||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
|
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
|
||||||
|
|
||||||
const orgBot = await orgBotDAL.findOne({ orgId: org.id });
|
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||||
if (!orgBot)
|
type: KmsDataKey.Organization,
|
||||||
throw new NotFoundError({
|
orgId: org.id
|
||||||
message: `Organization bot for organization with ID '${org.id}' not found`,
|
});
|
||||||
name: "OrgBotNotFound"
|
|
||||||
});
|
|
||||||
|
|
||||||
const serverCfg = await getServerCfg();
|
const serverCfg = await getServerCfg();
|
||||||
if (isActive && !serverCfg.trustOidcEmails) {
|
if (isActive && !serverCfg.trustOidcEmails) {
|
||||||
@ -431,13 +525,6 @@ export const oidcConfigServiceFactory = ({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const key = infisicalSymmetricDecrypt({
|
|
||||||
ciphertext: orgBot.encryptedSymmetricKey,
|
|
||||||
iv: orgBot.symmetricKeyIV,
|
|
||||||
tag: orgBot.symmetricKeyTag,
|
|
||||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
|
||||||
});
|
|
||||||
|
|
||||||
const updateQuery: TOidcConfigsUpdate = {
|
const updateQuery: TOidcConfigsUpdate = {
|
||||||
allowedEmailDomains,
|
allowedEmailDomains,
|
||||||
configurationType,
|
configurationType,
|
||||||
@ -448,26 +535,16 @@ export const oidcConfigServiceFactory = ({
|
|||||||
userinfoEndpoint,
|
userinfoEndpoint,
|
||||||
jwksUri,
|
jwksUri,
|
||||||
isActive,
|
isActive,
|
||||||
lastUsed: null
|
lastUsed: null,
|
||||||
|
manageGroupMemberships
|
||||||
};
|
};
|
||||||
|
|
||||||
if (clientId !== undefined) {
|
if (clientId !== undefined) {
|
||||||
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
|
updateQuery.encryptedOidcClientId = encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob;
|
||||||
updateQuery.encryptedClientId = encryptedClientId;
|
|
||||||
updateQuery.clientIdIV = clientIdIV;
|
|
||||||
updateQuery.clientIdTag = clientIdTag;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (clientSecret !== undefined) {
|
if (clientSecret !== undefined) {
|
||||||
const {
|
updateQuery.encryptedOidcClientSecret = encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob;
|
||||||
ciphertext: encryptedClientSecret,
|
|
||||||
iv: clientSecretIV,
|
|
||||||
tag: clientSecretTag
|
|
||||||
} = encryptSymmetric(clientSecret, key);
|
|
||||||
|
|
||||||
updateQuery.encryptedClientSecret = encryptedClientSecret;
|
|
||||||
updateQuery.clientSecretIV = clientSecretIV;
|
|
||||||
updateQuery.clientSecretTag = clientSecretTag;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
|
const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
|
||||||
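Note: the recurring change in this file (and in the SAML and secret-rotation services further down) swaps the per-org bot symmetric key for a KMS data-key cipher pair. A minimal sketch of the pattern, assuming the internal kmsService API exactly as it appears in the diff, with kmsService injected and the code running inside an async service method; the orgId value is a placeholder:

import { KmsDataKey } from "@app/services/kms/kms-types";

// encryptor/decryptor close over the organization's KMS data key
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
  type: KmsDataKey.Organization,
  orgId: "org_123" // placeholder
});

// encrypt a plaintext value into a single binary blob column
const { cipherTextBlob } = encryptor({ plainText: Buffer.from("oidc-client-secret") });

// decrypt it back to a UTF-8 string
const clientSecret = decryptor({ cipherTextBlob }).toString();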
@@ -491,7 +568,8 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
clientId,
- clientSecret
+ clientSecret,
+ manageGroupMemberships
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug

@@ -518,61 +596,11 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);

- const orgBot = await orgBotDAL.transaction(async (tx) => {
-   const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
-   if (doc) return doc;
-
-   const { privateKey, publicKey } = generateAsymmetricKeyPair();
-   const key = generateSymmetricKey();
-   const {
-     ciphertext: encryptedPrivateKey,
-     iv: privateKeyIV,
-     tag: privateKeyTag,
-     encoding: privateKeyKeyEncoding,
-     algorithm: privateKeyAlgorithm
-   } = infisicalSymmetricEncypt(privateKey);
-   const {
-     ciphertext: encryptedSymmetricKey,
-     iv: symmetricKeyIV,
-     tag: symmetricKeyTag,
-     encoding: symmetricKeyKeyEncoding,
-     algorithm: symmetricKeyAlgorithm
-   } = infisicalSymmetricEncypt(key);
-
-   return orgBotDAL.create(
-     {
-       name: "Infisical org bot",
-       publicKey,
-       privateKeyIV,
-       encryptedPrivateKey,
-       symmetricKeyIV,
-       symmetricKeyTag,
-       encryptedSymmetricKey,
-       symmetricKeyAlgorithm,
-       orgId: org.id,
-       privateKeyTag,
-       privateKeyAlgorithm,
-       privateKeyKeyEncoding,
-       symmetricKeyKeyEncoding
-     },
-     tx
-   );
- });
-
- const key = infisicalSymmetricDecrypt({
-   ciphertext: orgBot.encryptedSymmetricKey,
-   iv: orgBot.symmetricKeyIV,
-   tag: orgBot.symmetricKeyTag,
-   keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
- });
-
- const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
- const {
-   ciphertext: encryptedClientSecret,
-   iv: clientSecretIV,
-   tag: clientSecretTag
- } = encryptSymmetric(clientSecret, key);
+ const { encryptor } = await kmsService.createCipherPairWithDataKey({
+   type: KmsDataKey.Organization,
+   orgId: org.id
+ });

const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,

@@ -584,12 +612,9 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
- encryptedClientId,
- clientIdIV,
- clientIdTag,
- encryptedClientSecret,
- clientSecretIV,
- clientSecretTag
+ manageGroupMemberships,
+ encryptedOidcClientId: encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob,
+ encryptedOidcClientSecret: encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob
});

return oidcCfg;

@@ -683,7 +708,9 @@ export const oidcConfigServiceFactory = ({
firstName: claims.given_name ?? "",
lastName: claims.family_name ?? "",
orgId: org.id,
- callbackPort
+ groups: claims.groups as string[] | undefined,
+ callbackPort,
+ manageGroupMemberships: oidcCfg.manageGroupMemberships
})
.then(({ isUserCompleted, providerAuthToken }) => {
cb(null, { isUserCompleted, providerAuthToken });

@@ -697,5 +724,16 @@ export const oidcConfigServiceFactory = ({
return strategy;
};

- return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
+ const isOidcManageGroupMembershipsEnabled = async (orgId: string, actor: OrgServiceActor) => {
+   await permissionService.getUserOrgPermission(actor.id, orgId, actor.authMethod, actor.orgId);
+
+   const oidcConfig = await oidcConfigDAL.findOne({
+     orgId,
+     isActive: true
+   });
+
+   return Boolean(oidcConfig?.manageGroupMemberships);
+ };
+
+ return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg, isOidcManageGroupMembershipsEnabled };
};

@@ -12,6 +12,8 @@ export type TOidcLoginDTO = {
lastName?: string;
orgId: string;
callbackPort?: string;
+ groups?: string[];
+ manageGroupMemberships?: boolean | null;
};

export type TGetOidcCfgDTO =

@@ -37,6 +39,7 @@ export type TCreateOidcCfgDTO = {
clientSecret: string;
isActive: boolean;
orgSlug: string;
+ manageGroupMemberships: boolean;
} & TGenericPermission;

export type TUpdateOidcCfgDTO = Partial<{

@@ -52,5 +55,6 @@ export type TUpdateOidcCfgDTO = Partial<{
clientSecret: string;
isActive: boolean;
orgSlug: string;
+ manageGroupMemberships: boolean;
}> &
TGenericPermission;

@@ -6,7 +6,7 @@ import {
CASL_ACTION_SCHEMA_NATIVE_ENUM
} from "@app/ee/services/permission/permission-schemas";
import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
- import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
+ import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";

import { PermissionConditionSchema } from "./permission-types";

@@ -163,6 +163,27 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];

+ const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
+ const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
+   z.string().refine((val) => val.startsWith("/"), SECRET_PATH_MISSING_SLASH_ERR_MSG),
+   z
+     .object({
+       [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ].refine(
+         (val) => val.startsWith("/"),
+         SECRET_PATH_MISSING_SLASH_ERR_MSG
+       ),
+       [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ].refine(
+         (val) => val.startsWith("/"),
+         SECRET_PATH_MISSING_SLASH_ERR_MSG
+       ),
+       [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN].refine(
+         (val) => val.every((el) => el.startsWith("/")),
+         SECRET_PATH_MISSING_SLASH_ERR_MSG
+       ),
+       [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
+     })
+     .partial()
+ ]);
+
// akhilmhdh: don't modify this for v2
// if you want to update create a new schema
const SecretConditionV1Schema = z

@@ -177,17 +198,7 @@ const SecretConditionV1Schema = z
})
.partial()
]),
- secretPath: z.union([
-   z.string(),
-   z
-     .object({
-       [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
-       [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
-       [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
-       [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
-     })
-     .partial()
- ])
+ secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();

@@ -204,17 +215,7 @@ const SecretConditionV2Schema = z
})
.partial()
]),
- secretPath: z.union([
-   z.string(),
-   z
-     .object({
-       [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
-       [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
-       [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
-       [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
-     })
-     .partial()
- ]),
+ secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA,
secretName: z.union([
z.string(),
z
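Note: a standalone sketch of the secret-path refinement introduced above, using only public zod APIs; SecretPathSchema is a hypothetical name used for illustration:

import { z } from "zod";

const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";

// Same refinement as the string branch of SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
const SecretPathSchema = z.string().refine((val) => val.startsWith("/"), SECRET_PATH_MISSING_SLASH_ERR_MSG);

SecretPathSchema.parse("/app/db/password"); // passes
SecretPathSchema.safeParse("app/db/password").success; // false -- rejected with the message above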
@@ -15,7 +15,7 @@ import {
} from "@app/ee/services/project-template/project-template-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
- import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
+ import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";

import { TProjectTemplateDALFactory } from "./project-template-dal";

@@ -2,7 +2,7 @@ import { z } from "zod";

import { TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
- import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
+ import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";

export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">;

@@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
- import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
+ import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";

@@ -1,29 +1,15 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";

- import {
-   OrgMembershipStatus,
-   SecretKeyEncoding,
-   TableName,
-   TSamlConfigs,
-   TSamlConfigsUpdate,
-   TUsers
- } from "@app/db/schemas";
+ import { OrgMembershipStatus, TableName, TSamlConfigs, TSamlConfigsUpdate, TUsers } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
- import {
-   decryptSymmetric,
-   encryptSymmetric,
-   generateAsymmetricKeyPair,
-   generateSymmetricKey,
-   infisicalSymmetricDecrypt,
-   infisicalSymmetricEncypt
- } from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TIdentityMetadataDALFactory } from "@app/services/identity/identity-metadata-dal";
- import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
+ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+ import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";

@@ -52,21 +38,19 @@ type TSamlConfigServiceFactoryDep = {
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;

identityMetadataDAL: Pick<TIdentityMetadataDALFactory, "delete" | "insertMany" | "transaction">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
- orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
+ kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TSamlConfigServiceFactory = ReturnType<typeof samlConfigServiceFactory>;

export const samlConfigServiceFactory = ({
samlConfigDAL,
- orgBotDAL,
orgDAL,
orgMembershipDAL,
userDAL,

@@ -75,7 +59,8 @@ export const samlConfigServiceFactory = ({
licenseService,
tokenService,
smtpService,
- identityMetadataDAL
+ identityMetadataDAL,
+ kmsService
}: TSamlConfigServiceFactoryDep) => {
const createSamlCfg = async ({
cert,

@@ -99,70 +84,18 @@ export const samlConfigServiceFactory = ({
"Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to create SSO configuration."
});

- const orgBot = await orgBotDAL.transaction(async (tx) => {
-   const doc = await orgBotDAL.findOne({ orgId }, tx);
-   if (doc) return doc;
-
-   const { privateKey, publicKey } = generateAsymmetricKeyPair();
-   const key = generateSymmetricKey();
-   const {
-     ciphertext: encryptedPrivateKey,
-     iv: privateKeyIV,
-     tag: privateKeyTag,
-     encoding: privateKeyKeyEncoding,
-     algorithm: privateKeyAlgorithm
-   } = infisicalSymmetricEncypt(privateKey);
-   const {
-     ciphertext: encryptedSymmetricKey,
-     iv: symmetricKeyIV,
-     tag: symmetricKeyTag,
-     encoding: symmetricKeyKeyEncoding,
-     algorithm: symmetricKeyAlgorithm
-   } = infisicalSymmetricEncypt(key);
-
-   return orgBotDAL.create(
-     {
-       name: "Infisical org bot",
-       publicKey,
-       privateKeyIV,
-       encryptedPrivateKey,
-       symmetricKeyIV,
-       symmetricKeyTag,
-       encryptedSymmetricKey,
-       symmetricKeyAlgorithm,
-       orgId,
-       privateKeyTag,
-       privateKeyAlgorithm,
-       privateKeyKeyEncoding,
-       symmetricKeyKeyEncoding
-     },
-     tx
-   );
- });
-
- const key = infisicalSymmetricDecrypt({
-   ciphertext: orgBot.encryptedSymmetricKey,
-   iv: orgBot.symmetricKeyIV,
-   tag: orgBot.symmetricKeyTag,
-   keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
- });
-
- const { ciphertext: encryptedEntryPoint, iv: entryPointIV, tag: entryPointTag } = encryptSymmetric(entryPoint, key);
- const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
- const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
+ const { encryptor } = await kmsService.createCipherPairWithDataKey({
+   type: KmsDataKey.Organization,
+   orgId
+ });
const samlConfig = await samlConfigDAL.create({
orgId,
authProvider,
isActive,
- encryptedEntryPoint,
- entryPointIV,
- entryPointTag,
- encryptedIssuer,
- issuerIV,
- issuerTag,
- encryptedCert,
- certIV,
- certTag
+ encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
+ encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
+ encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
});

return samlConfig;

@@ -190,40 +123,21 @@ export const samlConfigServiceFactory = ({
});

const updateQuery: TSamlConfigsUpdate = { authProvider, isActive, lastUsed: null };
- const orgBot = await orgBotDAL.findOne({ orgId });
- if (!orgBot)
-   throw new NotFoundError({
-     message: `Organization bot not found for organization with ID '${orgId}'`,
-     name: "OrgBotNotFound"
-   });
- const key = infisicalSymmetricDecrypt({
-   ciphertext: orgBot.encryptedSymmetricKey,
-   iv: orgBot.symmetricKeyIV,
-   tag: orgBot.symmetricKeyTag,
-   keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
- });
+ const { encryptor } = await kmsService.createCipherPairWithDataKey({
+   type: KmsDataKey.Organization,
+   orgId
+ });

if (entryPoint !== undefined) {
-   const {
-     ciphertext: encryptedEntryPoint,
-     iv: entryPointIV,
-     tag: entryPointTag
-   } = encryptSymmetric(entryPoint, key);
-   updateQuery.encryptedEntryPoint = encryptedEntryPoint;
-   updateQuery.entryPointIV = entryPointIV;
-   updateQuery.entryPointTag = entryPointTag;
+   updateQuery.encryptedSamlEntryPoint = encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob;
}

if (issuer !== undefined) {
-   const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
-   updateQuery.encryptedIssuer = encryptedIssuer;
-   updateQuery.issuerIV = issuerIV;
-   updateQuery.issuerTag = issuerTag;
+   updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
}

if (cert !== undefined) {
-   const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
-   updateQuery.encryptedCert = encryptedCert;
-   updateQuery.certIV = certIV;
-   updateQuery.certTag = certTag;
+   updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
}

const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);

@@ -233,14 +147,14 @@ export const samlConfigServiceFactory = ({
};

const getSaml = async (dto: TGetSamlCfgDTO) => {
- let ssoConfig: TSamlConfigs | undefined;
+ let samlConfig: TSamlConfigs | undefined;
if (dto.type === "org") {
- ssoConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
- if (!ssoConfig) return;
+ samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
+ if (!samlConfig) return;
} else if (dto.type === "orgSlug") {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) return;
- ssoConfig = await samlConfigDAL.findOne({ orgId: org.id });
+ samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
} else if (dto.type === "ssoId") {
// TODO:
// We made this change because saml config ids were not moved over during the migration

@@ -259,81 +173,51 @@ export const samlConfigServiceFactory = ({

const id = UUIDToMongoId[dto.id] ?? dto.id;

- ssoConfig = await samlConfigDAL.findById(id);
+ samlConfig = await samlConfigDAL.findById(id);
}
- if (!ssoConfig) throw new NotFoundError({ message: `Failed to find SSO data` });
+ if (!samlConfig) throw new NotFoundError({ message: `Failed to find SSO data` });

// when dto is type id means it's internally used
if (dto.type === "org") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
- ssoConfig.orgId,
+ samlConfig.orgId,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
- const {
-   entryPointTag,
-   entryPointIV,
-   encryptedEntryPoint,
-   certTag,
-   certIV,
-   encryptedCert,
-   issuerTag,
-   issuerIV,
-   encryptedIssuer
- } = ssoConfig;
-
- const orgBot = await orgBotDAL.findOne({ orgId: ssoConfig.orgId });
- if (!orgBot)
-   throw new NotFoundError({
-     message: `Organization bot not found in organization with ID '${ssoConfig.orgId}'`,
-     name: "OrgBotNotFound"
-   });
- const key = infisicalSymmetricDecrypt({
-   ciphertext: orgBot.encryptedSymmetricKey,
-   iv: orgBot.symmetricKeyIV,
-   tag: orgBot.symmetricKeyTag,
-   keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
- });
+ const { decryptor } = await kmsService.createCipherPairWithDataKey({
+   type: KmsDataKey.Organization,
+   orgId: samlConfig.orgId
+ });

let entryPoint = "";
- if (encryptedEntryPoint && entryPointIV && entryPointTag) {
-   entryPoint = decryptSymmetric({
-     ciphertext: encryptedEntryPoint,
-     key,
-     tag: entryPointTag,
-     iv: entryPointIV
-   });
+ if (samlConfig.encryptedSamlEntryPoint) {
+   entryPoint = decryptor({ cipherTextBlob: samlConfig.encryptedSamlEntryPoint }).toString();
}

let issuer = "";
- if (encryptedIssuer && issuerTag && issuerIV) {
-   issuer = decryptSymmetric({
-     key,
-     tag: issuerTag,
-     iv: issuerIV,
-     ciphertext: encryptedIssuer
-   });
+ if (samlConfig.encryptedSamlIssuer) {
+   issuer = decryptor({ cipherTextBlob: samlConfig.encryptedSamlIssuer }).toString();
}

let cert = "";
- if (encryptedCert && certTag && certIV) {
-   cert = decryptSymmetric({ key, tag: certTag, iv: certIV, ciphertext: encryptedCert });
+ if (samlConfig.encryptedSamlCertificate) {
+   cert = decryptor({ cipherTextBlob: samlConfig.encryptedSamlCertificate }).toString();
}

return {
- id: ssoConfig.id,
- organization: ssoConfig.orgId,
- orgId: ssoConfig.orgId,
- authProvider: ssoConfig.authProvider,
- isActive: ssoConfig.isActive,
+ id: samlConfig.id,
+ organization: samlConfig.orgId,
+ orgId: samlConfig.orgId,
+ authProvider: samlConfig.authProvider,
+ isActive: samlConfig.isActive,
entryPoint,
issuer,
cert,
- lastUsed: ssoConfig.lastUsed
+ lastUsed: samlConfig.lastUsed
};
};
@@ -5,13 +5,9 @@ import {
IAMClient
} from "@aws-sdk/client-iam";

- import { SecretKeyEncoding, SecretType } from "@app/db/schemas";
+ import { SecretType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
- import {
-   encryptSymmetric128BitHexKeyUTF8,
-   infisicalSymmetricDecrypt,
-   infisicalSymmetricEncypt
- } from "@app/lib/crypto/encryption";
+ import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

@@ -135,20 +131,15 @@ export const secretRotationQueueFactory = ({

// deep copy
const provider = JSON.parse(JSON.stringify(rotationProvider)) as TSecretRotationProviderTemplate;
- // now get the encrypted variable values
- // in includes the inputs, the previous outputs
- // internal mapping variables etc
- const { encryptedDataTag, encryptedDataIV, encryptedData, keyEncoding } = secretRotation;
- if (!encryptedDataTag || !encryptedDataIV || !encryptedData || !keyEncoding) {
-   throw new DisableRotationErrors({ message: "No inputs found" });
- }
- const decryptedData = infisicalSymmetricDecrypt({
-   keyEncoding: keyEncoding as SecretKeyEncoding,
-   ciphertext: encryptedData,
-   iv: encryptedDataIV,
-   tag: encryptedDataTag
- });
+ const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
+   await kmsService.createCipherPairWithDataKey({
+     type: KmsDataKey.SecretManager,
+     projectId: secretRotation.projectId
+   });
+
+ const decryptedData = secretManagerDecryptor({
+   cipherTextBlob: secretRotation.encryptedRotationData
+ }).toString();

const variables = JSON.parse(decryptedData) as TSecretRotationEncData;
// rotation set cycle

@@ -303,11 +294,9 @@ export const secretRotationQueueFactory = ({
outputs: newCredential.outputs,
internal: newCredential.internal
});
- const encVarData = infisicalSymmetricEncypt(JSON.stringify(variables));
- const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
-   type: KmsDataKey.SecretManager,
-   projectId: secretRotation.projectId
- });
+ const encryptedRotationData = secretManagerEncryptor({
+   plainText: Buffer.from(JSON.stringify(variables))
+ }).cipherTextBlob;

const numberOfSecretsRotated = rotationOutputs.length;
if (shouldUseSecretV2Bridge) {

@@ -323,11 +312,7 @@ export const secretRotationQueueFactory = ({
await secretRotationDAL.updateById(
rotationId,
{
- encryptedData: encVarData.ciphertext,
- encryptedDataIV: encVarData.iv,
- encryptedDataTag: encVarData.tag,
- keyEncoding: encVarData.encoding,
- algorithm: encVarData.algorithm,
+ encryptedRotationData,
lastRotatedAt: new Date(),
statusMessage: "Rotated successfull",
status: "success"

@@ -371,11 +356,7 @@ export const secretRotationQueueFactory = ({
await secretRotationDAL.updateById(
rotationId,
{
- encryptedData: encVarData.ciphertext,
- encryptedDataIV: encVarData.iv,
- encryptedDataTag: encVarData.tag,
- keyEncoding: encVarData.encoding,
- algorithm: encVarData.algorithm,
+ encryptedRotationData,
lastRotatedAt: new Date(),
statusMessage: "Rotated successfull",
status: "success"

@@ -2,9 +2,11 @@ import { ForbiddenError, subject } from "@casl/ability";
import Ajv from "ajv";

import { ActionProjectType, ProjectVersion, TableName } from "@app/db/schemas";
- import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
+ import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
+ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+ import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";

@@ -30,6 +32,7 @@ type TSecretRotationServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretRotationQueue: TSecretRotationQueueFactory;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
+ kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TSecretRotationServiceFactory = ReturnType<typeof secretRotationServiceFactory>;

@@ -44,7 +47,8 @@ export const secretRotationServiceFactory = ({
folderDAL,
secretDAL,
projectBotService,
- secretV2BridgeDAL
+ secretV2BridgeDAL,
+ kmsService
}: TSecretRotationServiceFactoryDep) => {
const getProviderTemplates = async ({
actor,

@@ -156,7 +160,11 @@ export const secretRotationServiceFactory = ({
inputs: formattedInputs,
creds: []
};
- const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
+ const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
+   type: KmsDataKey.SecretManager,
+   projectId
+ });
+
const secretRotation = await secretRotationDAL.transaction(async (tx) => {
const doc = await secretRotationDAL.create(
{

@@ -164,11 +172,8 @@ export const secretRotationServiceFactory = ({
secretPath,
interval,
envId: folder.envId,
- encryptedDataTag: encData.tag,
- encryptedDataIV: encData.iv,
- encryptedData: encData.ciphertext,
- algorithm: encData.algorithm,
- keyEncoding: encData.encoding
+ encryptedRotationData: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(unencryptedData)) })
+   .cipherTextBlob
},
tx
);

@@ -1,3 +1,5 @@
+ /* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
+ // akhilmhdh: I did this, quite strange bug with eslint. Everything do have a type stil has this error
import { ForbiddenError, subject } from "@casl/ability";

import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";

@@ -1,4 +1,4 @@
- /* eslint-disable no-await-in-loop */
+ /* eslint-disable no-await-in-loop,@typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
import { Knex } from "knex";
import { z } from "zod";

@@ -2,6 +2,12 @@ import { Redis } from "ioredis";

import { Redlock, Settings } from "@app/lib/red-lock";

+ export enum PgSqlLock {
+   BootUpMigration = 2023,
+   SuperAdminInit = 2024,
+   KmsRootKeyInit = 2025
+ }
+
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;

// all the key prefixes used must be set here to avoid conflict
backend/src/keystore/memory.ts (new file)
@@ -0,0 +1,38 @@
+ import { Lock } from "@app/lib/red-lock";
+
+ import { TKeyStoreFactory } from "./keystore";
+
+ export const inMemoryKeyStore = (): TKeyStoreFactory => {
+   const store: Record<string, string | number | Buffer> = {};
+
+   return {
+     setItem: async (key, value) => {
+       store[key] = value;
+       return "OK";
+     },
+     setItemWithExpiry: async (key, value) => {
+       store[key] = value;
+       return "OK";
+     },
+     deleteItem: async (key) => {
+       delete store[key];
+       return 1;
+     },
+     getItem: async (key) => {
+       const value = store[key];
+       if (typeof value === "string") {
+         return value;
+       }
+       return null;
+     },
+     incrementBy: async () => {
+       return 1;
+     },
+     acquireLock: () => {
+       return Promise.resolve({
+         release: () => {}
+       }) as Promise<Lock>;
+     },
+     waitTillReady: async () => {}
+   };
+ };
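Note: the new in-memory keystore looks like a drop-in for tests; a minimal usage sketch based only on the methods shown above (the import path matches the file added here, the key and value are placeholders):

import { inMemoryKeyStore } from "@app/keystore/memory";

const keyStore = inMemoryKeyStore();

await keyStore.setItem("migration-status", "done");
const status = await keyStore.getItem("migration-status"); // "done"
await keyStore.deleteItem("migration-status");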
@@ -688,7 +688,9 @@ export const RAW_SECRETS = {
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
includeImports: "Weather to include imported secrets or not.",
- tagSlugs: "The comma separated tag slugs to filter secrets."
+ tagSlugs: "The comma separated tag slugs to filter secrets.",
+ metadataFilter:
+   "The secret metadata key-value pairs to filter secrets by. When querying for multiple metadata pairs, the query is treated as an AND operation. Secret metadata format is key=value1,value=value2|key=value3,value=value4."
},
CREATE: {
secretName: "The name of the secret to create.",
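Note: a hypothetical illustration of the documented metadataFilter string; only the filter format comes from the description above, the query wiring and endpoint are assumptions:

// Two metadata pairs, combined with AND as described above
const metadataFilter = "key=environment,value=production|key=team,value=payments";

const query = new URLSearchParams({
  environment: "dev",
  secretPath: "/",
  metadataFilter
});
// e.g. appended to the raw secrets list endpoint as ?<query>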
@@ -828,6 +830,8 @@ export const AUDIT_LOGS = {
projectId:
"Optionally filter logs by project ID. If not provided, logs from the entire organization will be returned.",
eventType: "The type of the event to export.",
+ secretPath:
+   "The path of the secret to query audit logs for. Note that the projectId parameter must also be provided.",
userAgentType: "Choose which consuming application to export audit logs for.",
eventMetadata:
"Filter by event metadata key-value pairs. Formatted as `key1=value1,key2=value2`, with comma-separation.",

@@ -1589,6 +1593,13 @@ export const KMS = {
orderDirection: "The direction to order keys in.",
search: "The text string to filter key names by."
},
+ GET_KEY_BY_ID: {
+   keyId: "The ID of the KMS key to retrieve."
+ },
+ GET_KEY_BY_NAME: {
+   keyName: "The name of the KMS key to retrieve.",
+   projectId: "The ID of the project the key belongs to."
+ },
ENCRYPT: {
keyId: "The ID of the key to encrypt the data with.",
plaintext: "The plaintext to be encrypted (base64 encoded)."

@@ -1707,21 +1718,40 @@ export const SecretSyncs = {
SYNC_OPTIONS: (destination: SecretSync) => {
const destinationName = SECRET_SYNC_NAME_MAP[destination];
return {
- INITIAL_SYNC_BEHAVIOR: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
- PREPEND_PREFIX: `Optionally prepend a prefix to your secrets' keys when syncing to ${destinationName}.`,
- APPEND_SUFFIX: `Optionally append a suffix to your secrets' keys when syncing to ${destinationName}.`
+ initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
};
},
DESTINATION_CONFIG: {
AWS_PARAMETER_STORE: {
- REGION: "The AWS region to sync secrets to.",
- PATH: "The Parameter Store path to sync secrets to."
+ region: "The AWS region to sync secrets to.",
+ path: "The Parameter Store path to sync secrets to."
+ },
+ AWS_SECRETS_MANAGER: {
+ region: "The AWS region to sync secrets to.",
+ mappingBehavior: "How secrets from Infisical should be mapped to AWS Secrets Manager; one-to-one or many-to-one.",
+ secretName: "The secret name in AWS Secrets Manager to sync to when using mapping behavior many-to-one."
},
GITHUB: {
- ORG: "The name of the GitHub organization.",
- OWNER: "The name of the GitHub account owner of the repository.",
- REPO: "The name of the GitHub repository.",
- ENV: "The name of the GitHub environment."
+ scope: "The GitHub scope that secrets should be synced to",
+ org: "The name of the GitHub organization.",
+ owner: "The name of the GitHub account owner of the repository.",
+ repo: "The name of the GitHub repository.",
+ env: "The name of the GitHub environment."
+ },
+ AZURE_KEY_VAULT: {
+ vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
+ },
+ AZURE_APP_CONFIGURATION: {
+ configurationUrl:
+   "The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
+ label: "An optional label to assign to secrets created in Azure App Configuration."
+ },
+ GCP: {
+ scope: "The Google project scope that secrets should be synced to.",
+ projectId: "The ID of the Google project secrets should be synced to."
+ },
+ DATABRICKS: {
+ scope: "The Databricks secret scope that secrets should be synced to."
}
}
};

@@ -201,6 +201,13 @@ const envSchema = z
INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),

+ // gcp app
+ INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
+
+ // azure app
+ INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
+ INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),
+
/* CORS ----------------------------------------------------------------------------- */

CORS_ALLOWED_ORIGINS: zpStr(

@@ -251,7 +258,8 @@ const envSchema = z
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
}));

- let envCfg: Readonly<z.infer<typeof envSchema>>;
+ export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
+ let envCfg: TEnvConfig;

export const getConfig = () => envCfg;
// cannot import singleton logger directly as it needs config to load various transport

@@ -116,7 +116,7 @@ export const decryptAsymmetric = ({ ciphertext, nonce, publicKey, privateKey }:

export const generateSymmetricKey = (size = 32) => crypto.randomBytes(size).toString("base64");

- export const generateHash = (value: string) => crypto.createHash("sha256").update(value).digest("hex");
+ export const generateHash = (value: string | Buffer) => crypto.createHash("sha256").update(value).digest("hex");

export const generateAsymmetricKeyPair = () => {
const pair = nacl.box.keyPair();
backend/src/lib/error-codes/database.ts (new file)
@@ -0,0 +1,4 @@
+ export enum DatabaseErrorCode {
+   ForeignKeyViolation = "23503",
+   UniqueViolation = "23505"
+ }

backend/src/lib/error-codes/index.ts (new file)
@@ -0,0 +1 @@
+ export * from "./database";
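Note: these values are the standard Postgres SQLSTATE codes for foreign-key and unique violations. A minimal sketch of how they would typically be consumed, assuming a Knex/pg error object that exposes a `code` property; the table and column names are placeholders:

import { Knex } from "knex";

import { DatabaseErrorCode } from "@app/lib/error-codes";

// Insert a row and translate a duplicate-key failure into a friendlier error.
export const insertUserEmail = async (db: Knex, email: string) => {
  try {
    await db("users").insert({ email });
  } catch (err) {
    if ((err as { code?: string }).code === DatabaseErrorCode.UniqueViolation) {
      throw new Error(`Email '${email}' already exists`);
    }
    throw err;
  }
};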
@@ -7,6 +7,7 @@ import { buildDynamicKnexQuery, TKnexDynamicOperator } from "./dynamic";

export * from "./connection";
export * from "./join";
+ export * from "./prependTableNameToFindFilter";
export * from "./select";

export const withTransaction = <K extends object>(db: Knex, dal: K) => ({

backend/src/lib/knex/prependTableNameToFindFilter.ts (new file)
@@ -0,0 +1,13 @@
+ import { TableName } from "@app/db/schemas";
+ import { buildFindFilter } from "@app/lib/knex/index";
+
+ type TFindFilterParameters = Parameters<typeof buildFindFilter<object>>[0];
+
+ export const prependTableNameToFindFilter = (tableName: TableName, filterObj: object): TFindFilterParameters =>
+   Object.fromEntries(
+     Object.entries(filterObj).map(([key, value]) =>
+       key.startsWith("$")
+         ? [key, prependTableNameToFindFilter(tableName, value as object)]
+         : [`${tableName}.${key}`, value]
+     )
+   );
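Note: a small usage sketch of the new helper. The enum member TableName.SecretFolder and the filter values are placeholders for illustration; the transformation itself follows the implementation above:

import { TableName } from "@app/db/schemas";

import { prependTableNameToFindFilter } from "@app/lib/knex/prependTableNameToFindFilter";

// Plain keys get prefixed with the table name; operator keys ($in, $search, ...) are recursed into.
const filter = prependTableNameToFindFilter(TableName.SecretFolder, {
  envId: "env_123",
  $in: { id: ["folder_1", "folder_2"] }
});
// => { "<table>.envId": "env_123", $in: { "<table>.id": ["folder_1", "folder_2"] } }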
@ -98,7 +98,7 @@ const extractReqId = () => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const initLogger = async () => {
|
export const initLogger = () => {
|
||||||
const cfg = loggerConfig.parse(process.env);
|
const cfg = loggerConfig.parse(process.env);
|
||||||
const targets: pino.TransportMultiOptions["targets"][number][] = [
|
const targets: pino.TransportMultiOptions["targets"][number][] = [
|
||||||
{
|
{
|
||||||
|
@ -2,14 +2,13 @@ import "./lib/telemetry/instrumentation";
|
|||||||
|
|
||||||
import dotenv from "dotenv";
|
import dotenv from "dotenv";
|
||||||
import { Redis } from "ioredis";
|
import { Redis } from "ioredis";
|
||||||
import path from "path";
|
|
||||||
|
|
||||||
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
|
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
|
||||||
|
|
||||||
|
import { runMigrations } from "./auto-start-migrations";
|
||||||
import { initAuditLogDbConnection, initDbConnection } from "./db";
|
import { initAuditLogDbConnection, initDbConnection } from "./db";
|
||||||
import { keyStoreFactory } from "./keystore/keystore";
|
import { keyStoreFactory } from "./keystore/keystore";
|
||||||
- import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env";
+ import { formatSmtpConfig, initEnvConfig } from "./lib/config/env";
- import { isMigrationMode } from "./lib/fn";
import { initLogger } from "./lib/logger";
import { queueServiceFactory } from "./queue";
import { main } from "./server/app";
@@ -19,58 +18,53 @@ import { smtpServiceFactory } from "./services/smtp/smtp-service";
dotenv.config();

const run = async () => {
- const logger = await initLogger();
+ const logger = initLogger();
- const appCfg = initEnvConfig(logger);
+ const envConfig = initEnvConfig(logger);

const db = initDbConnection({
- dbConnectionUri: appCfg.DB_CONNECTION_URI,
+ dbConnectionUri: envConfig.DB_CONNECTION_URI,
- dbRootCert: appCfg.DB_ROOT_CERT,
+ dbRootCert: envConfig.DB_ROOT_CERT,
- readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({
+ readReplicas: envConfig.DB_READ_REPLICAS?.map((el) => ({
dbRootCert: el.DB_ROOT_CERT,
dbConnectionUri: el.DB_CONNECTION_URI
}))
});

- const auditLogDb = appCfg.AUDIT_LOGS_DB_CONNECTION_URI
+ const auditLogDb = envConfig.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
- dbConnectionUri: appCfg.AUDIT_LOGS_DB_CONNECTION_URI,
+ dbConnectionUri: envConfig.AUDIT_LOGS_DB_CONNECTION_URI,
- dbRootCert: appCfg.AUDIT_LOGS_DB_ROOT_CERT
+ dbRootCert: envConfig.AUDIT_LOGS_DB_ROOT_CERT
})
: undefined;

- // Case: App is running in packaged mode (binary), and migration mode is enabled.
- // Run the migrations and exit the process after completion.
- if (IS_PACKAGED && isMigrationMode()) {
- try {
- logger.info("Running Postgres migrations..");
- await db.migrate.latest({
- directory: path.join(__dirname, "./db/migrations")
- });
- logger.info("Postgres migrations completed");
- } catch (err) {
- logger.error(err, "Failed to run migrations");
- process.exit(1);
- }
-
- process.exit(0);
- }
+ await runMigrations({ applicationDb: db, auditLogDb, logger });

const smtp = smtpServiceFactory(formatSmtpConfig());

- const queue = queueServiceFactory(appCfg.REDIS_URL, {
+ const queue = queueServiceFactory(envConfig.REDIS_URL, {
- dbConnectionUrl: appCfg.DB_CONNECTION_URI,
+ dbConnectionUrl: envConfig.DB_CONNECTION_URI,
- dbRootCert: appCfg.DB_ROOT_CERT
+ dbRootCert: envConfig.DB_ROOT_CERT
});

await queue.initialize();

- const keyStore = keyStoreFactory(appCfg.REDIS_URL);
+ const keyStore = keyStoreFactory(envConfig.REDIS_URL);
- const redis = new Redis(appCfg.REDIS_URL);
+ const redis = new Redis(envConfig.REDIS_URL);

- const hsmModule = initializeHsmModule();
+ const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();

- const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
+ const server = await main({
+ db,
+ auditLogDb,
+ hsmModule: hsmModule.getModule(),
+ smtp,
+ logger,
+ queue,
+ keyStore,
+ redis,
+ envConfig
+ });
const bootstrap = await bootstrapCheck({ db });

// eslint-disable-next-line
@@ -90,8 +84,8 @@ const run = async () => {
});

await server.listen({
- port: appCfg.PORT,
+ port: envConfig.PORT,
- host: appCfg.HOST,
+ host: envConfig.HOST,
listenTextResolver: (address) => {
void bootstrap();
return address;
@@ -17,7 +17,7 @@ import { Knex } from "knex";

import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
- import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
+ import { getConfig, IS_PACKAGED, TEnvConfig } from "@app/lib/config/env";
import { CustomLogger } from "@app/lib/logger/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TQueueServiceFactory } from "@app/queue";
@@ -43,10 +43,11 @@ type TMain = {
keyStore: TKeyStoreFactory;
hsmModule: HsmModule;
redis: Redis;
+ envConfig: TEnvConfig;
};

// Run the server!
- export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
+ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis, envConfig }: TMain) => {
const appCfg = getConfig();

const server = fastify({
@@ -127,7 +128,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
})
});

- await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
+ await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule, envConfig });

await server.register(registerServeUI, {
standaloneMode: appCfg.STANDALONE_MODE || IS_PACKAGED,
@@ -85,7 +85,7 @@ import { sshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certi
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
- import { getConfig } from "@app/lib/config/env";
+ import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
@@ -244,7 +244,8 @@ export const registerRoutes = async (
hsmModule,
smtp: smtpService,
queue: queueService,
- keyStore
+ keyStore,
+ envConfig
}: {
auditLogDb?: Knex;
db: Knex;
@@ -252,6 +253,7 @@ export const registerRoutes = async (
smtp: TSmtpService;
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
+ envConfig: TEnvConfig;
}
) => {
const appCfg = getConfig();
@@ -391,7 +393,8 @@ export const registerRoutes = async (
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });

const hsmService = hsmServiceFactory({
- hsmModule
+ hsmModule,
+ envConfig
});

const kmsService = kmsServiceFactory({
@@ -401,7 +404,8 @@ export const registerRoutes = async (
internalKmsDAL,
orgDAL,
projectDAL,
- hsmService
+ hsmService,
+ envConfig
});

const externalKmsService = externalKmsServiceFactory({
@@ -447,7 +451,6 @@ export const registerRoutes = async (
const samlService = samlConfigServiceFactory({
identityMetadataDAL,
permissionService,
- orgBotDAL,
orgDAL,
orgMembershipDAL,
userDAL,
@@ -455,7 +458,8 @@ export const registerRoutes = async (
samlConfigDAL,
licenseService,
tokenService,
- smtpService
+ smtpService,
+ kmsService
});
const groupService = groupServiceFactory({
userDAL,
@@ -467,7 +471,8 @@ export const registerRoutes = async (
projectBotDAL,
projectKeyDAL,
permissionService,
- licenseService
+ licenseService,
+ oidcConfigDAL
});
const groupProjectService = groupProjectServiceFactory({
groupDAL,
@@ -505,7 +510,6 @@ export const registerRoutes = async (
ldapGroupMapDAL,
orgDAL,
orgMembershipDAL,
- orgBotDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
@@ -517,7 +521,8 @@ export const registerRoutes = async (
permissionService,
licenseService,
tokenService,
- smtpService
+ smtpService,
+ kmsService
});

const telemetryService = telemetryServiceFactory({
@@ -848,7 +853,8 @@ export const registerRoutes = async (
secretVersionTagDAL,
secretVersionV2BridgeDAL,
secretVersionTagV2BridgeDAL,
- resourceMetadataDAL
+ resourceMetadataDAL,
+ appConnectionDAL
});

const secretQueueService = secretQueueFactory({
@@ -967,7 +973,8 @@ export const registerRoutes = async (
permissionService,
webhookDAL,
projectEnvDAL,
- projectDAL
+ projectDAL,
+ kmsService
});

const secretTagService = secretTagServiceFactory({ secretTagDAL, permissionService });
@@ -1147,7 +1154,8 @@ export const registerRoutes = async (
secretDAL,
folderDAL,
projectBotService,
- secretV2BridgeDAL
+ secretV2BridgeDAL,
+ kmsService
});

const integrationService = integrationServiceFactory({
@@ -1236,9 +1244,9 @@ export const registerRoutes = async (
identityKubernetesAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
- orgBotDAL,
permissionService,
- licenseService
+ licenseService,
+ kmsService
});
const identityGcpAuthService = identityGcpAuthServiceFactory({
identityGcpAuthDAL,
@@ -1270,7 +1278,7 @@ export const registerRoutes = async (
identityAccessTokenDAL,
permissionService,
licenseService,
- orgBotDAL
+ kmsService
});

const identityJwtAuthService = identityJwtAuthServiceFactory({
@@ -1287,7 +1295,9 @@ export const registerRoutes = async (
queueService,
dynamicSecretLeaseDAL,
dynamicSecretProviders,
- dynamicSecretDAL
+ dynamicSecretDAL,
+ folderDAL,
+ kmsService
});
const dynamicSecretService = dynamicSecretServiceFactory({
projectDAL,
@@ -1297,7 +1307,8 @@ export const registerRoutes = async (
dynamicSecretProviders,
folderDAL,
permissionService,
- licenseService
+ licenseService,
+ kmsService
});
const dynamicSecretLeaseService = dynamicSecretLeaseServiceFactory({
projectDAL,
@@ -1307,7 +1318,8 @@ export const registerRoutes = async (
dynamicSecretLeaseDAL,
dynamicSecretProviders,
folderDAL,
- licenseService
+ licenseService,
+ kmsService
});
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
@@ -1335,9 +1347,16 @@ export const registerRoutes = async (
licenseService,
tokenService,
smtpService,
- orgBotDAL,
+ kmsService,
permissionService,
- oidcConfigDAL
+ oidcConfigDAL,
+ projectBotDAL,
+ projectKeyDAL,
+ projectDAL,
+ userGroupMembershipDAL,
+ groupProjectDAL,
+ groupDAL,
+ auditLogService
});

const userEngagementService = userEngagementServiceFactory({
@@ -0,0 +1,42 @@
+ import { LdapConfigsSchema, OidcConfigsSchema, SamlConfigsSchema } from "@app/db/schemas";
+
+ export const SanitizedSamlConfigSchema = SamlConfigsSchema.pick({
+ id: true,
+ orgId: true,
+ isActive: true,
+ lastUsed: true,
+ createdAt: true,
+ updatedAt: true,
+ authProvider: true
+ });
+
+ export const SanitizedLdapConfigSchema = LdapConfigsSchema.pick({
+ updatedAt: true,
+ createdAt: true,
+ isActive: true,
+ orgId: true,
+ id: true,
+ url: true,
+ searchBase: true,
+ searchFilter: true,
+ groupSearchBase: true,
+ uniqueUserAttribute: true,
+ groupSearchFilter: true
+ });
+
+ export const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
+ id: true,
+ orgId: true,
+ isActive: true,
+ createdAt: true,
+ updatedAt: true,
+ lastUsed: true,
+ issuer: true,
+ jwksUri: true,
+ discoveryURL: true,
+ tokenEndpoint: true,
+ userinfoEndpoint: true,
+ configurationType: true,
+ allowedEmailDomains: true,
+ authorizationEndpoint: true
+ });
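The new schemas in the hunk above are all built with zod's `.pick(...)`, so only explicitly whitelisted columns can ever appear in an API response. A minimal sketch of the same pattern, assuming a hypothetical `RawConfigSchema` with a sensitive column (the names here are illustrative, not part of this diff):

```ts
import { z } from "zod";

// Hypothetical full table schema; stands in for SamlConfigsSchema / LdapConfigsSchema / OidcConfigsSchema.
const RawConfigSchema = z.object({
  id: z.string(),
  orgId: z.string(),
  isActive: z.boolean(),
  encryptedSecret: z.string() // sensitive column that must never be returned
});

// pick() keeps only the listed keys; anything else is stripped by parse().
export const SanitizedConfigSchema = RawConfigSchema.pick({
  id: true,
  orgId: true,
  isActive: true
});

// Prints { id: "cfg_1", orgId: "org_1", isActive: true }; encryptedSecret is dropped.
const safe = SanitizedConfigSchema.parse({
  id: "cfg_1",
  orgId: "org_1",
  isActive: true,
  encryptedSecret: "do-not-leak"
});
console.log(safe);
```

Picking is also safer than omitting over time: a newly added sensitive column stays excluded by default instead of leaking until someone remembers to extend an omit list.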
@@ -11,7 +11,7 @@ import {
} from "@app/db/schemas";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";

- import { UnpackedPermissionSchema } from "./santizedSchemas/permission";
+ import { UnpackedPermissionSchema } from "./sanitizedSchema/permission";

// sometimes the return data must be santizied to avoid leaking important values
// always prefer pick over omit in zod
@@ -110,7 +110,6 @@ export const secretRawSchema = z.object({
secretReminderNote: z.string().nullable().optional(),
secretReminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
- metadata: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
@@ -202,10 +201,11 @@ export const SanitizedRoleSchemaV1 = ProjectRolesSchema.extend({
});

export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
+ encryptedInput: true,
+ keyEncoding: true,
+ inputCiphertext: true,
inputIV: true,
inputTag: true,
- inputCiphertext: true,
- keyEncoding: true,
algorithm: true
});
@@ -73,7 +73,13 @@ export const registerAppConnectionEndpoints = <T extends TAppConnection, I exten
description: `List the ${appName} Connections the current user has permission to establish connections with.`,
response: {
200: z.object({
- appConnections: z.object({ app: z.literal(app), name: z.string(), id: z.string().uuid() }).array()
+ appConnections: z
+ .object({
+ app: z.literal(app),
+ name: z.string(),
+ id: z.string().uuid()
+ })
+ .array()
})
}
},
@@ -4,18 +4,39 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
+ import {
+ AzureAppConfigurationConnectionListItemSchema,
+ SanitizedAzureAppConfigurationConnectionSchema
+ } from "@app/services/app-connection/azure-app-configuration";
+ import {
+ AzureKeyVaultConnectionListItemSchema,
+ SanitizedAzureKeyVaultConnectionSchema
+ } from "@app/services/app-connection/azure-key-vault";
+ import {
+ DatabricksConnectionListItemSchema,
+ SanitizedDatabricksConnectionSchema
+ } from "@app/services/app-connection/databricks";
+ import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
import { AuthMode } from "@app/services/auth/auth-type";

// can't use discriminated due to multiple schemas for certain apps
const SanitizedAppConnectionSchema = z.union([
...SanitizedAwsConnectionSchema.options,
- ...SanitizedGitHubConnectionSchema.options
+ ...SanitizedGitHubConnectionSchema.options,
+ ...SanitizedGcpConnectionSchema.options,
+ ...SanitizedAzureKeyVaultConnectionSchema.options,
+ ...SanitizedAzureAppConfigurationConnectionSchema.options,
+ ...SanitizedDatabricksConnectionSchema.options
]);

const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AwsConnectionListItemSchema,
- GitHubConnectionListItemSchema
+ GitHubConnectionListItemSchema,
+ GcpConnectionListItemSchema,
+ AzureKeyVaultConnectionListItemSchema,
+ AzureAppConfigurationConnectionListItemSchema,
+ DatabricksConnectionListItemSchema
]);

export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
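The comment in the hunk above ("can't use discriminated due to multiple schemas for certain apps") points at a zod constraint: `z.discriminatedUnion` requires every member to carry a unique literal value for the discriminator key, while a single app can contribute several sanitized schemas that all share one `app` literal. A rough sketch of the distinction, with hypothetical member schemas that are not part of this diff:

```ts
import { z } from "zod";

// Two sanitized variants that share the discriminator value "aws"
// (e.g. one per credential method); illustrative only.
const AwsAccessKeyConnection = z.object({ app: z.literal("aws"), method: z.literal("access-key") });
const AwsAssumeRoleConnection = z.object({ app: z.literal("aws"), method: z.literal("assume-role") });
const GitHubConnection = z.object({ app: z.literal("github"), method: z.literal("github-app") });

// A plain union works: each member is tried in turn.
const SanitizedConnection = z.union([AwsAccessKeyConnection, AwsAssumeRoleConnection, GitHubConnection]);

// A discriminated union would fail at construction time, because "aws"
// appears twice as the value of the "app" discriminator:
// const Broken = z.discriminatedUnion("app", [AwsAccessKeyConnection, AwsAssumeRoleConnection, GitHubConnection]);

console.log(SanitizedConnection.parse({ app: "aws", method: "assume-role" }));
```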
@@ -0,0 +1,18 @@
+ import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+ import {
+ CreateAzureAppConfigurationConnectionSchema,
+ SanitizedAzureAppConfigurationConnectionSchema,
+ UpdateAzureAppConfigurationConnectionSchema
+ } from "@app/services/app-connection/azure-app-configuration";
+
+ import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
+
+ export const registerAzureAppConfigurationConnectionRouter = async (server: FastifyZodProvider) => {
+ registerAppConnectionEndpoints({
+ app: AppConnection.AzureAppConfiguration,
+ server,
+ sanitizedResponseSchema: SanitizedAzureAppConfigurationConnectionSchema,
+ createSchema: CreateAzureAppConfigurationConnectionSchema,
+ updateSchema: UpdateAzureAppConfigurationConnectionSchema
+ });
+ };
@@ -0,0 +1,18 @@
+ import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+ import {
+ CreateAzureKeyVaultConnectionSchema,
+ SanitizedAzureKeyVaultConnectionSchema,
+ UpdateAzureKeyVaultConnectionSchema
+ } from "@app/services/app-connection/azure-key-vault";
+
+ import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
+
+ export const registerAzureKeyVaultConnectionRouter = async (server: FastifyZodProvider) => {
+ registerAppConnectionEndpoints({
+ app: AppConnection.AzureKeyVault,
+ server,
+ sanitizedResponseSchema: SanitizedAzureKeyVaultConnectionSchema,
+ createSchema: CreateAzureKeyVaultConnectionSchema,
+ updateSchema: UpdateAzureKeyVaultConnectionSchema
+ });
+ };
@@ -0,0 +1,54 @@
+ import { z } from "zod";
+
+ import { readLimit } from "@app/server/config/rateLimiter";
+ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+ import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+ import {
+ CreateDatabricksConnectionSchema,
+ SanitizedDatabricksConnectionSchema,
+ UpdateDatabricksConnectionSchema
+ } from "@app/services/app-connection/databricks";
+ import { AuthMode } from "@app/services/auth/auth-type";
+
+ import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
+
+ export const registerDatabricksConnectionRouter = async (server: FastifyZodProvider) => {
+ registerAppConnectionEndpoints({
+ app: AppConnection.Databricks,
+ server,
+ sanitizedResponseSchema: SanitizedDatabricksConnectionSchema,
+ createSchema: CreateDatabricksConnectionSchema,
+ updateSchema: UpdateDatabricksConnectionSchema
+ });
+
+ // The below endpoints are not exposed and for Infisical App use
+
+ server.route({
+ method: "GET",
+ url: `/:connectionId/secret-scopes`,
+ config: {
+ rateLimit: readLimit
+ },
+ schema: {
+ params: z.object({
+ connectionId: z.string().uuid()
+ }),
+ response: {
+ 200: z.object({
+ secretScopes: z.object({ name: z.string() }).array()
+ })
+ }
+ },
+ onRequest: verifyAuth([AuthMode.JWT]),
+ handler: async (req) => {
+ const { connectionId } = req.params;
+
+ const secretScopes = await server.services.appConnection.databricks.listSecretScopes(
+ connectionId,
+ req.permission
+ );
+
+ return { secretScopes };
+ }
+ });
+ };
@@ -0,0 +1,48 @@
+ import z from "zod";
+
+ import { readLimit } from "@app/server/config/rateLimiter";
+ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+ import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+ import {
+ CreateGcpConnectionSchema,
+ SanitizedGcpConnectionSchema,
+ UpdateGcpConnectionSchema
+ } from "@app/services/app-connection/gcp";
+ import { AuthMode } from "@app/services/auth/auth-type";
+
+ import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
+
+ export const registerGcpConnectionRouter = async (server: FastifyZodProvider) => {
+ registerAppConnectionEndpoints({
+ app: AppConnection.GCP,
+ server,
+ sanitizedResponseSchema: SanitizedGcpConnectionSchema,
+ createSchema: CreateGcpConnectionSchema,
+ updateSchema: UpdateGcpConnectionSchema
+ });
+
+ // The below endpoints are not exposed and for Infisical App use
+ server.route({
+ method: "GET",
+ url: `/:connectionId/secret-manager-projects`,
+ config: {
+ rateLimit: readLimit
+ },
+ schema: {
+ params: z.object({
+ connectionId: z.string().uuid()
+ }),
+ response: {
+ 200: z.object({ id: z.string(), name: z.string() }).array()
+ }
+ },
+ onRequest: verifyAuth([AuthMode.JWT]),
+ handler: async (req) => {
+ const { connectionId } = req.params;
+
+ const projects = await server.services.appConnection.gcp.listSecretManagerProjects(connectionId, req.permission);
+
+ return projects;
+ }
+ });
+ };
@@ -41,7 +41,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
})
}
},
- onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+ onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;

@@ -67,7 +67,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
})
}
},
- onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+ onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;

@@ -97,7 +97,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
})
}
},
- onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+ onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const { repo, owner } = req.query;
@@ -1,6 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import { registerAwsConnectionRouter } from "./aws-connection-router";
+ import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
+ import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
+ import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
+ import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";

export * from "./app-connection-router";
@@ -8,5 +12,9 @@ export * from "./app-connection-router";
export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server: FastifyZodProvider) => Promise<void>> =
{
[AppConnection.AWS]: registerAwsConnectionRouter,
- [AppConnection.GitHub]: registerGitHubConnectionRouter
+ [AppConnection.GitHub]: registerGitHubConnectionRouter,
+ [AppConnection.GCP]: registerGcpConnectionRouter,
+ [AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
+ [AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
+ [AppConnection.Databricks]: registerDatabricksConnectionRouter
};
@@ -15,6 +15,10 @@ import { CmekOrderBy } from "@app/services/cmek/cmek-types";
const keyNameSchema = slugSchema({ min: 1, max: 32, field: "Name" });
const keyDescriptionSchema = z.string().trim().max(500).optional();

+ const CmekSchema = KmsKeysSchema.merge(InternalKmsSchema.pick({ version: true, encryptionAlgorithm: true })).omit({
+ isReserved: true
+ });
+
const base64Schema = z.string().superRefine((val, ctx) => {
if (!isBase64(val)) {
ctx.addIssue({
@@ -53,7 +57,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
- key: KmsKeysSchema
+ key: CmekSchema
})
}
},
@@ -106,7 +110,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
- key: KmsKeysSchema
+ key: CmekSchema
})
}
},
@@ -150,7 +154,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
- key: KmsKeysSchema
+ key: CmekSchema
})
}
},
@@ -201,7 +205,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
- keys: KmsKeysSchema.merge(InternalKmsSchema.pick({ version: true, encryptionAlgorithm: true })).array(),
+ keys: CmekSchema.array(),
totalCount: z.number()
})
}
@@ -230,6 +234,92 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}
});

+ server.route({
+ method: "GET",
+ url: "/keys/:keyId",
+ config: {
+ rateLimit: readLimit
+ },
+ schema: {
+ description: "Get KMS key by ID",
+ params: z.object({
+ keyId: z.string().uuid().describe(KMS.GET_KEY_BY_ID.keyId)
+ }),
+ response: {
+ 200: z.object({
+ key: CmekSchema
+ })
+ }
+ },
+ onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+ handler: async (req) => {
+ const {
+ params: { keyId },
+ permission
+ } = req;
+
+ const key = await server.services.cmek.findCmekById(keyId, permission);
+
+ await server.services.auditLog.createAuditLog({
+ ...req.auditLogInfo,
+ projectId: key.projectId!,
+ event: {
+ type: EventType.GET_CMEK,
+ metadata: {
+ keyId: key.id
+ }
+ }
+ });
+
+ return { key };
+ }
+ });
+
+ server.route({
+ method: "GET",
+ url: "/keys/key-name/:keyName",
+ config: {
+ rateLimit: readLimit
+ },
+ schema: {
+ description: "Get KMS key by Name",
+ params: z.object({
+ keyName: slugSchema({ field: "Key name" }).describe(KMS.GET_KEY_BY_NAME.keyName)
+ }),
+ querystring: z.object({
+ projectId: z.string().min(1, "Project ID is required").describe(KMS.GET_KEY_BY_NAME.projectId)
+ }),
+ response: {
+ 200: z.object({
+ key: CmekSchema
+ })
+ }
+ },
+ onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+ handler: async (req) => {
+ const {
+ params: { keyName },
+ query: { projectId },
+ permission
+ } = req;
+
+ const key = await server.services.cmek.findCmekByName(keyName, projectId, permission);
+
+ await server.services.auditLog.createAuditLog({
+ ...req.auditLogInfo,
+ projectId: key.projectId!,
+ event: {
+ type: EventType.GET_CMEK,
+ metadata: {
+ keyId: key.id
+ }
+ }
+ });
+
+ return { key };
+ }
+ });
+
// encrypt data
server.route({
method: "POST",
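The `CmekSchema` introduced at the top of the section above chains three zod object operations: `merge` to pull in columns from a second table schema, `pick` to keep only the public subset of those columns, and `omit` to drop an internal flag. A small sketch of that composition with hypothetical stand-in schemas (not the actual `KmsKeysSchema` / `InternalKmsSchema` definitions):

```ts
import { z } from "zod";

// Hypothetical stand-ins for the two table schemas being combined.
const KeySchema = z.object({ id: z.string(), name: z.string(), isReserved: z.boolean() });
const InternalKeySchema = z.object({
  version: z.number(),
  encryptionAlgorithm: z.string(),
  encryptedKey: z.string() // internal column, never exposed
});

// merge() widens the object, pick() selects the public internal columns,
// omit() removes a flag API callers should not see.
const PublicKeySchema = KeySchema
  .merge(InternalKeySchema.pick({ version: true, encryptionAlgorithm: true }))
  .omit({ isReserved: true });

type PublicKey = z.infer<typeof PublicKeySchema>;
// => { id: string; name: string; version: number; encryptionAlgorithm: string }
```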
@@ -79,44 +79,44 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(AWS_AUTH.ATTACH.identityId)
}),
- body: z.object({
- stsEndpoint: z
- .string()
- .trim()
- .min(1)
- .default("https://sts.amazonaws.com/")
- .describe(AWS_AUTH.ATTACH.stsEndpoint),
- allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.ATTACH.allowedPrincipalArns),
- allowedAccountIds: validateAccountIds.describe(AWS_AUTH.ATTACH.allowedAccountIds),
- accessTokenTrustedIps: z
- .object({
- ipAddress: z.string().trim()
- })
- .array()
- .min(1)
- .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
- .describe(AWS_AUTH.ATTACH.accessTokenTrustedIps),
- accessTokenTTL: z
- .number()
- .int()
- .min(1)
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenTTL must have a non zero number"
- })
- .default(2592000)
- .describe(AWS_AUTH.ATTACH.accessTokenTTL),
- accessTokenMaxTTL: z
- .number()
- .int()
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
- .default(2592000)
- .describe(AWS_AUTH.ATTACH.accessTokenMaxTTL),
- accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AWS_AUTH.ATTACH.accessTokenNumUsesLimit)
- }),
+ body: z
+ .object({
+ stsEndpoint: z
+ .string()
+ .trim()
+ .min(1)
+ .default("https://sts.amazonaws.com/")
+ .describe(AWS_AUTH.ATTACH.stsEndpoint),
+ allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.ATTACH.allowedPrincipalArns),
+ allowedAccountIds: validateAccountIds.describe(AWS_AUTH.ATTACH.allowedAccountIds),
+ accessTokenTrustedIps: z
+ .object({
+ ipAddress: z.string().trim()
+ })
+ .array()
+ .min(1)
+ .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
+ .describe(AWS_AUTH.ATTACH.accessTokenTrustedIps),
+ accessTokenTTL: z
+ .number()
+ .int()
+ .min(0)
+ .max(315360000)
+ .default(2592000)
+ .describe(AWS_AUTH.ATTACH.accessTokenTTL),
+ accessTokenMaxTTL: z
+ .number()
+ .int()
+ .min(1)
+ .max(315360000)
+ .default(2592000)
+ .describe(AWS_AUTH.ATTACH.accessTokenMaxTTL),
+ accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AWS_AUTH.ATTACH.accessTokenNumUsesLimit)
+ })
+ .refine(
+ (val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
+ "Access Token TTL cannot be greater than Access Token Max TTL."
+ ),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
@@ -172,30 +172,33 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().describe(AWS_AUTH.UPDATE.identityId)
}),
- body: z.object({
- stsEndpoint: z.string().trim().min(1).optional().describe(AWS_AUTH.UPDATE.stsEndpoint),
- allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.UPDATE.allowedPrincipalArns),
- allowedAccountIds: validateAccountIds.describe(AWS_AUTH.UPDATE.allowedAccountIds),
- accessTokenTrustedIps: z
- .object({
- ipAddress: z.string().trim()
- })
- .array()
- .min(1)
- .optional()
- .describe(AWS_AUTH.UPDATE.accessTokenTrustedIps),
- accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AWS_AUTH.UPDATE.accessTokenTTL),
- accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AWS_AUTH.UPDATE.accessTokenNumUsesLimit),
- accessTokenMaxTTL: z
- .number()
- .int()
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
- .optional()
- .describe(AWS_AUTH.UPDATE.accessTokenMaxTTL)
- }),
+ body: z
+ .object({
+ stsEndpoint: z.string().trim().min(1).optional().describe(AWS_AUTH.UPDATE.stsEndpoint),
+ allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.UPDATE.allowedPrincipalArns),
+ allowedAccountIds: validateAccountIds.describe(AWS_AUTH.UPDATE.allowedAccountIds),
+ accessTokenTrustedIps: z
+ .object({
+ ipAddress: z.string().trim()
+ })
+ .array()
+ .min(1)
+ .optional()
+ .describe(AWS_AUTH.UPDATE.accessTokenTrustedIps),
+ accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AWS_AUTH.UPDATE.accessTokenTTL),
+ accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AWS_AUTH.UPDATE.accessTokenNumUsesLimit),
+ accessTokenMaxTTL: z
+ .number()
+ .int()
+ .max(315360000)
+ .min(0)
+ .optional()
+ .describe(AWS_AUTH.UPDATE.accessTokenMaxTTL)
+ })
+ .refine(
+ (val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
+ "Access Token TTL cannot be greater than Access Token Max TTL."
+ ),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
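The same refactor repeats across the Azure, GCP, and JWT identity-auth routers below: the per-field "non zero" `.refine` checks are dropped in favour of a single object-level `.refine` that compares the two TTL fields after defaults are applied. A self-contained sketch of that validation pattern, with illustrative values rather than the exact router schema:

```ts
import { z } from "zod";

// Object-level refine: validate a relationship between two fields,
// which a per-field refine cannot express.
const TokenConfigSchema = z
  .object({
    accessTokenTTL: z.number().int().min(0).max(315360000).default(2592000),
    accessTokenMaxTTL: z.number().int().min(1).max(315360000).default(2592000)
  })
  .refine(
    (val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
    "Access Token TTL cannot be greater than Access Token Max TTL."
  );

TokenConfigSchema.parse({}); // ok: both fields fall back to their defaults
TokenConfigSchema.parse({ accessTokenTTL: 60, accessTokenMaxTTL: 3600 }); // ok
// TokenConfigSchema.parse({ accessTokenTTL: 7200, accessTokenMaxTTL: 3600 }); // throws ZodError
```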
@@ -76,39 +76,44 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider
params: z.object({
identityId: z.string().trim().describe(AZURE_AUTH.LOGIN.identityId)
}),
- body: z.object({
- tenantId: z.string().trim().describe(AZURE_AUTH.ATTACH.tenantId),
- resource: z.string().trim().describe(AZURE_AUTH.ATTACH.resource),
- allowedServicePrincipalIds: validateAzureAuthField.describe(AZURE_AUTH.ATTACH.allowedServicePrincipalIds),
- accessTokenTrustedIps: z
- .object({
- ipAddress: z.string().trim()
- })
- .array()
- .min(1)
- .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
- .describe(AZURE_AUTH.ATTACH.accessTokenTrustedIps),
- accessTokenTTL: z
- .number()
- .int()
- .min(1)
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenTTL must have a non zero number"
- })
- .default(2592000)
- .describe(AZURE_AUTH.ATTACH.accessTokenTTL),
- accessTokenMaxTTL: z
- .number()
- .int()
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
- .default(2592000)
- .describe(AZURE_AUTH.ATTACH.accessTokenMaxTTL),
- accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AZURE_AUTH.ATTACH.accessTokenNumUsesLimit)
- }),
+ body: z
+ .object({
+ tenantId: z.string().trim().describe(AZURE_AUTH.ATTACH.tenantId),
+ resource: z.string().trim().describe(AZURE_AUTH.ATTACH.resource),
+ allowedServicePrincipalIds: validateAzureAuthField.describe(AZURE_AUTH.ATTACH.allowedServicePrincipalIds),
+ accessTokenTrustedIps: z
+ .object({
+ ipAddress: z.string().trim()
+ })
+ .array()
+ .min(1)
+ .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
+ .describe(AZURE_AUTH.ATTACH.accessTokenTrustedIps),
+ accessTokenTTL: z
+ .number()
+ .int()
+ .min(0)
+ .max(315360000)
+ .default(2592000)
+ .describe(AZURE_AUTH.ATTACH.accessTokenTTL),
+ accessTokenMaxTTL: z
+ .number()
+ .int()
+ .min(0)
+ .max(315360000)
+ .default(2592000)
+ .describe(AZURE_AUTH.ATTACH.accessTokenMaxTTL),
+ accessTokenNumUsesLimit: z
+ .number()
+ .int()
+ .min(0)
+ .default(0)
+ .describe(AZURE_AUTH.ATTACH.accessTokenNumUsesLimit)
+ })
+ .refine(
+ (val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
+ "Access Token TTL cannot be greater than Access Token Max TTL."
+ ),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema
@@ -163,32 +168,40 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider
params: z.object({
identityId: z.string().trim().describe(AZURE_AUTH.UPDATE.identityId)
}),
- body: z.object({
- tenantId: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.tenantId),
- resource: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.resource),
- allowedServicePrincipalIds: validateAzureAuthField
- .optional()
- .describe(AZURE_AUTH.UPDATE.allowedServicePrincipalIds),
- accessTokenTrustedIps: z
- .object({
- ipAddress: z.string().trim()
- })
- .array()
- .min(1)
- .optional()
- .describe(AZURE_AUTH.UPDATE.accessTokenTrustedIps),
- accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AZURE_AUTH.UPDATE.accessTokenTTL),
- accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AZURE_AUTH.UPDATE.accessTokenNumUsesLimit),
- accessTokenMaxTTL: z
- .number()
- .int()
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
- .optional()
- .describe(AZURE_AUTH.UPDATE.accessTokenMaxTTL)
- }),
+ body: z
+ .object({
+ tenantId: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.tenantId),
+ resource: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.resource),
+ allowedServicePrincipalIds: validateAzureAuthField
+ .optional()
+ .describe(AZURE_AUTH.UPDATE.allowedServicePrincipalIds),
+ accessTokenTrustedIps: z
+ .object({
+ ipAddress: z.string().trim()
+ })
+ .array()
+ .min(1)
+ .optional()
+ .describe(AZURE_AUTH.UPDATE.accessTokenTrustedIps),
+ accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AZURE_AUTH.UPDATE.accessTokenTTL),
+ accessTokenNumUsesLimit: z
+ .number()
+ .int()
+ .min(0)
+ .optional()
+ .describe(AZURE_AUTH.UPDATE.accessTokenNumUsesLimit),
+ accessTokenMaxTTL: z
+ .number()
+ .int()
+ .max(315360000)
+ .min(0)
+ .optional()
+ .describe(AZURE_AUTH.UPDATE.accessTokenMaxTTL)
+ })
+ .refine(
+ (val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
+ "Access Token TTL cannot be greater than Access Token Max TTL."
+ ),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema
@@ -74,40 +74,40 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(GCP_AUTH.ATTACH.identityId)
}),
- body: z.object({
- type: z.enum(["iam", "gce"]),
- allowedServiceAccounts: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedServiceAccounts),
- allowedProjects: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedProjects),
- allowedZones: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedZones),
- accessTokenTrustedIps: z
- .object({
- ipAddress: z.string().trim()
- })
- .array()
- .min(1)
- .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
- .describe(GCP_AUTH.ATTACH.accessTokenTrustedIps),
- accessTokenTTL: z
- .number()
- .int()
- .min(1)
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenTTL must have a non zero number"
- })
- .default(2592000)
- .describe(GCP_AUTH.ATTACH.accessTokenTTL),
- accessTokenMaxTTL: z
- .number()
- .int()
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
- .default(2592000)
- .describe(GCP_AUTH.ATTACH.accessTokenMaxTTL),
- accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(GCP_AUTH.ATTACH.accessTokenNumUsesLimit)
- }),
+ body: z
+ .object({
+ type: z.enum(["iam", "gce"]),
+ allowedServiceAccounts: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedServiceAccounts),
+ allowedProjects: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedProjects),
+ allowedZones: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedZones),
+ accessTokenTrustedIps: z
+ .object({
+ ipAddress: z.string().trim()
+ })
+ .array()
+ .min(1)
+ .default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
+ .describe(GCP_AUTH.ATTACH.accessTokenTrustedIps),
+ accessTokenTTL: z
+ .number()
+ .int()
+ .min(0)
+ .max(315360000)
+ .default(2592000)
+ .describe(GCP_AUTH.ATTACH.accessTokenTTL),
+ accessTokenMaxTTL: z
+ .number()
+ .int()
+ .min(0)
+ .max(315360000)
+ .default(2592000)
+ .describe(GCP_AUTH.ATTACH.accessTokenMaxTTL),
+ accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(GCP_AUTH.ATTACH.accessTokenNumUsesLimit)
+ })
+ .refine(
+ (val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
+ "Access Token TTL cannot be greater than Access Token Max TTL."
+ ),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema
@@ -164,31 +164,34 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(GCP_AUTH.UPDATE.identityId)
}),
- body: z.object({
- type: z.enum(["iam", "gce"]).optional(),
- allowedServiceAccounts: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedServiceAccounts),
- allowedProjects: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedProjects),
- allowedZones: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedZones),
- accessTokenTrustedIps: z
- .object({
- ipAddress: z.string().trim()
- })
- .array()
- .min(1)
- .optional()
- .describe(GCP_AUTH.UPDATE.accessTokenTrustedIps),
- accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(GCP_AUTH.UPDATE.accessTokenTTL),
- accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(GCP_AUTH.UPDATE.accessTokenNumUsesLimit),
- accessTokenMaxTTL: z
- .number()
- .int()
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
- .optional()
- .describe(GCP_AUTH.UPDATE.accessTokenMaxTTL)
- }),
+ body: z
+ .object({
+ type: z.enum(["iam", "gce"]).optional(),
+ allowedServiceAccounts: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedServiceAccounts),
+ allowedProjects: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedProjects),
+ allowedZones: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedZones),
+ accessTokenTrustedIps: z
+ .object({
+ ipAddress: z.string().trim()
+ })
+ .array()
+ .min(1)
+ .optional()
+ .describe(GCP_AUTH.UPDATE.accessTokenTrustedIps),
+ accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(GCP_AUTH.UPDATE.accessTokenTTL),
+ accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(GCP_AUTH.UPDATE.accessTokenNumUsesLimit),
+ accessTokenMaxTTL: z
+ .number()
+ .int()
+ .min(0)
+ .max(315360000)
+ .optional()
+ .describe(GCP_AUTH.UPDATE.accessTokenMaxTTL)
+ })
+ .refine(
+ (val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
+ "Access Token TTL cannot be greater than Access Token Max TTL."
+ ),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema
@@ -34,23 +34,12 @@ const CreateBaseSchema = z.object({
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(JWT_AUTH.ATTACH.accessTokenTrustedIps),
- accessTokenTTL: z
- .number()
- .int()
- .min(1)
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenTTL must have a non zero number"
- })
- .default(2592000)
- .describe(JWT_AUTH.ATTACH.accessTokenTTL),
+ accessTokenTTL: z.number().int().min(0).max(315360000).default(2592000).describe(JWT_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
+ .min(0)
.max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
.default(2592000)
.describe(JWT_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(JWT_AUTH.ATTACH.accessTokenNumUsesLimit)
@@ -70,23 +59,12 @@ const UpdateBaseSchema = z
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(JWT_AUTH.UPDATE.accessTokenTrustedIps),
- accessTokenTTL: z
- .number()
- .int()
- .min(1)
- .max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenTTL must have a non zero number"
- })
- .default(2592000)
- .describe(JWT_AUTH.UPDATE.accessTokenTTL),
+ accessTokenTTL: z.number().int().min(0).max(315360000).default(2592000).describe(JWT_AUTH.UPDATE.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
+ .min(0)
.max(315360000)
- .refine((value) => value !== 0, {
- message: "accessTokenMaxTTL must have a non zero number"
- })
.default(2592000)
.describe(JWT_AUTH.UPDATE.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(JWT_AUTH.UPDATE.accessTokenNumUsesLimit)
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user