mirror of https://github.com/Infisical/infisical.git
synced 2025-07-13 09:35:39 +00:00

Compare commits (1 commit): misc/add-e... -> max/connec...
Commit SHA1: 5ecb660cdd
.env.example (34 changed lines)

@@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080
 # Mail/SMTP
 SMTP_HOST=
 SMTP_PORT=
-SMTP_FROM_ADDRESS=
-SMTP_FROM_NAME=
+SMTP_NAME=
 SMTP_USERNAME=
 SMTP_PASSWORD=

@@ -89,34 +88,3 @@ PLAIN_WISH_LABEL_IDS=
 SSL_CLIENT_CERTIFICATE_HEADER_KEY=

 ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
-
-# App Connections
-
-# aws assume-role connection
-INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
-INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
-
-# github oauth connection
-INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
-
-#github app connection
-INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
-INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
-INF_APP_CONNECTION_GITHUB_APP_SLUG=
-INF_APP_CONNECTION_GITHUB_APP_ID=
-
-#gcp app connection
-INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
-
-# azure app connection
-INF_APP_CONNECTION_AZURE_CLIENT_ID=
-INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
-
-# datadog
-SHOULD_USE_DATADOG_TRACER=
-DATADOG_PROFILING_ENABLED=
-DATADOG_ENV=
-DATADOG_SERVICE=
-DATADOG_HOSTNAME=
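One side of this diff carries per-integration "app connection" credentials as plain environment variables. As a hedged illustration only (this is not the repository's actual loader, and the zod usage is an assumption), such variables are typically parsed once at boot into a typed config object:

```ts
// Illustrative sketch, assuming zod; the variable names match the .env.example keys above.
import { z } from "zod";

const appConnectionEnvSchema = z.object({
  // aws assume-role connection
  INF_APP_CONNECTION_AWS_ACCESS_KEY_ID: z.string().optional(),
  INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY: z.string().optional(),
  // github app connection
  INF_APP_CONNECTION_GITHUB_APP_ID: z.string().optional(),
  INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY: z.string().optional()
});

// Fails fast at startup if a variable is present but malformed.
export const appConnectionEnv = appConnectionEnvSchema.parse(process.env);
```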
.envrc (3 changed lines, file removed)

@@ -1,3 +0,0 @@
-# Learn more at https://direnv.net
-# We instruct direnv to use our Nix flake for a consistent development environment.
-use flake
@@ -32,23 +32,10 @@ jobs:
         run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
       - name: Start the server
         run: |
           echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
           echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
           echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
-          echo "Examining built image:"
-          docker image inspect infisical-api | grep -A 5 "Entrypoint"
-
-          docker run --name infisical-api -d -p 4000:4000 \
-            -e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-            -e REDIS_URL=$REDIS_URL \
-            -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-            -e ENCRYPTION_KEY=$ENCRYPTION_KEY \
-            --env-file .env \
-            infisical-api
-
-          echo "Container status right after creation:"
-          docker ps -a | grep infisical-api
+          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
         env:
           REDIS_URL: redis://172.17.0.1:6379
           DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable

@@ -56,48 +43,35 @@ jobs:
           ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
       - uses: actions/setup-go@v5
         with:
-          go-version: "1.21.5"
+          go-version: '1.21.5'
       - name: Wait for container to be stable and check logs
         run: |
           SECONDS=0
           HEALTHY=0
           while [ $SECONDS -lt 60 ]; do
-            # Check if container is running
-            if docker ps | grep infisical-api; then
-              # Try to access the API endpoint
-              if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
-                echo "API endpoint is responding. Container seems healthy."
-                HEALTHY=1
-                break
-              fi
-            else
-              echo "Container is not running!"
-              docker ps -a | grep infisical-api
+            if docker ps | grep infisical-api | grep -q healthy; then
+              echo "Container is healthy."
+              HEALTHY=1
               break
             fi

             echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
-            sleep 5
-            SECONDS=$((SECONDS+5))
+            docker logs infisical-api
+            sleep 2
+            SECONDS=$((SECONDS+2))
           done

           if [ $HEALTHY -ne 1 ]; then
             echo "Container did not become healthy in time"
-            echo "Container status:"
-            docker ps -a | grep infisical-api
-            echo "Container logs (if any):"
-            docker logs infisical-api || echo "No logs available"
-            echo "Container inspection:"
-            docker inspect infisical-api | grep -A 5 "State"
             exit 1
           fi
       - name: Install openapi-diff
-        run: go install github.com/oasdiff/oasdiff@latest
+        run: go install github.com/tufin/oasdiff@latest
       - name: Running OpenAPI Spec diff action
         run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
       - name: cleanup
-        if: always()
         run: |
           docker compose -f "docker-compose.dev.yml" down
-          docker stop infisical-api || true
-          docker rm infisical-api || true
+          docker stop infisical-api
+          docker remove infisical-api
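One side of this hunk polls the API endpoint itself instead of relying on Docker's health status. The same wait-until-healthy pattern, sketched in TypeScript for clarity (a hypothetical helper, not code from the repository):

```ts
// Hypothetical helper mirroring the shell loop above; uses Node 18+'s global fetch.
async function waitForApi(url: string, timeoutMs = 60_000, intervalMs = 5_000): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(url);
      if (res.ok) return; // endpoint answered: the container is treated as healthy
    } catch {
      // connection refused while the server is still booting; keep polling
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error(`API at ${url} did not become healthy within ${timeoutMs} ms`);
}

// usage: await waitForApi("http://localhost:4000/api/docs/json");
```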
.github/workflows/check-fe-ts-and-lint.yml (vendored, 8 changed lines)

@@ -18,18 +18,18 @@ jobs:
     steps:
       - name: ☁️ Checkout source
        uses: actions/checkout@v3
-      - name: 🔧 Setup Node 20
+      - name: 🔧 Setup Node 16
        uses: actions/setup-node@v3
        with:
-          node-version: "20"
+          node-version: "16"
          cache: "npm"
          cache-dependency-path: frontend/package-lock.json
      - name: 📦 Install dependencies
        run: npm install
        working-directory: frontend
      - name: 🏗️ Run Type check
        run: npm run type:check
        working-directory: frontend
      - name: 🏗️ Run Link check
        run: npm run lint:fix
        working-directory: frontend
.github/workflows/deployment-pipeline.yml (vendored, new file, 212 lines)

@@ -0,0 +1,212 @@
+name: Deployment pipeline
+on: [workflow_dispatch]
+
+permissions:
+  id-token: write
+  contents: read
+
+jobs:
+  infisical-tests:
+    name: Integration tests
+    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
+    uses: ./.github/workflows/run-backend-tests.yml
+
+  infisical-image:
+    name: Build
+    runs-on: ubuntu-latest
+    needs: [infisical-tests]
+    steps:
+      - name: ☁️ Checkout source
+        uses: actions/checkout@v3
+      - name: 📦 Install dependencies to test all dependencies
+        run: npm ci --only-production
+        working-directory: backend
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: 🔧 Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: 🐋 Login to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Set up Depot CLI
+        uses: depot/setup-action@v1
+      - name: 🏗️ Build backend and push to docker hub
+        uses: depot/build-push-action@v1
+        with:
+          project: 64mmf0n610
+          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
+          push: true
+          context: .
+          file: Dockerfile.standalone-infisical
+          tags: |
+            infisical/staging_infisical:${{ steps.commit.outputs.short }}
+            infisical/staging_infisical:latest
+          platforms: linux/amd64,linux/arm64
+          build-args: |
+            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
+            INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
+
+  gamma-deployment:
+    name: Deploy to gamma
+    runs-on: ubuntu-latest
+    needs: [infisical-image]
+    environment:
+      name: Gamma
+    steps:
+      - uses: twingate/github-action@v1
+        with:
+          # The Twingate Service Key used to connect Twingate to the proper service
+          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
+          #
+          # Required
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: "20"
+      - name: Change directory to backend and install dependencies
+        env:
+          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
+        run: |
+          cd backend
+          npm install
+          npm run migration:latest
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          audience: sts.amazonaws.com
+          aws-region: us-east-1
+          role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: Download task definition
+        run: |
+          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
+      - name: Render Amazon ECS task definition
+        id: render-web-container
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: task-definition.json
+          container-name: infisical-core
+          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
+          environment-variables: "LOG_LEVEL=info"
+      - name: Deploy to Amazon ECS service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
+          service: infisical-core-gamma-stage
+          cluster: infisical-gamma-stage
+          wait-for-service-stability: true
+
+  production-us:
+    name: US production deploy
+    runs-on: ubuntu-latest
+    needs: [gamma-deployment]
+    environment:
+      name: Production
+    steps:
+      - uses: twingate/github-action@v1
+        with:
+          # The Twingate Service Key used to connect Twingate to the proper service
+          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
+          #
+          # Required
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: "20"
+      - name: Change directory to backend and install dependencies
+        env:
+          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
+          AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
+        run: |
+          cd backend
+          npm install
+          npm run migration:latest
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          audience: sts.amazonaws.com
+          aws-region: us-east-1
+          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: Download task definition
+        run: |
+          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
+      - name: Render Amazon ECS task definition
+        id: render-web-container
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: task-definition.json
+          container-name: infisical-core-platform
+          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
+          environment-variables: "LOG_LEVEL=info"
+      - name: Deploy to Amazon ECS service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
+          service: infisical-core-platform
+          cluster: infisical-core-platform
+          wait-for-service-stability: true
+
+  production-eu:
+    name: EU production deploy
+    runs-on: ubuntu-latest
+    needs: [production-us]
+    environment:
+      name: production-eu
+    steps:
+      - uses: twingate/github-action@v1
+        with:
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          audience: sts.amazonaws.com
+          aws-region: eu-central-1
+          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: "20"
+      - name: Change directory to backend and install dependencies
+        env:
+          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
+        run: |
+          cd backend
+          npm install
+          npm run migration:latest
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: Download task definition
+        run: |
+          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
+      - name: Render Amazon ECS task definition
+        id: render-web-container
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: task-definition.json
+          container-name: infisical-core-platform
+          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
+          environment-variables: "LOG_LEVEL=info"
+      - name: Deploy to Amazon ECS service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
+          service: infisical-core-platform
+          cluster: infisical-core-platform
+          wait-for-service-stability: true
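Each deploy job follows the same three-step pattern: download the current ECS task definition, swap in the freshly built image, and redeploy the service. A hedged TypeScript equivalent of what those three AWS actions do, assuming the @aws-sdk/client-ecs package (illustrative only, not how the workflow actually runs):

```ts
import {
  ECSClient,
  DescribeTaskDefinitionCommand,
  RegisterTaskDefinitionCommand,
  UpdateServiceCommand
} from "@aws-sdk/client-ecs";

const ecs = new ECSClient({ region: "us-east-1" });

async function deployImage(family: string, cluster: string, service: string, containerName: string, image: string) {
  // 1. Download the current task definition (mirrors `aws ecs describe-task-definition`).
  const { taskDefinition } = await ecs.send(new DescribeTaskDefinitionCommand({ taskDefinition: family }));
  if (!taskDefinition?.containerDefinitions) throw new Error("task definition not found");

  // 2. Render: swap in the freshly pushed image for the target container.
  const containerDefinitions = taskDefinition.containerDefinitions.map((c) =>
    c.name === containerName ? { ...c, image } : c
  );

  // 3. Register a new revision and point the service at it, as the deploy action does.
  const registered = await ecs.send(
    new RegisterTaskDefinitionCommand({
      family: taskDefinition.family!,
      containerDefinitions,
      executionRoleArn: taskDefinition.executionRoleArn,
      taskRoleArn: taskDefinition.taskRoleArn,
      networkMode: taskDefinition.networkMode,
      requiresCompatibilities: taskDefinition.requiresCompatibilities,
      cpu: taskDefinition.cpu,
      memory: taskDefinition.memory
    })
  );
  await ecs.send(
    new UpdateServiceCommand({ cluster, service, taskDefinition: registered.taskDefinition!.taskDefinitionArn })
  );
}
```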
@@ -1,4 +1,4 @@
-name: Release Infisical Core Helm chart
+name: Release Helm Charts

 on: [workflow_dispatch]

@@ -17,6 +17,6 @@ jobs:
       - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
-        run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
+        run: cd helm-charts && sh upload-to-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -26,7 +26,7 @@ jobs:
       CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

   npm-release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     env:
       working-directory: ./npm
     needs:

@@ -83,7 +83,7 @@ jobs:
       NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

   goreleaser:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     needs: [cli-integration-tests]
     steps:
       - uses: actions/checkout@v3

@@ -103,12 +103,11 @@ jobs:
           go-version: ">=1.19.3"
           cache: true
           cache-dependency-path: cli/go.sum
-      - name: Setup for libssl1.0-dev
+      - name: libssl1.1 => libssl1.0-dev for OSXCross
         run: |
           echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
-          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
-          sudo apt update
-          sudo apt-get install -y libssl1.0-dev
+          sudo apt update && apt-cache policy libssl1.0-dev
+          sudo apt-get install libssl1.0-dev
       - name: OSXCross for CGO Support
         run: |
           mkdir ../../osxcross
@@ -1,4 +1,4 @@
-name: Release image + Helm chart K8s Operator
+name: Release Docker image for K8 operator
 on:
   push:
     tags:

@@ -35,18 +35,3 @@ jobs:
           tags: |
             infisical/kubernetes-operator:latest
             infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
-
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-      - name: Install python
-        uses: actions/setup-python@v4
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-      - name: Build and push helm package to Cloudsmith
-        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/run-backend-tests.yml (vendored, 8 changed lines)

@@ -34,10 +34,7 @@ jobs:
         working-directory: backend
       - name: Start postgres and redis
         run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
-      - name: Run unit test
-        run: npm run test:unit
-        working-directory: backend
-      - name: Run integration test
+      - name: Start integration test
         run: npm run test:e2e
         working-directory: backend
         env:

@@ -47,5 +44,4 @@ jobs:
           ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
       - name: cleanup
         run: |
           docker compose -f "docker-compose.dev.yml" down
-
@@ -7,4 +7,3 @@ docs/self-hosting/configuration/envars.mdx:generic-api-key:106
 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
 docs/mint.json:generic-api-key:651
 backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
-docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
@@ -8,7 +8,7 @@ FROM node:20-slim AS base
 FROM base AS frontend-dependencies
 WORKDIR /app

-COPY frontend/package.json frontend/package-lock.json ./
+COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

 # Install dependencies
 RUN npm ci --only-production --ignore-scripts

@@ -23,16 +23,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
 COPY /frontend .

 ENV NODE_ENV production
+ENV NEXT_PUBLIC_ENV production
 ARG POSTHOG_HOST
-ENV VITE_POSTHOG_HOST $POSTHOG_HOST
+ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
 ARG POSTHOG_API_KEY
-ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
 ARG INTERCOM_ID
-ENV VITE_INTERCOM_ID $INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
 ARG INFISICAL_PLATFORM_VERSION
-ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
+ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
 ARG CAPTCHA_SITE_KEY
-ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

 # Build
 RUN npm run build

@@ -43,10 +44,20 @@ WORKDIR /app

 RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

-COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
+RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
+VOLUME /app/.next/cache/images
+
+COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
+COPY --from=frontend-builder /app/public ./public
+RUN chown non-root-user:nodejs ./public/data
+
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static

 USER non-root-user

+ENV NEXT_TELEMETRY_DISABLED 1
+
 ##
 ## BACKEND
 ##

@@ -126,7 +137,6 @@ RUN apt-get update && apt-get install -y \
     freetds-dev \
     freetds-bin \
     tdsodbc \
-    openssh-client \
     && rm -rf /var/lib/apt/lists/*

 # Configure ODBC in production

@@ -149,11 +159,14 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates

 ## set pre baked keys
 ARG POSTHOG_API_KEY
-ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
+    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
 ARG INTERCOM_ID=intercom-id
-ENV INTERCOM_ID=$INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
+    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
 ARG CAPTCHA_SITE_KEY
-ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
+    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

 WORKDIR /

@@ -161,9 +174,6 @@ COPY --from=backend-runner /app /backend

 COPY --from=frontend-runner /app ./backend/frontend-build

-ARG INFISICAL_PLATFORM_VERSION
-ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
-
 ENV PORT 8080
 ENV HOST=0.0.0.0
 ENV HTTPS_ENABLED false

@@ -181,4 +191,4 @@ EXPOSE 443

 USER non-root-user

 CMD ["./standalone-entrypoint.sh"]
@@ -3,13 +3,16 @@ ARG POSTHOG_API_KEY=posthog-api-key
 ARG INTERCOM_ID=intercom-id
 ARG CAPTCHA_SITE_KEY=captcha-site-key

-FROM node:20-slim AS base
+FROM node:20-alpine AS base

 FROM base AS frontend-dependencies

+# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
+RUN apk add --no-cache libc6-compat
+
 WORKDIR /app

-COPY frontend/package.json frontend/package-lock.json ./
+COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

 # Install dependencies
 RUN npm ci --only-production --ignore-scripts

@@ -24,16 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
 COPY /frontend .

 ENV NODE_ENV production
+ENV NEXT_PUBLIC_ENV production
 ARG POSTHOG_HOST
-ENV VITE_POSTHOG_HOST $POSTHOG_HOST
+ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
 ARG POSTHOG_API_KEY
-ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
 ARG INTERCOM_ID
-ENV VITE_INTERCOM_ID $INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
 ARG INFISICAL_PLATFORM_VERSION
-ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
+ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
 ARG CAPTCHA_SITE_KEY
-ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

 # Build
 RUN npm run build

@@ -42,34 +46,42 @@ RUN npm run build
 FROM base AS frontend-runner
 WORKDIR /app

-RUN groupadd --system --gid 1001 nodejs
-RUN useradd --system --uid 1001 --gid nodejs non-root-user
+RUN addgroup --system --gid 1001 nodejs
+RUN adduser --system --uid 1001 non-root-user

-COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
+RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
+VOLUME /app/.next/cache/images
+
+COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
+COPY --from=frontend-builder /app/public ./public
+RUN chown non-root-user:nodejs ./public/data
+
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static

 USER non-root-user

+ENV NEXT_TELEMETRY_DISABLED 1
+
 ##
 ## BACKEND
 ##
 FROM base AS backend-build
+
+RUN addgroup --system --gid 1001 nodejs \
+    && adduser --system --uid 1001 non-root-user
+
 WORKDIR /app

 # Install all required dependencies for build
-RUN apt-get update && apt-get install -y \
+RUN apk --update add \
     python3 \
     make \
     g++ \
     unixodbc \
-    freetds-bin \
+    freetds \
     unixodbc-dev \
     libc-dev \
-    freetds-dev \
-    && rm -rf /var/lib/apt/lists/*
-
-RUN groupadd --system --gid 1001 nodejs
-RUN useradd --system --uid 1001 --gid nodejs non-root-user
+    freetds-dev

 COPY backend/package*.json ./
 RUN npm ci --only-production

@@ -85,19 +97,18 @@ FROM base AS backend-runner
 WORKDIR /app

 # Install all required dependencies for runtime
-RUN apt-get update && apt-get install -y \
+RUN apk --update add \
     python3 \
     make \
     g++ \
     unixodbc \
-    freetds-bin \
+    freetds \
     unixodbc-dev \
     libc-dev \
-    freetds-dev \
-    && rm -rf /var/lib/apt/lists/*
+    freetds-dev

 # Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

 COPY backend/package*.json ./
 RUN npm ci --only-production

@@ -109,36 +120,33 @@ RUN mkdir frontend-build
 # Production stage
 FROM base AS production

-RUN apt-get update && apt-get install -y \
-    ca-certificates \
-    bash \
-    curl \
-    git \
+RUN apk add --upgrade --no-cache ca-certificates
+RUN apk add --no-cache bash curl && curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
+    && apk add infisical=0.31.1 && apk add --no-cache git
+
+WORKDIR /
+
+# Install all required runtime dependencies
+RUN apk --update add \
     python3 \
     make \
     g++ \
     unixodbc \
-    freetds-bin \
+    freetds \
     unixodbc-dev \
     libc-dev \
     freetds-dev \
-    wget \
-    openssh-client \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.31.1 \
-    && rm -rf /var/lib/apt/lists/*
-
-WORKDIR /
+    bash \
+    curl \
+    git

 # Configure ODBC in production
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

 # Setup user permissions
-RUN groupadd --system --gid 1001 nodejs \
-    && useradd --system --uid 1001 --gid nodejs non-root-user
+RUN addgroup --system --gid 1001 nodejs \
+    && adduser --system --uid 1001 non-root-user

 # Give non-root-user permission to update SSL certs
 RUN chown -R non-root-user /etc/ssl/certs

@@ -150,17 +158,20 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates

 ## set pre baked keys
 ARG POSTHOG_API_KEY
-ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
+    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
 ARG INTERCOM_ID=intercom-id
-ENV INTERCOM_ID=$INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
+    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
 ARG CAPTCHA_SITE_KEY
-ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
+    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

 COPY --from=backend-runner /app /backend

 COPY --from=frontend-runner /app ./backend/frontend-build

-ARG INFISICAL_PLATFORM_VERSION
-ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
-
 ENV PORT 8080
 ENV HOST=0.0.0.0

@@ -168,7 +179,6 @@ ENV HTTPS_ENABLED false
 ENV NODE_ENV production
 ENV STANDALONE_BUILD true
 ENV STANDALONE_MODE true

 WORKDIR /backend

 ENV TELEMETRY_ENABLED true

@@ -178,4 +188,4 @@ EXPOSE 443

 USER non-root-user

 CMD ["./standalone-entrypoint.sh"]
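Both sides register the FreeTDS driver in /etc/odbcinst.ini; only the driver path differs between the Debian (slim) and Alpine layouts. A hedged sketch of how a Node service might open a connection through that driver, assuming the `odbc` npm package (this diff does not show which ODBC client the backend actually uses):

```ts
// Illustrative only: the connection-string keys follow common FreeTDS/ODBC conventions.
import odbc from "odbc";

async function querySapAse(host: string, port: number, user: string, password: string) {
  // "FreeTDS" must match the section name written to /etc/odbcinst.ini above.
  const connection = await odbc.connect(
    `Driver=FreeTDS;Server=${host};Port=${port};UID=${user};PWD=${password}`
  );
  try {
    return await connection.query("SELECT 1 AS ok"); // smoke test
  } finally {
    await connection.close();
  }
}
```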
Makefile (3 changed lines)

@@ -30,6 +30,3 @@ reviewable-api:
 	npm run type:check

 reviewable: reviewable-ui reviewable-api
-
-up-dev-sso:
-	docker compose -f docker-compose.dev.yml --profile sso up --build
README.md (11 changed lines)

@@ -56,7 +56,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
 - **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
 - **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.

-### Infisical (Internal) PKI:
+### Internal PKI:

 - **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
 - **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.

@@ -64,17 +64,12 @@ We're on a mission to make security tooling more accessible to everyone, not jus
 - **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
 - **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.

-### Infisical Key Management System (KMS):
+### Key Management (KMS):

 - **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
 - **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.

-### Infisical SSH
-
-- **[Signed SSH Certificates](https://infisical.com/docs/documentation/platform/ssh)**: Issue ephemeral SSH credentials for secure, short-lived, and centralized access to infrastructure.
-
 ### General Platform:

 - **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
 - **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
 - **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.

@@ -125,7 +120,7 @@ Install pre commit hook to scan each commit before you push to your repository
 infisical scan install --pre-commit-hook
 ```

-Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
+Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)

 ## Open-source vs. paid
@@ -1,22 +1,22 @@
 # Build stage
-FROM node:20-slim AS build
+FROM node:20-alpine AS build

 WORKDIR /app

 # Required for pkcs11js
-RUN apt-get update && apt-get install -y \
+RUN apk --update add \
     python3 \
     make \
-    g++ \
-    openssh-client
+    g++

-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
     unixodbc \
-    freetds-bin \
-    freetds-dev \
+    freetds \
     unixodbc-dev \
-    libc-dev
+    libc-dev \
+    freetds-dev

 COPY package*.json ./
 RUN npm ci --only-production

@@ -25,36 +25,36 @@ COPY . .
 RUN npm run build

 # Production stage
-FROM node:20-slim
+FROM node:20-alpine
 WORKDIR /app

 ENV npm_config_cache /home/node/.npm

 COPY package*.json ./

-RUN apt-get update && apt-get install -y \
+RUN apk --update add \
     python3 \
     make \
     g++

-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
     unixodbc \
-    freetds-bin \
-    freetds-dev \
+    freetds \
     unixodbc-dev \
-    libc-dev
+    libc-dev \
+    freetds-dev

-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

 RUN npm ci --only-production && npm cache clean --force

 COPY --from=build /app .

-# Install Infisical CLI
-RUN apt-get install -y curl bash && \
-    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.8.1 git
+RUN apk add --no-cache bash curl && curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
+    && apk add infisical=0.8.1 && apk add --no-cache git

 HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
     CMD node healthcheck.js
@@ -1,4 +1,4 @@
-FROM node:20-slim
+FROM node:20-alpine

 # ? Setup a test SoftHSM module. In production a real HSM is used.

@@ -7,32 +7,31 @@ ARG SOFTHSM2_VERSION=2.5.0
 ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
     SOFTHSM2_SOURCES=/tmp/softhsm2

-# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
-RUN apt-get update && apt-get install -y \
-    build-essential \
+# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
+RUN apk --update add \
+    alpine-sdk \
     autoconf \
     automake \
     git \
     libtool \
-    libssl-dev \
+    openssl-dev \
     python3 \
     make \
-    g++ \
-    openssh-client \
-    curl \
-    pkg-config
+    g++

-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
     unixodbc \
+    freetds \
     unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc
+    libc-dev \
+    freetds-dev

-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-# Build and install SoftHSM2
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
+# build and install SoftHSM2
 RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
 WORKDIR ${SOFTHSM2_SOURCES}

@@ -45,18 +44,16 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
 WORKDIR /root
 RUN rm -fr ${SOFTHSM2_SOURCES}

-# Install pkcs11-tool
-RUN apt-get install -y opensc
+# install pkcs11-tool
+RUN apk --update add opensc

-RUN mkdir -p /etc/softhsm2/tokens && \
-    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
+RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

 # ? App setup

-# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-    apt-get update && \
-    apt-get install -y infisical=0.8.1
+RUN apk add --no-cache bash curl && curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
+    && apk add infisical=0.8.1 && apk add --no-cache git

 WORKDIR /app
@@ -22,10 +22,8 @@ export const mockQueue = (): TQueueServiceFactory => {
     listen: (name, event) => {
       events[name] = event;
     },
-    getRepeatableJobs: async () => [],
     clearQueue: async () => {},
     stopJobById: async () => {},
-    stopRepeatableJobByJobId: async () => true,
-    stopRepeatableJobByKey: async () => true
+    stopRepeatableJobByJobId: async () => true
   };
 };
@@ -535,107 +535,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
     );
   });

-  test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
-    const updateSharedSecRes = await testServer.inject({
-      method: "PATCH",
-      url: `/api/v3/secrets/batch/raw`,
-      headers: {
-        authorization: `Bearer ${authToken}`
-      },
-      body: {
-        workspaceId: seedData1.projectV3.id,
-        environment: seedData1.environment.slug,
-        secretPath: path,
-        mode: "upsert",
-        secrets: Array.from(Array(5)).map((_e, i) => ({
-          secretKey: `BULK-${secret.key}-${i + 1}`,
-          secretValue: "update-value",
-          secretComment: secret.comment
-        }))
-      }
-    });
-    expect(updateSharedSecRes.statusCode).toBe(200);
-    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
-    expect(updateSharedSecPayload).toHaveProperty("secrets");
-
-    // bulk ones should exist
-    const secrets = await getSecrets(seedData1.environment.slug, path);
-    expect(secrets).toEqual(
-      expect.arrayContaining(
-        Array.from(Array(5)).map((_e, i) =>
-          expect.objectContaining({
-            secretKey: `BULK-${secret.key}-${i + 1}`,
-            secretValue: "update-value",
-            type: SecretType.Shared
-          })
-        )
-      )
-    );
-    await Promise.all(
-      Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
-    );
-  });
-
-  test("Bulk upsert secrets in path multiple paths", async () => {
-    const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
-      secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
-      secretValue: "update-value",
-      secretComment: "comment",
-      secretPath: secretTestCases[0].path
-    }));
-    const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
-      secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
-      secretValue: "update-value",
-      secretComment: "comment",
-      secretPath: secretTestCases[1].path
-    }));
-    const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
-
-    const updateSharedSecRes = await testServer.inject({
-      method: "PATCH",
-      url: `/api/v3/secrets/batch/raw`,
-      headers: {
-        authorization: `Bearer ${authToken}`
-      },
-      body: {
-        workspaceId: seedData1.projectV3.id,
-        environment: seedData1.environment.slug,
-        mode: "upsert",
-        secrets: testSecrets
-      }
-    });
-    expect(updateSharedSecRes.statusCode).toBe(200);
-    const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
-    expect(updateSharedSecPayload).toHaveProperty("secrets");
-
-    // bulk ones should exist
-    const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
-    expect(firstBatchSecretsOnInfisical).toEqual(
-      expect.arrayContaining(
-        firstBatchSecrets.map((el) =>
-          expect.objectContaining({
-            secretKey: el.secretKey,
-            secretValue: "update-value",
-            type: SecretType.Shared
-          })
-        )
-      )
-    );
-    const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
-    expect(secondBatchSecretsOnInfisical).toEqual(
-      expect.arrayContaining(
-        secondBatchSecrets.map((el) =>
-          expect.objectContaining({
-            secretKey: el.secretKey,
-            secretValue: "update-value",
-            type: SecretType.Shared
-          })
-        )
-      )
-    );
-    await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
-  });
-
   test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
     await Promise.all(
       Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
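The deleted tests exercise a bulk-upsert mode of the batch raw secrets endpoint. A hedged client-side sketch of the same call outside the test harness, with the endpoint and body shape taken directly from the test code above (the helper itself is hypothetical):

```ts
// Hypothetical standalone client for PATCH /api/v3/secrets/batch/raw in "upsert" mode.
async function bulkUpsertSecrets(
  baseUrl: string,
  token: string,
  workspaceId: string,
  environment: string,
  secretPath: string
) {
  const res = await fetch(`${baseUrl}/api/v3/secrets/batch/raw`, {
    method: "PATCH",
    headers: { authorization: `Bearer ${token}`, "content-type": "application/json" },
    body: JSON.stringify({
      workspaceId,
      environment,
      secretPath,
      mode: "upsert", // create-or-update semantics, as asserted by the deleted test
      secrets: [{ secretKey: "BULK-KEY-1", secretValue: "update-value", secretComment: "comment" }]
    })
  });
  if (res.status !== 200) throw new Error(`bulk upsert failed: ${res.status}`);
  return (await res.json()).secrets;
}
```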
@@ -23,14 +23,14 @@ export default {
   name: "knex-env",
   transformMode: "ssr",
   async setup() {
-    const logger = initLogger();
-    const envConfig = initEnvConfig(logger);
+    const logger = await initLogger();
+    const cfg = initEnvConfig(logger);
     const db = initDbConnection({
-      dbConnectionUri: envConfig.DB_CONNECTION_URI,
-      dbRootCert: envConfig.DB_ROOT_CERT
+      dbConnectionUri: cfg.DB_CONNECTION_URI,
+      dbRootCert: cfg.DB_ROOT_CERT
     });

-    const redis = new Redis(envConfig.REDIS_URL);
+    const redis = new Redis(cfg.REDIS_URL);
     await redis.flushdb("SYNC");

     try {
@@ -42,7 +42,6 @@ export default {
       },
       true
     );
-
     await db.migrate.latest({
       directory: path.join(__dirname, "../src/db/migrations"),
       extension: "ts",
@@ -53,24 +52,14 @@ export default {
       directory: path.join(__dirname, "../src/db/seeds"),
       extension: "ts"
     });

     const smtp = mockSmtpServer();
-    const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
-    const keyStore = keyStoreFactory(envConfig.REDIS_URL);
+    const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
+    const keyStore = keyStoreFactory(cfg.REDIS_URL);

-    const hsmModule = initializeHsmModule(envConfig);
+    const hsmModule = initializeHsmModule();
     hsmModule.initialize();

-    const server = await main({
-      db,
-      smtp,
-      logger,
-      queue,
-      keyStore,
-      hsmModule: hsmModule.getModule(),
-      redis,
-      envConfig
-    });
+    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });

     // @ts-expect-error type
     globalThis.testServer = server;
@@ -84,8 +73,8 @@ export default {
         organizationId: seedData1.organization.id,
         accessVersion: 1
       },
-      envConfig.AUTH_SECRET,
-      { expiresIn: envConfig.JWT_AUTH_LIFETIME }
+      cfg.AUTH_SECRET,
+      { expiresIn: cfg.JWT_AUTH_LIFETIME }
     );
   } catch (error) {
     // eslint-disable-next-line
2634 backend/package-lock.json (generated)
File diff suppressed because it is too large
@@ -40,38 +40,29 @@
     "type:check": "tsc --noEmit",
     "lint:fix": "eslint --fix --ext js,ts ./src",
     "lint": "eslint 'src/**/*.ts'",
-    "test:unit": "vitest run -c vitest.unit.config.ts",
     "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
     "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
     "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
     "generate:component": "tsx ./scripts/create-backend-file.ts",
     "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
-    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
-    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
-    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
-    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
-    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
-    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
-    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
+    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
+    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
+    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
+    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
+    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
+    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
-    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
-    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
-    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
-    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
-    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
-    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
-    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
-    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
-    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
-    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
-    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
-    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
-    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
+    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
+    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
+    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
+    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
+    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
+    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
-    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
-    "seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
+    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
     "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
   },
   "keywords": [],
@@ -138,27 +129,25 @@
     "@fastify/etag": "^5.1.0",
     "@fastify/formbody": "^7.4.0",
     "@fastify/helmet": "^11.1.1",
-    "@fastify/multipart": "8.3.1",
+    "@fastify/multipart": "8.3.0",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
     "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
-    "@fastify/static": "^7.0.4",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
     "@google-cloud/kms": "^4.5.0",
-    "@infisical/quic": "^1.0.8",
-    "@node-saml/passport-saml": "^5.0.1",
+    "@node-saml/passport-saml": "^4.0.4",
     "@octokit/auth-app": "^7.1.1",
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
     "@octopusdeploy/api-client": "^3.4.1",
     "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
     "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
     "@opentelemetry/exporter-prometheus": "^0.55.0",
     "@opentelemetry/instrumentation": "^0.55.0",
-    "@opentelemetry/instrumentation-http": "^0.57.2",
     "@opentelemetry/resources": "^1.28.0",
     "@opentelemetry/sdk-metrics": "^1.28.0",
     "@opentelemetry/semantic-conventions": "^1.27.0",
@@ -166,8 +155,8 @@
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
     "@sindresorhus/slugify": "1.1.0",
-    "@slack/oauth": "^3.0.2",
-    "@slack/web-api": "^7.8.0",
+    "@slack/oauth": "^3.0.1",
+    "@slack/web-api": "^7.3.4",
     "@ucast/mongo2js": "^1.3.4",
     "ajv": "^8.12.0",
     "argon2": "^0.31.2",
@@ -179,7 +168,6 @@
     "cassandra-driver": "^4.7.2",
     "connect-redis": "^7.1.1",
     "cron": "^3.1.7",
-    "dd-trace": "^5.40.0",
     "dotenv": "^16.4.1",
     "fastify": "^4.28.1",
     "fastify-plugin": "^4.5.1",
@@ -188,7 +176,6 @@
     "handlebars": "^4.7.8",
     "hdb": "^0.19.10",
     "ioredis": "^5.3.2",
-    "isomorphic-dompurify": "^2.22.0",
     "jmespath": "^0.16.0",
     "jsonwebtoken": "^9.0.2",
     "jsrp": "^0.2.4",
@@ -201,7 +188,7 @@
     "mongodb": "^6.8.1",
     "ms": "^2.1.3",
     "mysql2": "^3.9.8",
-    "nanoid": "^3.3.8",
+    "nanoid": "^3.3.4",
     "nodemailer": "^6.9.9",
     "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
7 backend/src/@types/fastify-request-context.d.ts (vendored, normal file)
@@ -0,0 +1,7 @@
+import "@fastify/request-context";
+
+declare module "@fastify/request-context" {
+  interface RequestContextData {
+    reqId: string;
+  }
+}
33 backend/src/@types/fastify.d.ts (vendored)
@@ -13,13 +13,9 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
 import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
 import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
 import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
-import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
 import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
 import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
-import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
-import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
-import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
 import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@@ -35,12 +31,9 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
 import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
 import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
 import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
-import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
-import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
 import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
 import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
 import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
-import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
 import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
 import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
 import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@@ -84,7 +77,6 @@ import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-
 import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
 import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
 import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
-import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
 import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
 import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
 import { TSlackServiceFactory } from "@app/services/slack/slack-service";
@@ -97,18 +89,6 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
 import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
 import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

-declare module "@fastify/request-context" {
-  interface RequestContextData {
-    reqId: string;
-    identityAuthInfo?: {
-      identityId: string;
-      oidc?: {
-        claims: Record<string, string>;
-      };
-    };
-  }
-}
-
 declare module "fastify" {
   interface Session {
     callbackPort: string;
@@ -136,11 +116,6 @@ declare module "fastify" {
       isUserCompleted: string;
       providerAuthToken: string;
     };
-    kmipUser: {
-      projectId: string;
-      clientId: string;
-      name: string;
-    };
     auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
     ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
     ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
@@ -202,8 +177,6 @@ declare module "fastify" {
      auditLogStream: TAuditLogStreamServiceFactory;
      certificate: TCertificateServiceFactory;
      certificateTemplate: TCertificateTemplateServiceFactory;
-     sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
-     sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
      certificateAuthority: TCertificateAuthorityServiceFactory;
      certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
      certificateEst: TCertificateEstServiceFactory;
@@ -231,17 +204,11 @@ declare module "fastify" {
      externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
      projectTemplate: TProjectTemplateServiceFactory;
      totp: TTotpServiceFactory;
-     appConnection: TAppConnectionServiceFactory;
-     secretSync: TSecretSyncServiceFactory;
-     kmip: TKmipServiceFactory;
-     kmipOperation: TKmipOperationServiceFactory;
-     gateway: TGatewayServiceFactory;
    };
    // this is exclusive use for middlewares in which we need to inject data
    // everywhere else access using service layer
    store: {
      user: Pick<TUserDALFactory, "findById">;
-     kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
    };
  }
 }
104 backend/src/@types/knex.d.ts (vendored)
@@ -68,9 +68,6 @@ import {
   TExternalKms,
   TExternalKmsInsert,
   TExternalKmsUpdate,
-  TGateways,
-  TGatewaysInsert,
-  TGatewaysUpdate,
   TGitAppInstallSessions,
   TGitAppInstallSessionsInsert,
   TGitAppInstallSessionsUpdate,
@@ -146,18 +143,6 @@ import {
   TInternalKms,
   TInternalKmsInsert,
   TInternalKmsUpdate,
-  TKmipClientCertificates,
-  TKmipClientCertificatesInsert,
-  TKmipClientCertificatesUpdate,
-  TKmipClients,
-  TKmipClientsInsert,
-  TKmipClientsUpdate,
-  TKmipOrgConfigs,
-  TKmipOrgConfigsInsert,
-  TKmipOrgConfigsUpdate,
-  TKmipOrgServerCertificates,
-  TKmipOrgServerCertificatesInsert,
-  TKmipOrgServerCertificatesUpdate,
   TKmsKeys,
   TKmsKeysInsert,
   TKmsKeysUpdate,
@@ -182,9 +167,6 @@ import {
   TOrgBots,
   TOrgBotsInsert,
   TOrgBotsUpdate,
-  TOrgGatewayConfig,
-  TOrgGatewayConfigInsert,
-  TOrgGatewayConfigUpdate,
   TOrgMemberships,
   TOrgMembershipsInsert,
   TOrgMembershipsUpdate,
@@ -206,9 +188,6 @@ import {
   TProjectEnvironments,
   TProjectEnvironmentsInsert,
   TProjectEnvironmentsUpdate,
-  TProjectGateways,
-  TProjectGatewaysInsert,
-  TProjectGatewaysUpdate,
   TProjectKeys,
   TProjectKeysInsert,
   TProjectKeysUpdate,
@@ -239,9 +218,6 @@ import {
   TRateLimit,
   TRateLimitInsert,
   TRateLimitUpdate,
-  TResourceMetadata,
-  TResourceMetadataInsert,
-  TResourceMetadataUpdate,
   TSamlConfigs,
   TSamlConfigsInsert,
   TSamlConfigsUpdate,
@@ -341,21 +317,6 @@ import {
   TSlackIntegrations,
   TSlackIntegrationsInsert,
   TSlackIntegrationsUpdate,
-  TSshCertificateAuthorities,
-  TSshCertificateAuthoritiesInsert,
-  TSshCertificateAuthoritiesUpdate,
-  TSshCertificateAuthoritySecrets,
-  TSshCertificateAuthoritySecretsInsert,
-  TSshCertificateAuthoritySecretsUpdate,
-  TSshCertificateBodies,
-  TSshCertificateBodiesInsert,
-  TSshCertificateBodiesUpdate,
-  TSshCertificates,
-  TSshCertificatesInsert,
-  TSshCertificatesUpdate,
-  TSshCertificateTemplates,
-  TSshCertificateTemplatesInsert,
-  TSshCertificateTemplatesUpdate,
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
@@ -387,13 +348,11 @@ import {
   TWorkflowIntegrationsInsert,
   TWorkflowIntegrationsUpdate
 } from "@app/db/schemas";
-import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
 import {
   TExternalGroupOrgRoleMappings,
   TExternalGroupOrgRoleMappingsInsert,
   TExternalGroupOrgRoleMappingsUpdate
 } from "@app/db/schemas/external-group-org-role-mappings";
-import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
 import {
   TSecretV2TagJunction,
   TSecretV2TagJunctionInsert,
@@ -419,31 +378,6 @@ declare module "knex/types/tables" {
   interface Tables {
     [TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
     [TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
-    [TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
-      TSshCertificateAuthorities,
-      TSshCertificateAuthoritiesInsert,
-      TSshCertificateAuthoritiesUpdate
-    >;
-    [TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
-      TSshCertificateAuthoritySecrets,
-      TSshCertificateAuthoritySecretsInsert,
-      TSshCertificateAuthoritySecretsUpdate
-    >;
-    [TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
-      TSshCertificateTemplates,
-      TSshCertificateTemplatesInsert,
-      TSshCertificateTemplatesUpdate
-    >;
-    [TableName.SshCertificate]: KnexOriginal.CompositeTableType<
-      TSshCertificates,
-      TSshCertificatesInsert,
-      TSshCertificatesUpdate
-    >;
-    [TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
-      TSshCertificateBodies,
-      TSshCertificateBodiesInsert,
-      TSshCertificateBodiesUpdate
-    >;
     [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
       TCertificateAuthorities,
       TCertificateAuthoritiesInsert,
@@ -912,43 +846,5 @@ declare module "knex/types/tables" {
       TProjectSplitBackfillIdsInsert,
       TProjectSplitBackfillIdsUpdate
     >;
-    [TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
-      TResourceMetadata,
-      TResourceMetadataInsert,
-      TResourceMetadataUpdate
-    >;
-    [TableName.AppConnection]: KnexOriginal.CompositeTableType<
-      TAppConnections,
-      TAppConnectionsInsert,
-      TAppConnectionsUpdate
-    >;
-    [TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
-    [TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
-    [TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
-      TKmipOrgConfigs,
-      TKmipOrgConfigsInsert,
-      TKmipOrgConfigsUpdate
-    >;
-    [TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
-      TKmipOrgServerCertificates,
-      TKmipOrgServerCertificatesInsert,
-      TKmipOrgServerCertificatesUpdate
-    >;
-    [TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
-      TKmipClientCertificates,
-      TKmipClientCertificatesInsert,
-      TKmipClientCertificatesUpdate
-    >;
-    [TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
-    [TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
-      TProjectGateways,
-      TProjectGatewaysInsert,
-      TProjectGatewaysUpdate
-    >;
-    [TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
-      TOrgGatewayConfig,
-      TOrgGatewayConfigInsert,
-      TOrgGatewayConfigUpdate
-    >;
   }
 }
@@ -1,105 +0,0 @@
-import path from "node:path";
-
-import dotenv from "dotenv";
-import { Knex } from "knex";
-import { Logger } from "pino";
-
-import { PgSqlLock } from "./keystore/keystore";
-
-dotenv.config();
-
-type TArgs = {
-  auditLogDb?: Knex;
-  applicationDb: Knex;
-  logger: Logger;
-};
-
-const isProduction = process.env.NODE_ENV === "production";
-const migrationConfig = {
-  directory: path.join(__dirname, "./db/migrations"),
-  loadExtensions: [".mjs", ".ts"],
-  tableName: "infisical_migrations"
-};
-
-const migrationStatusCheckErrorHandler = (err: Error) => {
-  // happens for first time in which the migration table itself is not created yet
-  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
-  if (err?.message?.includes("does not exist")) {
-    return true;
-  }
-  throw err;
-};
-
-export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
-  try {
-    // akhilmhdh(Feb 10 2025): 2 years from now remove this
-    if (isProduction) {
-      const migrationTable = migrationConfig.tableName;
-      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
-      if (hasMigrationTable) {
-        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
-        if (firstFile?.name?.includes(".ts")) {
-          await applicationDb(migrationTable).update({
-            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
-          });
-        }
-      }
-      if (auditLogDb) {
-        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
-        if (hasMigrationTableInAuditLog) {
-          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
-          if (firstFile?.name?.includes(".ts")) {
-            await auditLogDb(migrationTable).update({
-              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
-            });
-          }
-        }
-      }
-    }
-
-    const shouldRunMigration = Boolean(
-      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
-    ); // db.length - code.length
-    if (!shouldRunMigration) {
-      logger.info("No migrations pending: Skipping migration process.");
-      return;
-    }
-
-    if (auditLogDb) {
-      await auditLogDb.transaction(async (tx) => {
-        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
-        logger.info("Running audit log migrations.");
-
-        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
-          .status(migrationConfig)
-          .catch(migrationStatusCheckErrorHandler));
-        if (didPreviousInstanceRunMigration) {
-          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
-          return;
-        }
-
-        await auditLogDb.migrate.latest(migrationConfig);
-        logger.info("Finished audit log migrations.");
-      });
-    }
-
-    await applicationDb.transaction(async (tx) => {
-      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
-      logger.info("Running application migrations.");
-
-      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
-        .status(migrationConfig)
-        .catch(migrationStatusCheckErrorHandler));
-      if (didPreviousInstanceRunMigration) {
-        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
-        return;
-      }
-
-      await applicationDb.migrate.latest(migrationConfig);
-      logger.info("Finished application migrations.");
-    });
-  } catch (err) {
-    logger.error(err, "Boot up migration failed");
-    process.exit(1);
-  }
-};
@@ -49,9 +49,6 @@ export const initDbConnection = ({
           ca: Buffer.from(dbRootCert, "base64").toString("ascii")
         }
       : false
-    },
-    migrations: {
-      tableName: "infisical_migrations"
     }
   });

@@ -67,9 +64,6 @@ export const initDbConnection = ({
             ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
           }
         : false
-      },
-      migrations: {
-        tableName: "infisical_migrations"
       }
     });
   });
@@ -104,9 +98,6 @@ export const initAuditLogDbConnection = ({
           ca: Buffer.from(dbRootCert, "base64").toString("ascii")
         }
       : false
-    },
-    migrations: {
-      tableName: "infisical_migrations"
     }
   });

@@ -38,8 +38,7 @@ export default {
       directory: "./seeds"
     },
     migrations: {
-      tableName: "infisical_migrations",
-      loadExtensions: [".mjs", ".ts"]
+      tableName: "infisical_migrations"
     }
   },
   production: {
@@ -63,8 +62,7 @@ export default {
       max: 10
     },
     migrations: {
-      tableName: "infisical_migrations",
-      loadExtensions: [".mjs", ".ts"]
+      tableName: "infisical_migrations"
     }
   }
 } as Knex.Config;
@@ -1,99 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
-
-export async function up(knex: Knex): Promise<void> {
-  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
-    await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.timestamps(true, true, true);
-      t.string("projectId").notNullable();
-      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
-      t.string("status").notNullable(); // active / disabled
-      t.string("friendlyName").notNullable();
-      t.string("keyAlgorithm").notNullable();
-    });
-    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
-  }
-
-  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
-    await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.timestamps(true, true, true);
-      t.uuid("sshCaId").notNullable().unique();
-      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
-      t.binary("encryptedPrivateKey").notNullable();
-    });
-    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
-  }
-
-  if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
-    await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.timestamps(true, true, true);
-      t.uuid("sshCaId").notNullable();
-      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
-      t.string("status").notNullable(); // active / disabled
-      t.string("name").notNullable();
-      t.string("ttl").notNullable();
-      t.string("maxTTL").notNullable();
-      t.specificType("allowedUsers", "text[]").notNullable();
-      t.specificType("allowedHosts", "text[]").notNullable();
-      t.boolean("allowUserCertificates").notNullable();
-      t.boolean("allowHostCertificates").notNullable();
-      t.boolean("allowCustomKeyIds").notNullable();
-    });
-    await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
-  }
-
-  if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
-    await knex.schema.createTable(TableName.SshCertificate, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.timestamps(true, true, true);
-      t.uuid("sshCaId").notNullable();
-      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
-      t.uuid("sshCertificateTemplateId");
-      t.foreign("sshCertificateTemplateId")
-        .references("id")
-        .inTable(TableName.SshCertificateTemplate)
-        .onDelete("SET NULL");
-      t.string("serialNumber").notNullable().unique();
-      t.string("certType").notNullable(); // user or host
-      t.specificType("principals", "text[]").notNullable();
-      t.string("keyId").notNullable();
-      t.datetime("notBefore").notNullable();
-      t.datetime("notAfter").notNullable();
-    });
-    await createOnUpdateTrigger(knex, TableName.SshCertificate);
-  }
-
-  if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
-    await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.timestamps(true, true, true);
-      t.uuid("sshCertId").notNullable().unique();
-      t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
-      t.binary("encryptedCertificate").notNullable();
-    });
-
-    await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
-  }
-}
-
-export async function down(knex: Knex): Promise<void> {
-  await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
-  await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);
-
-  await knex.schema.dropTableIfExists(TableName.SshCertificate);
-  await dropOnUpdateTrigger(knex, TableName.SshCertificate);
-
-  await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
-  await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
-
-  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
-  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
-
-  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
-  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
-}
@@ -1,40 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-export async function up(knex: Knex): Promise<void> {
-  if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
-    await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
-      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      tb.string("key").notNullable();
-      tb.string("value", 1020).notNullable();
-      tb.uuid("orgId").notNullable();
-      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
-      tb.uuid("userId");
-      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
-      tb.uuid("identityId");
-      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
-      tb.uuid("secretId");
-      tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
-      tb.timestamps(true, true, true);
-    });
-  }
-
-  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
-  if (!hasSecretMetadataField) {
-    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
-      t.jsonb("secretMetadata");
-    });
-  }
-}
-
-export async function down(knex: Knex): Promise<void> {
-  await knex.schema.dropTableIfExists(TableName.ResourceMetadata);
-
-  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
-  if (hasSecretMetadataField) {
-    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
-      t.dropColumn("secretMetadata");
-    });
-  }
-}
@@ -1,28 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "@app/db/schemas";
-import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
-
-export async function up(knex: Knex): Promise<void> {
-  if (!(await knex.schema.hasTable(TableName.AppConnection))) {
-    await knex.schema.createTable(TableName.AppConnection, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.string("name", 32).notNullable();
-      t.string("description");
-      t.string("app").notNullable();
-      t.string("method").notNullable();
-      t.binary("encryptedCredentials").notNullable();
-      t.integer("version").defaultTo(1).notNullable();
-      t.uuid("orgId").notNullable();
-      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
-      t.timestamps(true, true, true);
-    });
-
-    await createOnUpdateTrigger(knex, TableName.AppConnection);
-  }
-}
-
-export async function down(knex: Knex): Promise<void> {
-  await knex.schema.dropTableIfExists(TableName.AppConnection);
-  await dropOnUpdateTrigger(knex, TableName.AppConnection);
-}
@@ -1,49 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-export async function up(knex: Knex): Promise<void> {
-  // find any duplicate group names within organizations
-  const duplicates = await knex(TableName.Groups)
-    .select("orgId", "name")
-    .count("* as count")
-    .groupBy("orgId", "name")
-    .having(knex.raw("count(*) > 1"));
-
-  // for each set of duplicates, update all but one with a numbered suffix
-  for await (const duplicate of duplicates) {
-    const groups = await knex(TableName.Groups)
-      .select("id", "name")
-      .where({
-        orgId: duplicate.orgId,
-        name: duplicate.name
-      })
-      .orderBy("createdAt", "asc"); // keep original name for oldest group
-
-    // skip the first (oldest) group, rename others with numbered suffix
-    for (let i = 1; i < groups.length; i += 1) {
-      // eslint-disable-next-line no-await-in-loop
-      await knex(TableName.Groups)
-        .where("id", groups[i].id)
-        .update({
-          name: `${groups[i].name} (${i})`,
-
-          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-          // @ts-ignore TS doesn't know about Knex's timestamp types
-          updatedAt: new Date()
-        });
-    }
-  }
-
-  // add the unique constraint
-  await knex.schema.alterTable(TableName.Groups, (t) => {
-    t.unique(["orgId", "name"]);
-  });
-}
-
-export async function down(knex: Knex): Promise<void> {
-  // Remove the unique constraint
-  await knex.schema.alterTable(TableName.Groups, (t) => {
-    t.dropUnique(["orgId", "name"]);
-  });
-}
@@ -1,33 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-export async function up(knex: Knex): Promise<void> {
-  const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
-  const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");
-
-  await knex.schema.alterTable(TableName.Project, (t) => {
-    if (!hasEnforceCapitalizationCol) {
-      t.boolean("enforceCapitalization").defaultTo(false).notNullable();
-    }
-
-    if (hasAutoCapitalizationCol) {
-      t.boolean("autoCapitalization").defaultTo(false).alter();
-    }
-  });
-}
-
-export async function down(knex: Knex): Promise<void> {
-  const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
-  const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");
-
-  await knex.schema.alterTable(TableName.Project, (t) => {
-    if (hasEnforceCapitalizationCol) {
-      t.dropColumn("enforceCapitalization");
-    }
-
-    if (hasAutoCapitalizationCol) {
-      t.boolean("autoCapitalization").defaultTo(true).alter();
-    }
-  });
-}
@@ -1,50 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "@app/db/schemas";
-import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
-
-export async function up(knex: Knex): Promise<void> {
-  if (!(await knex.schema.hasTable(TableName.SecretSync))) {
-    await knex.schema.createTable(TableName.SecretSync, (t) => {
-      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
-      t.string("name", 32).notNullable();
-      t.string("description");
-      t.string("destination").notNullable();
-      t.boolean("isAutoSyncEnabled").notNullable().defaultTo(true);
-      t.integer("version").defaultTo(1).notNullable();
-      t.jsonb("destinationConfig").notNullable();
-      t.jsonb("syncOptions").notNullable();
-      // we're including projectId in addition to folder ID because we allow folderId to be null (if the folder
-      // is deleted), to preserve sync configuration
-      t.string("projectId").notNullable();
-      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
-      t.uuid("folderId");
-      t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("SET NULL");
-      t.uuid("connectionId").notNullable();
-      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
-      t.timestamps(true, true, true);
-      // sync secrets to destination
-      t.string("syncStatus");
-      t.string("lastSyncJobId");
-      t.string("lastSyncMessage");
-      t.datetime("lastSyncedAt");
-      // import secrets from destination
-      t.string("importStatus");
-      t.string("lastImportJobId");
-      t.string("lastImportMessage");
-      t.datetime("lastImportedAt");
-      // remove secrets from destination
-      t.string("removeStatus");
-      t.string("lastRemoveJobId");
-      t.string("lastRemoveMessage");
-      t.datetime("lastRemovedAt");
-    });
-
-    await createOnUpdateTrigger(knex, TableName.SecretSync);
-  }
-}
-
-export async function down(knex: Knex): Promise<void> {
-  await knex.schema.dropTableIfExists(TableName.SecretSync);
-  await dropOnUpdateTrigger(knex, TableName.SecretSync);
-}
@@ -1,23 +0,0 @@
-import { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-export async function up(knex: Knex): Promise<void> {
-  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
-
-  await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
-    if (!hasManageGroupMembershipsCol) {
-      tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
-    }
-  });
-}
-
-export async function down(knex: Knex): Promise<void> {
-  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
-
-  await knex.schema.alterTable(TableName.OidcConfig, (t) => {
-    if (hasManageGroupMembershipsCol) {
-      t.dropColumn("manageGroupMemberships");
-    }
-  });
-}
@ -1,108 +0,0 @@
|
|||||||
import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { TableName } from "../schemas";
|
|
||||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
|
||||||
|
|
export async function up(knex: Knex): Promise<void> {
  const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
  if (!hasKmipClientTable) {
    await knex.schema.createTable(TableName.KmipClient, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.specificType("permissions", "text[]");
      t.string("description");
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
    });
  }

  const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
  if (!hasKmipOrgPkiConfig) {
    await knex.schema.createTable(TableName.KmipOrgConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.unique("orgId");

      t.string("caKeyAlgorithm").notNullable();

      t.datetime("rootCaIssuedAt").notNullable();
      t.datetime("rootCaExpiration").notNullable();
      t.string("rootCaSerialNumber").notNullable();
      t.binary("encryptedRootCaCertificate").notNullable();
      t.binary("encryptedRootCaPrivateKey").notNullable();

      t.datetime("serverIntermediateCaIssuedAt").notNullable();
      t.datetime("serverIntermediateCaExpiration").notNullable();
      t.string("serverIntermediateCaSerialNumber");
      t.binary("encryptedServerIntermediateCaCertificate").notNullable();
      t.binary("encryptedServerIntermediateCaChain").notNullable();
      t.binary("encryptedServerIntermediateCaPrivateKey").notNullable();

      t.datetime("clientIntermediateCaIssuedAt").notNullable();
      t.datetime("clientIntermediateCaExpiration").notNullable();
      t.string("clientIntermediateCaSerialNumber").notNullable();
      t.binary("encryptedClientIntermediateCaCertificate").notNullable();
      t.binary("encryptedClientIntermediateCaChain").notNullable();
      t.binary("encryptedClientIntermediateCaPrivateKey").notNullable();

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.KmipOrgConfig);
  }

  const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
  if (!hasKmipOrgServerCertTable) {
    await knex.schema.createTable(TableName.KmipOrgServerCertificates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.string("commonName").notNullable();
      t.string("altNames").notNullable();
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
      t.binary("encryptedCertificate").notNullable();
      t.binary("encryptedChain").notNullable();
    });
  }

  const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
  if (!hasKmipClientCertTable) {
    await knex.schema.createTable(TableName.KmipClientCertificates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("kmipClientId").notNullable();
      t.foreign("kmipClientId").references("id").inTable(TableName.KmipClient).onDelete("CASCADE");
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
  if (hasKmipOrgPkiConfig) {
    await knex.schema.dropTable(TableName.KmipOrgConfig);
    await dropOnUpdateTrigger(knex, TableName.KmipOrgConfig);
  }

  const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
  if (hasKmipOrgServerCertTable) {
    await knex.schema.dropTable(TableName.KmipOrgServerCertificates);
  }

  const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
  if (hasKmipClientCertTable) {
    await knex.schema.dropTable(TableName.KmipClientCertificates);
  }

  const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
  if (hasKmipClientTable) {
    await knex.schema.dropTable(TableName.KmipClient);
  }
}
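
The `createOnUpdateTrigger` and `dropOnUpdateTrigger` calls above refer to a repo-internal helper that is not part of this diff. As a rough sketch, such a helper conventionally attaches a Postgres trigger that refreshes the `updatedAt` column on every update; the body below is an assumption for illustration, not the repo's actual implementation:

    import { Knex } from "knex";

    // Hypothetical sketch: keep "updatedAt" fresh on every UPDATE.
    // Assumes a plpgsql function on_update_timestamp() was created once, e.g.:
    //   CREATE FUNCTION on_update_timestamp() RETURNS trigger AS
    //   $$ BEGIN NEW."updatedAt" = NOW(); RETURN NEW; END; $$ LANGUAGE plpgsql;
    export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
      knex.raw(`
        CREATE TRIGGER "${tableName}_updatedAt"
        BEFORE UPDATE ON "${tableName}"
        FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
      `);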

@ -1,23 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { TableName } from "@app/db/schemas";
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
|
||||||
t.unique(["orgId", "name"]);
|
|
||||||
});
|
|
||||||
|
|
||||||
await knex.schema.alterTable(TableName.SecretSync, (t) => {
|
|
||||||
t.unique(["projectId", "name"]);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
|
||||||
t.dropUnique(["orgId", "name"]);
|
|
||||||
});
|
|
||||||
|
|
||||||
await knex.schema.alterTable(TableName.SecretSync, (t) => {
|
|
||||||
t.dropUnique(["projectId", "name"]);
|
|
||||||
});
|
|
||||||
}
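
The two `t.unique([...])` calls above add composite unique constraints, so an app connection name must be unique within its organization and a secret sync name within its project. Illustratively (the row fields below are made up for the sketch):

    // With the ["orgId", "name"] constraint in place, the second insert fails
    // with a unique violation (Postgres error code 23505).
    await knex(TableName.AppConnection).insert({ ...connectionFields, orgId, name: "github-prod" });
    await knex(TableName.AppConnection).insert({ ...connectionFields, orgId, name: "github-prod" }); // throws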

@ -1,37 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { TableName } from "../schemas";
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
|
|
||||||
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
|
|
||||||
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityGcpAuth,
|
|
||||||
"allowedServiceAccounts"
|
|
||||||
);
|
|
||||||
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
|
|
||||||
if (hasTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
|
|
||||||
if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
|
|
||||||
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
|
|
||||||
if (hasAllowedZones) t.string("allowedZones", 2500).alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
|
|
||||||
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
|
|
||||||
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityGcpAuth,
|
|
||||||
"allowedServiceAccounts"
|
|
||||||
);
|
|
||||||
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
|
|
||||||
if (hasTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
|
|
||||||
if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
|
|
||||||
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
|
|
||||||
if (hasAllowedZones) t.string("allowedZones").alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
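
`t.string(column, n).alter()` re-types the column as `varchar(n)`; the down migration alters back to knex's default length of 255, which fails on Postgres if any stored value is longer. Roughly the SQL knex emits, assuming `TableName.IdentityGcpAuth` resolves to `identity_gcp_auths` (an assumption for the sketch):

    // up: widen the column
    await knex.raw(`ALTER TABLE "identity_gcp_auths" ALTER COLUMN "allowedProjects" TYPE varchar(2500)`);
    // down: shrink back; errors if an existing value exceeds 255 characters
    await knex.raw(`ALTER TABLE "identity_gcp_auths" ALTER COLUMN "allowedProjects" TYPE varchar(255)`);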

@ -1,27 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { TableName } from "@app/db/schemas";
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
if (await knex.schema.hasTable(TableName.KmsKey)) {
|
|
||||||
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
|
|
||||||
|
|
||||||
if (hasSlugCol) {
|
|
||||||
await knex.schema.alterTable(TableName.KmsKey, (t) => {
|
|
||||||
t.dropColumn("slug");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
if (await knex.schema.hasTable(TableName.KmsKey)) {
|
|
||||||
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
|
|
||||||
|
|
||||||
if (!hasSlugCol) {
|
|
||||||
await knex.schema.alterTable(TableName.KmsKey, (t) => {
|
|
||||||
t.string("slug", 32);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}

@ -1,31 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { TableName } from "@app/db/schemas";
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
if (await knex.schema.hasTable(TableName.SecretSync)) {
|
|
||||||
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
|
|
||||||
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
|
|
||||||
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
|
|
||||||
|
|
||||||
await knex.schema.alterTable(TableName.SecretSync, (t) => {
|
|
||||||
if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
|
|
||||||
if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
|
|
||||||
if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
if (await knex.schema.hasTable(TableName.SecretSync)) {
|
|
||||||
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
|
|
||||||
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
|
|
||||||
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
|
|
||||||
|
|
||||||
await knex.schema.alterTable(TableName.SecretSync, (t) => {
|
|
||||||
if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
|
|
||||||
if (hasLastImportMessage) t.string("lastImportMessage").alter();
|
|
||||||
if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}

@ -1,130 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
|
||||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
|
||||||
import { initLogger } from "@app/lib/logger";
|
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
|
||||||
|
|
||||||
import { SecretKeyEncoding, TableName } from "../schemas";
|
|
||||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
|
||||||
import { createCircularCache } from "./utils/ring-buffer";
|
|
||||||
import { getMigrationEncryptionServices } from "./utils/services";
|
|
||||||
|
|
||||||
const BATCH_SIZE = 500;
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
|
|
||||||
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
|
|
||||||
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
|
|
||||||
|
|
||||||
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
|
|
||||||
if (hasWebhookTable) {
|
|
||||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
|
||||||
if (!hasEncryptedKey) t.binary("encryptedPassKey");
|
|
||||||
if (!hasEncryptedUrl) t.binary("encryptedUrl");
|
|
||||||
if (hasUrl) t.string("url").nullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const projectEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
const webhooks = await knex(TableName.Webhook)
|
|
||||||
.where({})
|
|
||||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
|
|
||||||
.select(
|
|
||||||
"url",
|
|
||||||
"encryptedSecretKey",
|
|
||||||
"iv",
|
|
||||||
"tag",
|
|
||||||
"keyEncoding",
|
|
||||||
"urlCipherText",
|
|
||||||
"urlIV",
|
|
||||||
"urlTag",
|
|
||||||
knex.ref("id").withSchema(TableName.Webhook),
|
|
||||||
"envId"
|
|
||||||
)
|
|
||||||
.select(knex.ref("projectId").withSchema(TableName.Environment))
|
|
||||||
.orderBy(`${TableName.Environment}.projectId` as "projectId");
|
|
||||||
|
|
||||||
const updatedWebhooks = await Promise.all(
|
|
||||||
webhooks.map(async (el) => {
|
|
||||||
let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
|
|
||||||
if (!projectKmsService) {
|
|
||||||
projectKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.SecretManager,
|
|
||||||
projectId: el.projectId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
|
|
||||||
}
|
|
||||||
|
|
||||||
let encryptedSecretKey = null;
|
|
||||||
if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
|
|
||||||
const decyptedSecretKey = infisicalSymmetricDecrypt({
|
|
||||||
keyEncoding: el.keyEncoding as SecretKeyEncoding,
|
|
||||||
iv: el.iv,
|
|
||||||
tag: el.tag,
|
|
||||||
ciphertext: el.encryptedSecretKey
|
|
||||||
});
|
|
||||||
encryptedSecretKey = projectKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decyptedSecretKey, "utf8")
|
|
||||||
}).cipherTextBlob;
|
|
||||||
}
|
|
||||||
|
|
||||||
const decryptedUrl =
|
|
||||||
el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
|
|
||||||
? infisicalSymmetricDecrypt({
|
|
||||||
keyEncoding: el.keyEncoding as SecretKeyEncoding,
|
|
||||||
iv: el.urlIV,
|
|
||||||
tag: el.urlTag,
|
|
||||||
ciphertext: el.urlCipherText
|
|
||||||
})
|
|
||||||
: null;
|
|
||||||
|
|
||||||
const encryptedUrl = projectKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedUrl || el.url || "")
|
|
||||||
}).cipherTextBlob;
|
|
||||||
return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.Webhook)
|
|
||||||
.insert(
|
|
||||||
updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
|
|
||||||
id: el.id,
|
|
||||||
envId: el.envId,
|
|
||||||
url: "",
|
|
||||||
encryptedUrl: el.encryptedUrl,
|
|
||||||
encryptedPassKey: el.encryptedSecretKey
|
|
||||||
}))
|
|
||||||
)
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasWebhookTable) {
|
|
||||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
|
||||||
if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
|
|
||||||
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
|
|
||||||
|
|
||||||
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
|
|
||||||
if (hasWebhookTable) {
|
|
||||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
|
||||||
if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
|
|
||||||
if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
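
This and the following migrations memoize the per-project (or per-org) cipher pair in `createCircularCache`, a small fixed-capacity cache, so consecutive rows for the same project reuse one KMS data key instead of deriving it again. The helper itself is not in this diff; a minimal sketch matching its observed `getItem`/`push` surface, with ring-buffer (oldest-slot) eviction assumed from the name:

    // Hypothetical sketch of a fixed-capacity cache with ring-buffer eviction.
    export const createCircularCache = <T>(capacity = 25) => {
      const keys: string[] = [];
      const items: Record<string, T> = {};
      let cursor = 0;

      const getItem = (key: string): T | undefined => items[key];

      const push = (key: string, value: T) => {
        // Evict whatever currently occupies this slot before reusing it.
        if (keys[cursor] !== undefined) delete items[keys[cursor]];
        keys[cursor] = key;
        items[key] = value;
        cursor = (cursor + 1) % capacity;
      };

      return { getItem, push };
    };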

@ -1,111 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
|
||||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
|
||||||
import { selectAllTableCols } from "@app/lib/knex";
|
|
||||||
import { initLogger } from "@app/lib/logger";
|
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
|
||||||
|
|
||||||
import { SecretKeyEncoding, TableName } from "../schemas";
|
|
||||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
|
||||||
import { createCircularCache } from "./utils/ring-buffer";
|
|
||||||
import { getMigrationEncryptionServices } from "./utils/services";
|
|
||||||
|
|
||||||
const BATCH_SIZE = 500;
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
|
|
||||||
const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
|
|
||||||
const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
|
|
||||||
const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
|
|
||||||
|
|
||||||
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
|
|
||||||
if (hasDynamicSecretTable) {
|
|
||||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
|
||||||
if (!hasEncryptedInputColumn) t.binary("encryptedInput");
|
|
||||||
if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
|
|
||||||
if (hasInputIVColumn) t.string("inputIV").nullable().alter();
|
|
||||||
if (hasInputTagColumn) t.string("inputTag").nullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const projectEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
|
|
||||||
const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
|
|
||||||
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
|
|
||||||
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
|
|
||||||
.select(selectAllTableCols(TableName.DynamicSecret))
|
|
||||||
.select(knex.ref("projectId").withSchema(TableName.Environment))
|
|
||||||
.orderBy(`${TableName.Environment}.projectId` as "projectId");
|
|
||||||
|
|
||||||
const updatedDynamicSecrets = await Promise.all(
|
|
||||||
dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
|
|
||||||
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
|
|
||||||
if (!projectKmsService) {
|
|
||||||
projectKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.SecretManager,
|
|
||||||
projectId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
projectEncryptionRingBuffer.push(projectId, projectKmsService);
|
|
||||||
}
|
|
||||||
|
|
||||||
const decryptedInputData =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
|
|
||||||
? infisicalSymmetricDecrypt({
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
keyEncoding: el.keyEncoding as SecretKeyEncoding,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.inputIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.inputTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.inputCiphertext
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedInput = projectKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedInputData)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
|
|
||||||
return { ...el, encryptedInput };
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.DynamicSecret)
|
|
||||||
.insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasDynamicSecretTable) {
|
|
||||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
|
||||||
if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
|
|
||||||
|
|
||||||
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
|
|
||||||
if (hasDynamicSecretTable) {
|
|
||||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
|
||||||
if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
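
The backfill loops in these migrations all follow one shape: slice the updated rows into BATCH_SIZE chunks and upsert each chunk with `.onConflict("id").merge()` so existing rows are updated in place. A generic helper capturing the pattern might look like this (a sketch, not code from the repo):

    import { Knex } from "knex";

    // Hypothetical helper: upsert rows in fixed-size batches, merging on "id".
    const batchUpsert = async <T extends { id: string }>(
      knex: Knex,
      table: string,
      rows: T[],
      batchSize = 500
    ) => {
      for (let i = 0; i < rows.length; i += batchSize) {
        // eslint-disable-next-line no-await-in-loop
        await knex(table)
          .insert(rows.slice(i, i + batchSize))
          .onConflict("id")
          .merge();
      }
    };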

@ -1,103 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
|
||||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
|
||||||
import { selectAllTableCols } from "@app/lib/knex";
|
|
||||||
import { initLogger } from "@app/lib/logger";
|
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
|
||||||
|
|
||||||
import { SecretKeyEncoding, TableName } from "../schemas";
|
|
||||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
|
||||||
import { createCircularCache } from "./utils/ring-buffer";
|
|
||||||
import { getMigrationEncryptionServices } from "./utils/services";
|
|
||||||
|
|
||||||
const BATCH_SIZE = 500;
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
|
|
||||||
|
|
||||||
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
|
|
||||||
if (hasRotationTable) {
|
|
||||||
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
|
|
||||||
if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const projectEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
|
|
||||||
const secretRotations = await knex(TableName.SecretRotation)
|
|
||||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
|
|
||||||
.select(selectAllTableCols(TableName.SecretRotation))
|
|
||||||
.select(knex.ref("projectId").withSchema(TableName.Environment))
|
|
||||||
.orderBy(`${TableName.Environment}.projectId` as "projectId");
|
|
||||||
|
|
||||||
const updatedRotationData = await Promise.all(
|
|
||||||
secretRotations.map(async ({ projectId, ...el }) => {
|
|
||||||
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
|
|
||||||
if (!projectKmsService) {
|
|
||||||
projectKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.SecretManager,
|
|
||||||
projectId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
projectEncryptionRingBuffer.push(projectId, projectKmsService);
|
|
||||||
}
|
|
||||||
|
|
||||||
const decryptedRotationData =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
|
|
||||||
? infisicalSymmetricDecrypt({
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
keyEncoding: el.keyEncoding as SecretKeyEncoding,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.encryptedDataIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.encryptedDataTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedData
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedRotationData = projectKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedRotationData)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
return { ...el, encryptedRotationData };
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.SecretRotation)
|
|
||||||
.insert(updatedRotationData.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasRotationTable) {
|
|
||||||
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
|
|
||||||
if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
|
|
||||||
|
|
||||||
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
|
|
||||||
if (hasRotationTable) {
|
|
||||||
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
|
|
||||||
if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
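
Note the three-phase shape these re-encryption migrations share: add the destination column as nullable so existing rows stay valid, backfill every row, and only then enforce NOT NULL. Condensed (a sketch of the pattern, not repo code):

    // Phase 1: add the new column as nullable.
    await knex.schema.alterTable(TableName.SecretRotation, (t) => t.binary("encryptedRotationData"));
    // Phase 2: backfill every row (the batched upsert loop above).
    // Phase 3: tighten the column once every row has a value.
    await knex.schema.alterTable(TableName.SecretRotation, (t) =>
      t.binary("encryptedRotationData").notNullable().alter()
    );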

@ -1,200 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
|
||||||
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
|
||||||
import { selectAllTableCols } from "@app/lib/knex";
|
|
||||||
import { initLogger } from "@app/lib/logger";
|
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
|
||||||
|
|
||||||
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
|
|
||||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
|
||||||
import { createCircularCache } from "./utils/ring-buffer";
|
|
||||||
import { getMigrationEncryptionServices } from "./utils/services";
|
|
||||||
|
|
||||||
const BATCH_SIZE = 500;
|
|
||||||
const reencryptIdentityK8sAuth = async (knex: Knex) => {
|
|
||||||
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"encryptedKubernetesTokenReviewerJwt"
|
|
||||||
);
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"encryptedKubernetesCaCertificate"
|
|
||||||
);
|
|
||||||
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
|
|
||||||
|
|
||||||
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
|
|
||||||
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
|
|
||||||
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
|
|
||||||
const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"encryptedTokenReviewerJwt"
|
|
||||||
);
|
|
||||||
const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"tokenReviewerJwtIV"
|
|
||||||
);
|
|
||||||
const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"tokenReviewerJwtTag"
|
|
||||||
);
|
|
||||||
|
|
||||||
if (hasidentityKubernetesAuthTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
|
||||||
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
|
|
||||||
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
|
|
||||||
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
|
|
||||||
if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
|
|
||||||
if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
|
|
||||||
if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();
|
|
||||||
|
|
||||||
if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
|
|
||||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const orgEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
|
|
||||||
.join(
|
|
||||||
TableName.IdentityOrgMembership,
|
|
||||||
`${TableName.IdentityOrgMembership}.identityId`,
|
|
||||||
`${TableName.IdentityKubernetesAuth}.identityId`
|
|
||||||
)
|
|
||||||
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
|
|
||||||
.select(selectAllTableCols(TableName.IdentityKubernetesAuth))
|
|
||||||
.select(
|
|
||||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("orgId").withSchema(TableName.OrgBot)
|
|
||||||
)
|
|
||||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
|
||||||
|
|
||||||
const updatedIdentityKubernetesConfigs = [];
|
|
||||||
|
|
||||||
for await (const {
|
|
||||||
encryptedSymmetricKey,
|
|
||||||
symmetricKeyKeyEncoding,
|
|
||||||
symmetricKeyTag,
|
|
||||||
symmetricKeyIV,
|
|
||||||
orgId,
|
|
||||||
...el
|
|
||||||
} of identityKubernetesConfigs) {
|
|
||||||
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
|
|
||||||
|
|
||||||
if (!orgKmsService) {
|
|
||||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.Organization,
|
|
||||||
orgId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
orgEncryptionRingBuffer.push(orgId, orgKmsService);
|
|
||||||
}
|
|
||||||
|
|
||||||
const key = infisicalSymmetricDecrypt({
|
|
||||||
ciphertext: encryptedSymmetricKey,
|
|
||||||
iv: symmetricKeyIV,
|
|
||||||
tag: symmetricKeyTag,
|
|
||||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
|
||||||
});
|
|
||||||
|
|
||||||
const decryptedTokenReviewerJwt =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.tokenReviewerJwtIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.tokenReviewerJwtTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedTokenReviewerJwt
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const decryptedCertificate =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedCaCert && el.caCertIV && el.caCertTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.caCertIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.caCertTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedCaCert
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedTokenReviewerJwt)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedCertificate)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
|
|
||||||
updatedIdentityKubernetesConfigs.push({
|
|
||||||
...el,
|
|
||||||
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
|
|
||||||
encryptedKubernetesCaCertificate,
|
|
||||||
encryptedKubernetesTokenReviewerJwt
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.IdentityKubernetesAuth)
|
|
||||||
.insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
if (hasidentityKubernetesAuthTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
|
||||||
if (!hasEncryptedKubernetesTokenReviewerJwt)
|
|
||||||
t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
await reencryptIdentityK8sAuth(knex);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dropIdentityK8sColumns = async (knex: Knex) => {
|
|
||||||
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"encryptedKubernetesTokenReviewerJwt"
|
|
||||||
);
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityKubernetesAuth,
|
|
||||||
"encryptedKubernetesCaCertificate"
|
|
||||||
);
|
|
||||||
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
|
|
||||||
|
|
||||||
if (hasidentityKubernetesAuthTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
|
||||||
if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
|
|
||||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
await dropIdentityK8sColumns(knex);
|
|
||||||
}
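
Unlike the webhook and dynamic-secret backfills, this migration iterates with `for await ... of`, re-encrypting one identity at a time, which bounds memory and KMS load at the cost of throughput. If a middle ground were ever wanted, chunked concurrency is one option (a sketch, not repo code):

    // Hypothetical helper: process rows in concurrent chunks of `width`, sitting
    // between Promise.all (everything at once) and for await ... of (one at a time).
    const mapChunked = async <A, B>(rows: A[], width: number, fn: (row: A) => Promise<B>): Promise<B[]> => {
      const out: B[] = [];
      for (let i = 0; i < rows.length; i += width) {
        // eslint-disable-next-line no-await-in-loop
        out.push(...(await Promise.all(rows.slice(i, i + width).map(fn))));
      }
      return out;
    };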

@ -1,141 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
|
||||||
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
|
||||||
import { selectAllTableCols } from "@app/lib/knex";
|
|
||||||
import { initLogger } from "@app/lib/logger";
|
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
|
||||||
|
|
||||||
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
|
|
||||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
|
||||||
import { createCircularCache } from "./utils/ring-buffer";
|
|
||||||
import { getMigrationEncryptionServices } from "./utils/services";
|
|
||||||
|
|
||||||
const BATCH_SIZE = 500;
|
|
||||||
const reencryptIdentityOidcAuth = async (knex: Knex) => {
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityOidcAuth,
|
|
||||||
"encryptedCaCertificate"
|
|
||||||
);
|
|
||||||
const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
|
|
||||||
|
|
||||||
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
|
|
||||||
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
|
|
||||||
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");
|
|
||||||
|
|
||||||
if (hasidentityOidcAuthTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
|
||||||
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
|
|
||||||
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
|
|
||||||
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
|
|
||||||
|
|
||||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const orgEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
|
|
||||||
const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
|
|
||||||
.join(
|
|
||||||
TableName.IdentityOrgMembership,
|
|
||||||
`${TableName.IdentityOrgMembership}.identityId`,
|
|
||||||
`${TableName.IdentityOidcAuth}.identityId`
|
|
||||||
)
|
|
||||||
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
|
|
||||||
.select(selectAllTableCols(TableName.IdentityOidcAuth))
|
|
||||||
.select(
|
|
||||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("orgId").withSchema(TableName.OrgBot)
|
|
||||||
)
|
|
||||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
|
||||||
|
|
||||||
const updatedIdentityOidcConfigs = await Promise.all(
|
|
||||||
identityOidcConfig.map(
|
|
||||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
|
|
||||||
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
|
|
||||||
if (!orgKmsService) {
|
|
||||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.Organization,
|
|
||||||
orgId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
orgEncryptionRingBuffer.push(orgId, orgKmsService);
|
|
||||||
}
|
|
||||||
const key = infisicalSymmetricDecrypt({
|
|
||||||
ciphertext: encryptedSymmetricKey,
|
|
||||||
iv: symmetricKeyIV,
|
|
||||||
tag: symmetricKeyTag,
|
|
||||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
|
||||||
});
|
|
||||||
|
|
||||||
const decryptedCertificate =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedCaCert && el.caCertIV && el.caCertTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.caCertIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.caCertTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedCaCert
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedCaCertificate = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedCertificate)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
|
|
||||||
return {
|
|
||||||
...el,
|
|
||||||
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
|
|
||||||
encryptedCaCertificate
|
|
||||||
};
|
|
||||||
}
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.IdentityOidcAuth)
|
|
||||||
.insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
await reencryptIdentityOidcAuth(knex);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dropIdentityOidcColumns = async (knex: Knex) => {
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.IdentityOidcAuth,
|
|
||||||
"encryptedCaCertificate"
|
|
||||||
);
|
|
||||||
const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
|
|
||||||
|
|
||||||
if (hasidentityOidcTable) {
|
|
||||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
|
||||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
await dropIdentityOidcColumns(knex);
|
|
||||||
}

@ -1,493 +0,0 @@

import { Knex } from "knex";
|
|
||||||
|
|
||||||
import { inMemoryKeyStore } from "@app/keystore/memory";
|
|
||||||
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
|
||||||
import { selectAllTableCols } from "@app/lib/knex";
|
|
||||||
import { initLogger } from "@app/lib/logger";
|
|
||||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
|
||||||
|
|
||||||
import { SecretKeyEncoding, TableName } from "../schemas";
|
|
||||||
import { getMigrationEnvConfig } from "./utils/env-config";
|
|
||||||
import { createCircularCache } from "./utils/ring-buffer";
|
|
||||||
import { getMigrationEncryptionServices } from "./utils/services";
|
|
||||||
|
|
||||||
const BATCH_SIZE = 500;
|
|
||||||
const reencryptSamlConfig = async (knex: Knex) => {
|
|
||||||
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
|
|
||||||
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
|
|
||||||
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
|
|
||||||
|
|
||||||
if (hasSamlConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
|
||||||
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
|
|
||||||
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
|
|
||||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const orgEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
|
|
||||||
const samlConfigs = await knex(TableName.SamlConfig)
|
|
||||||
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
|
|
||||||
.select(selectAllTableCols(TableName.SamlConfig))
|
|
||||||
.select(
|
|
||||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
|
|
||||||
)
|
|
||||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
|
||||||
|
|
||||||
const updatedSamlConfigs = await Promise.all(
|
|
||||||
samlConfigs.map(
|
|
||||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
|
|
||||||
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
|
|
||||||
if (!orgKmsService) {
|
|
||||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.Organization,
|
|
||||||
orgId: el.orgId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
|
|
||||||
}
|
|
||||||
const key = infisicalSymmetricDecrypt({
|
|
||||||
ciphertext: encryptedSymmetricKey,
|
|
||||||
iv: symmetricKeyIV,
|
|
||||||
tag: symmetricKeyTag,
|
|
||||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
|
||||||
});
|
|
||||||
|
|
||||||
const decryptedEntryPoint =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.entryPointIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.entryPointTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedEntryPoint
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const decryptedIssuer =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedIssuer && el.issuerIV && el.issuerTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.issuerIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.issuerTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedIssuer
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const decryptedCertificate =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedCert && el.certIV && el.certTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.certIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.certTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedCert
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedSamlIssuer = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedIssuer)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
const encryptedSamlCertificate = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedCertificate)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
const encryptedSamlEntryPoint = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedEntryPoint)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
|
|
||||||
}
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.SamlConfig)
|
|
||||||
.insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasSamlConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
|
||||||
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
|
|
||||||
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
|
|
||||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const reencryptLdapConfig = async (knex: Knex) => {
|
|
||||||
const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
|
|
||||||
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
|
|
||||||
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
|
|
||||||
|
|
||||||
const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
|
|
||||||
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
|
|
||||||
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
|
|
||||||
const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
|
|
||||||
const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
|
|
||||||
const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
|
|
||||||
const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
|
|
||||||
const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
|
|
||||||
const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");
|
|
||||||
|
|
||||||
if (hasLdapConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
|
||||||
if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
|
|
||||||
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
|
|
||||||
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
|
|
||||||
if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
|
|
||||||
if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
|
|
||||||
if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
|
|
||||||
if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
|
|
||||||
if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
|
|
||||||
if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();
|
|
||||||
|
|
||||||
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
|
|
||||||
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
|
|
||||||
if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const orgEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
|
|
||||||
const ldapConfigs = await knex(TableName.LdapConfig)
|
|
||||||
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
|
|
||||||
.select(selectAllTableCols(TableName.LdapConfig))
|
|
||||||
.select(
|
|
||||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
|
|
||||||
)
|
|
||||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
|
||||||
|
|
||||||
const updatedLdapConfigs = await Promise.all(
|
|
||||||
ldapConfigs.map(
|
|
||||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
|
|
||||||
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
|
|
||||||
if (!orgKmsService) {
|
|
||||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.Organization,
|
|
||||||
orgId: el.orgId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
|
|
||||||
}
|
|
||||||
const key = infisicalSymmetricDecrypt({
|
|
||||||
ciphertext: encryptedSymmetricKey,
|
|
||||||
iv: symmetricKeyIV,
|
|
||||||
tag: symmetricKeyTag,
|
|
||||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
|
||||||
});
|
|
||||||
|
|
||||||
const decryptedBindDN =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.bindDNIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.bindDNTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedBindDN
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const decryptedBindPass =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.bindPassIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.bindPassTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedBindPass
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const decryptedCertificate =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedCACert && el.caCertIV && el.caCertTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.caCertIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.caCertTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedCACert
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedLdapBindDN = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedBindDN)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
const encryptedLdapBindPass = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedBindPass)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
const encryptedLdapCaCertificate = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedCertificate)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
|
|
||||||
}
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.LdapConfig)
|
|
||||||
.insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
if (hasLdapConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
|
||||||
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
|
|
||||||
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const reencryptOidcConfig = async (knex: Knex) => {
|
|
||||||
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
|
|
||||||
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
|
|
||||||
TableName.OidcConfig,
|
|
||||||
"encryptedOidcClientSecret"
|
|
||||||
);
|
|
||||||
|
|
||||||
const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
|
|
||||||
const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
|
|
||||||
const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
|
|
||||||
const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
|
|
||||||
const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
|
|
||||||
const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");
|
|
||||||
|
|
||||||
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
|
|
||||||
|
|
||||||
if (hasOidcConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
|
||||||
if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
|
|
||||||
if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
|
|
||||||
if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
|
|
||||||
if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
|
|
||||||
if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
|
|
||||||
if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();
|
|
||||||
|
|
||||||
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
|
|
||||||
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
initLogger();
|
|
||||||
const envConfig = getMigrationEnvConfig();
|
|
||||||
const keyStore = inMemoryKeyStore();
|
|
||||||
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
|
|
||||||
const orgEncryptionRingBuffer =
|
|
||||||
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
|
|
||||||
|
|
||||||
const oidcConfigs = await knex(TableName.OidcConfig)
|
|
||||||
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
|
|
||||||
.select(selectAllTableCols(TableName.OidcConfig))
|
|
||||||
.select(
|
|
||||||
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
|
|
||||||
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
|
|
||||||
)
|
|
||||||
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
|
|
||||||
|
|
||||||
const updatedOidcConfigs = await Promise.all(
|
|
||||||
oidcConfigs.map(
|
|
||||||
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
|
|
||||||
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
|
|
||||||
if (!orgKmsService) {
|
|
||||||
orgKmsService = await kmsService.createCipherPairWithDataKey(
|
|
||||||
{
|
|
||||||
type: KmsDataKey.Organization,
|
|
||||||
orgId: el.orgId
|
|
||||||
},
|
|
||||||
knex
|
|
||||||
);
|
|
||||||
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
|
|
||||||
}
|
|
||||||
const key = infisicalSymmetricDecrypt({
|
|
||||||
ciphertext: encryptedSymmetricKey,
|
|
||||||
iv: symmetricKeyIV,
|
|
||||||
tag: symmetricKeyTag,
|
|
||||||
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
|
|
||||||
});
|
|
||||||
|
|
||||||
const decryptedClientId =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedClientId && el.clientIdIV && el.clientIdTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.clientIdIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.clientIdTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedClientId
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const decryptedClientSecret =
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
|
|
||||||
? decryptSymmetric({
|
|
||||||
key,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
iv: el.clientSecretIV,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
tag: el.clientSecretTag,
|
|
||||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
||||||
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
|
|
||||||
ciphertext: el.encryptedClientSecret
|
|
||||||
})
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const encryptedOidcClientId = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedClientId)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
const encryptedOidcClientSecret = orgKmsService.encryptor({
|
|
||||||
plainText: Buffer.from(decryptedClientSecret)
|
|
||||||
}).cipherTextBlob;
|
|
||||||
return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
|
|
||||||
}
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
await knex(TableName.OidcConfig)
|
|
||||||
.insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
|
|
||||||
.onConflict("id")
|
|
||||||
.merge();
|
|
||||||
}
|
|
||||||
if (hasOidcConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
|
||||||
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
|
|
||||||
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function up(knex: Knex): Promise<void> {
|
|
||||||
await reencryptSamlConfig(knex);
|
|
||||||
await reencryptLdapConfig(knex);
|
|
||||||
await reencryptOidcConfig(knex);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dropSamlConfigColumns = async (knex: Knex) => {
|
|
||||||
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
|
|
||||||
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
|
|
||||||
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
|
|
||||||
|
|
||||||
if (hasSamlConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
|
|
||||||
if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
|
|
||||||
if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
|
|
||||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const dropLdapConfigColumns = async (knex: Knex) => {
|
|
||||||
const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
|
|
||||||
const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
|
|
||||||
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
|
|
||||||
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
|
|
||||||
|
|
||||||
if (hasLdapConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
|
|
||||||
if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
|
|
||||||
if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
|
|
||||||
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const dropOidcConfigColumns = async (knex: Knex) => {
|
|
||||||
const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
|
|
||||||
const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
|
|
||||||
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
|
|
||||||
|
|
||||||
if (hasOidcConfigTable) {
|
|
||||||
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
|
|
||||||
if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
|
|
||||||
if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function down(knex: Knex): Promise<void> {
|
|
||||||
await dropSamlConfigColumns(knex);
|
|
||||||
await dropLdapConfigColumns(knex);
|
|
||||||
await dropOidcConfigColumns(knex);
|
|
||||||
}
|
|
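Every field in this migration follows the same decrypt-then-re-encrypt shape: check that the legacy iv/tag/ciphertext triple is complete, decrypt it with the org-bot symmetric key, and re-encrypt the plaintext under the org KMS data key. A minimal sketch of that pattern, assuming the decryptSymmetric helper imported by this migration (the LegacyField type and reencryptLegacyField name are hypothetical, introduced only for illustration):

// Hypothetical helper illustrating the pattern used above: decrypt a legacy
// iv/tag/ciphertext triple with the org-bot symmetric key, then re-encrypt
// the plaintext under the org KMS data key. Names are illustrative only.
type LegacyField = { ciphertext?: string | null; iv?: string | null; tag?: string | null };

const reencryptLegacyField = (
  field: LegacyField,
  key: string,
  encryptor: (arg: { plainText: Buffer }) => { cipherTextBlob: Buffer }
): Buffer => {
  // Fall back to encrypting an empty string when the legacy triple is
  // incomplete, mirroring the `: ""` branches in the migration above.
  const plaintext =
    field.ciphertext && field.iv && field.tag
      ? decryptSymmetric({ key, iv: field.iv, tag: field.tag, ciphertext: field.ciphertext })
      : "";
  return encryptor({ plainText: Buffer.from(plaintext) }).cipherTextBlob;
};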
@ -1,115 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
    await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("rootCaKeyAlgorithm").notNullable();

      t.datetime("rootCaIssuedAt").notNullable();
      t.datetime("rootCaExpiration").notNullable();
      t.string("rootCaSerialNumber").notNullable();
      t.binary("encryptedRootCaCertificate").notNullable();
      t.binary("encryptedRootCaPrivateKey").notNullable();

      t.datetime("clientCaIssuedAt").notNullable();
      t.datetime("clientCaExpiration").notNullable();
      t.string("clientCaSerialNumber");
      t.binary("encryptedClientCaCertificate").notNullable();
      t.binary("encryptedClientCaPrivateKey").notNullable();

      t.string("clientCertSerialNumber").notNullable();
      t.string("clientCertKeyAlgorithm").notNullable();
      t.datetime("clientCertIssuedAt").notNullable();
      t.datetime("clientCertExpiration").notNullable();
      t.binary("encryptedClientCertificate").notNullable();
      t.binary("encryptedClientPrivateKey").notNullable();

      t.datetime("gatewayCaIssuedAt").notNullable();
      t.datetime("gatewayCaExpiration").notNullable();
      t.string("gatewayCaSerialNumber").notNullable();
      t.binary("encryptedGatewayCaCertificate").notNullable();
      t.binary("encryptedGatewayCaPrivateKey").notNullable();

      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.unique("orgId");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
  }

  if (!(await knex.schema.hasTable(TableName.Gateway))) {
    await knex.schema.createTable(TableName.Gateway, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.string("name").notNullable();
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
      t.datetime("heartbeat");

      t.binary("relayAddress").notNullable();

      t.uuid("orgGatewayRootCaId").notNullable();
      t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");

      t.uuid("identityId").notNullable();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.Gateway);
  }

  if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
    await knex.schema.createTable(TableName.ProjectGateway, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");

      t.uuid("gatewayId").notNullable();
      t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.ProjectGateway);
  }

  if (await knex.schema.hasTable(TableName.DynamicSecret)) {
    const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      // not setting a cascading foreign constraint so that cascade effects are not triggered
      if (!doesGatewayColExist) {
        t.uuid("projectGatewayId");
        t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.DynamicSecret)) {
    const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (doesGatewayColExist) t.dropColumn("projectGatewayId");
    });
  }

  await knex.schema.dropTableIfExists(TableName.ProjectGateway);
  await dropOnUpdateTrigger(knex, TableName.ProjectGateway);

  await knex.schema.dropTableIfExists(TableName.Gateway);
  await dropOnUpdateTrigger(knex, TableName.Gateway);

  await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
  await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}
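Like the other migrations in this set, the gateway migration guards every piece of DDL behind hasTable/hasColumn probes so that a partially applied run can be safely retried. A minimal sketch of that guard pattern, assuming only knex (the table and column names here are hypothetical, chosen for illustration):

import { Knex } from "knex";

// Idempotent-migration guard pattern used throughout these files: probe the
// current schema first, then only create/alter what is actually missing.
export async function up(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable("example_table"); // hypothetical name
  if (!hasTable) {
    await knex.schema.createTable("example_table", (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
    });
  }
  const hasCol = await knex.schema.hasColumn("example_table", "note"); // hypothetical column
  if (!hasCol) {
    await knex.schema.alterTable("example_table", (t) => {
      t.string("note");
    });
  }
}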
@ -1,25 +0,0 @@
import { Knex } from "knex";

import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");

  await knex.schema.alterTable(TableName.SecretSharing, (table) => {
    if (!hasSharingTypeColumn) {
      table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");

  await knex.schema.alterTable(TableName.SecretSharing, (table) => {
    if (hasSharingTypeColumn) {
      table.dropColumn("type");
    }
  });
}
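The new column defaults to SecretSharingType.Share. The enum itself lives outside this diff; a plausible sketch of its shape (only the Share member is confirmed by this migration — the Request member and the string values are assumptions):

// Assumed shape of the enum imported above. Only `Share` is confirmed by the
// migration; `Request` and the literal values are assumptions for illustration.
export enum SecretSharingType {
  Share = "share",
  Request = "request"
}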
@ -1,31 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
  const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
  if (await knex.schema.hasTable(TableName.SuperAdmin)) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      if (!hasAuthConsentContentCol) {
        t.text("authConsentContent");
      }
      if (!hasPageFrameContentCol) {
        t.text("pageFrameContent");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
  const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    if (hasAuthConsentContentCol) {
      t.dropColumn("authConsentContent");
    }
    if (hasPageFrameContentCol) {
      t.dropColumn("pageFrameContent");
    }
  });
}
@ -1,35 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  for await (const tableName of [
    TableName.SecretV2,
    TableName.SecretVersionV2,
    TableName.SecretApprovalRequestSecretV2
  ]) {
    const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");

    if (hasReminderNoteCol) {
      await knex.schema.alterTable(tableName, (t) => {
        t.string("reminderNote", 1024).alter();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  for await (const tableName of [
    TableName.SecretV2,
    TableName.SecretVersionV2,
    TableName.SecretApprovalRequestSecretV2
  ]) {
    const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");

    if (hasReminderNoteCol) {
      await knex.schema.alterTable(tableName, (t) => {
        t.string("reminderNote").alter();
      });
    }
  }
}
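The .alter() calls above redefine the column in place: up() widens reminderNote to varchar(1024) while keeping existing rows, and down() narrows it back to knex's default varchar(255). Roughly the same effect expressed as a raw statement (a sketch only; the physical table name and the exact SQL knex emits may differ by dialect):

// Approximate SQL behind t.string("reminderNote", 1024).alter() on Postgres.
// "secrets_v2" is an assumed physical table name for TableName.SecretV2.
await knex.raw(`ALTER TABLE secrets_v2 ALTER COLUMN "reminderNote" TYPE varchar(1024)`);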
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropColumn("description");
    });
  }
}
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.string("comment");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.dropColumn("comment");
    });
  }
}
@ -1,45 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
    const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
    const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
    const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");

    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      if (!hasSecretVersionV2UserActorId) {
        t.uuid("userActorId");
        t.foreign("userActorId").references("id").inTable(TableName.Users);
      }
      if (!hasSecretVersionV2IdentityActorId) {
        t.uuid("identityActorId");
        t.foreign("identityActorId").references("id").inTable(TableName.Identity);
      }
      if (!hasSecretVersionV2ActorType) {
        t.string("actorType");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
    const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
    const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
    const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");

    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      if (hasSecretVersionV2UserActorId) {
        t.dropColumn("userActorId");
      }
      if (hasSecretVersionV2IdentityActorId) {
        t.dropColumn("identityActorId");
      }
      if (hasSecretVersionV2ActorType) {
        t.dropColumn("actorType");
      }
    });
  }
}
@ -1,32 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
      TableName.Organization,
      "allowSecretSharingOutsideOrganization"
    );

    if (!hasSecretShareToAnyoneCol) {
      await knex.schema.alterTable(TableName.Organization, (t) => {
        t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
      TableName.Organization,
      "allowSecretSharingOutsideOrganization"
    );
    if (hasSecretShareToAnyoneCol) {
      await knex.schema.alterTable(TableName.Organization, (t) => {
        t.dropColumn("allowSecretSharingOutsideOrganization");
      });
    }
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
  if (!hasMappingField) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.jsonb("claimMetadataMapping");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
  if (hasMappingField) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.dropColumn("claimMetadataMapping");
    });
  }
}
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.specificType("adminIdentityIds", "text[]");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("adminIdentityIds");
    });
  }
}
@ -1,53 +0,0 @@
import { z } from "zod";

import { zpStr } from "@app/lib/zod";

const envSchema = z
  .object({
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
    DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
    // TODO(akhilmhdh): will be changed to one
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
  }));

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(
      "Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
    );
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
    process.exit(-1);
  }

  return Object.freeze(parsedEnv.data);
};
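A quick usage sketch of the config module above, assuming ENCRYPTION_KEY is present in the environment (the value shown is a placeholder):

// Minimal usage sketch: parse process.env once at migration start and read
// the derived flag. The key value is a placeholder for illustration.
process.env.ENCRYPTION_KEY = "example-32-byte-key"; // placeholder
const envConfig = getMigrationEnvConfig();
console.log(envConfig.DB_PORT); // "5432" unless overridden
console.log(envConfig.isHsmConfigured); // false unless all HSM_* vars are set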
105 backend/src/db/migrations/utils/kms.ts Normal file
@ -0,0 +1,105 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";

const getInstanceRootKey = async (knex: Knex) => {
  const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
  // if only the root key is set, it is base64 encoded
  const isBase64 = !process.env.ENCRYPTION_KEY;
  if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
  const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");

  const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
  const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  if (kmsRootConfig) {
    const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
    // return the decrypted key so that other instance nodes can start
    return decryptedRootKey;
  }

  const newRootKey = randomSecureBytes(32);
  const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
  await knex(TableName.KmsServerRootConfig).insert({
    encryptedRootKey,
    // eslint-disable-next-line
    // @ts-ignore id is kept as fixed for idempotence and to avoid race condition
    id: KMS_ROOT_CONFIG_UUID
  });
  // return the plaintext root key so both branches yield usable key material
  return newRootKey;
};

export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
  const KMS_VERSION = "v01";
  const KMS_VERSION_BLOB_LENGTH = 3;
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  const project = await knex(TableName.Project).where({ id: projectId }).first();
  if (!project) throw new Error("Missing project id");

  const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);

  let secretManagerKmsKey;
  const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
  if (projectSecretManagerKmsId) {
    const kmsDoc = await knex(TableName.KmsKey)
      .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
      .where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
      .first();
    if (!kmsDoc) throw new Error("missing kms");
    secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
  } else {
    const [kmsDoc] = await knex(TableName.KmsKey)
      .insert({
        name: slugify(alphaNumericNanoId(8).toLowerCase()),
        orgId: project.orgId,
        isReserved: false
      })
      .returning("*");

    secretManagerKmsKey = randomSecureBytes(32);
    const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
    await knex(TableName.InternalKms).insert({
      version: 1,
      encryptedKey: encryptedKeyMaterial,
      encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
      kmsKeyId: kmsDoc.id
    });
  }

  const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
  let dataKey: Buffer;
  if (!encryptedSecretManagerDataKey) {
    dataKey = randomSecureBytes();
    // the versioning below is done automatically in the kms service
    const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
    const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
    await knex(TableName.Project)
      .where({ id: projectId })
      .update({
        kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
      });
  } else {
    const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
    dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
  }

  return {
    encryptor: ({ plainText }: { plainText: Buffer }) => {
      const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);

      // Buffer#1 encrypted text + Buffer#2 version number
      const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
      const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
      return { cipherTextBlob };
    },
    decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
      const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
      const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
      return decryptedBlob;
    }
  };
};
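A round-trip usage sketch for the data-key helper above, assuming a live knex connection and an existing project row (both are placeholders, not values from this diff):

// Round-trip sketch: encrypt with the project data key, then decrypt.
// `db` and the project id are placeholders for illustration.
const { encryptor, decryptor } = await getSecretManagerDataKey(db, "project-id-placeholder");
const { cipherTextBlob } = encryptor({ plainText: Buffer.from("super-secret") });
const roundTripped = decryptor({ cipherTextBlob });
console.log(roundTripped.toString("utf8")); // "super-secret"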
@ -1,19 +0,0 @@
export const createCircularCache = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
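The re-encryption migrations size this cache at 25 entries so an org's KMS cipher pair is not re-derived for every row. A usage sketch of the eviction behavior (ids and values are placeholders):

// Fixed-size ring cache: once `bufferSize` slots are filled, the oldest
// slot is overwritten. Ids and values here are placeholders.
const cache = createCircularCache<number>(2);
cache.push("org-a", 1);
cache.push("org-b", 2);
cache.push("org-c", 3); // overwrites the "org-a" slot
console.log(cache.getItem("org-a")); // undefined
console.log(cache.getItem("org-c")); // 3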
@ -1,52 +0,0 @@
import { Knex } from "knex";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";

import { TMigrationEnvConfig } from "./env-config";

type TDependencies = {
  envConfig: TMigrationEnvConfig;
  db: Knex;
  keyStore: TKeyStoreFactory;
};

export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
  // eslint-disable-next-line no-param-reassign
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const projectDAL = projectDALFactory(db);

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  return { kmsService };
};
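This factory is what the re-encryption migrations call at the top of up(). A hedged sketch of that call site, mirroring the call pattern visible earlier in this diff (the org id is a placeholder, and the imports are assumed to come from the modules shown in these files):

// Sketch of how a migration wires the encryption services together,
// following the pattern used by the re-encryption migrations above.
export async function up(knex: Knex): Promise<void> {
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore(); // assumed import from the keystore module
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const { encryptor } = await kmsService.createCipherPairWithDataKey(
    { type: KmsDataKey.Organization, orgId: "org-id-placeholder" },
    knex
  );
  // ...re-encrypt rows with `encryptor` as shown in the migrations above...
}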
@ -1,56 +0,0 @@
import path from "node:path";

import dotenv from "dotenv";

import { initAuditLogDbConnection, initDbConnection } from "./instance";

const isProduction = process.env.NODE_ENV === "production";

// Update with your config settings.
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

const runRename = async () => {
  if (!isProduction) return;
  const migrationTable = "infisical_migrations";
  const applicationDb = initDbConnection({
    dbConnectionUri: process.env.DB_CONNECTION_URI as string,
    dbRootCert: process.env.DB_ROOT_CERT
  });

  const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
        dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
        dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

  const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
  if (hasMigrationTable) {
    const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
    if (firstFile?.name?.includes(".ts")) {
      await applicationDb(migrationTable).update({
        name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
      });
    }
  }
  if (auditLogDb) {
    const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
    if (hasMigrationTableInAuditLog) {
      const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
      if (firstFile?.name?.includes(".ts")) {
        await auditLogDb(migrationTable).update({
          name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
        });
      }
    }
  }
  await applicationDb.destroy();
  await auditLogDb?.destroy();
};

void runRename();
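The REPLACE update above rewrites every recorded migration filename in place, so knex still recognizes previously applied migrations after the build output switched extensions. A small sketch of the effect, assuming a knex handle named db (a placeholder):

// Effect of the rename: each row's `name` has its ".ts" suffix swapped for
// ".mjs" inside the migration bookkeeping table. `db` is a placeholder.
await db("infisical_migrations").update({
  name: db.raw("REPLACE(name, '.ts', '.mjs')")
});
// e.g. "20240101_init.ts" -> "20240101_init.mjs"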
@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const AppConnectionsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().nullable().optional(),
  app: z.string(),
  method: z.string(),
  encryptedCredentials: zodBuffer,
  version: z.number().default(1),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
export type TAppConnectionsInsert = Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>;
export type TAppConnectionsUpdate = Partial<Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>>;
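These generated schemas lean on a zodBuffer helper for bytea columns. Its definition is outside this diff; a plausible sketch, assuming it must accept both Buffer instances and the JSON-serialized { type: "Buffer", data: [...] } form (an assumption, not confirmed here):

import { z } from "zod";

// Assumed shape of the zodBuffer helper used by the generated schemas:
// accept a Buffer directly, or coerce the JSON-serialized form back into one.
// This is a sketch; the real implementation in @app/lib/zod may differ.
export const zodBuffer = z
  .custom<Buffer | { type: "Buffer"; data: number[] }>(
    (val) => Buffer.isBuffer(val) || (typeof val === "object" && val !== null && "data" in (val as object))
  )
  .transform((val) => (Buffer.isBuffer(val) ? val : Buffer.from((val as { data: number[] }).data)));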
@ -5,8 +5,6 @@
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const DynamicSecretsSchema = z.object({
@ -16,18 +14,16 @@ export const DynamicSecretsSchema = z.object({
   type: z.string(),
   defaultTTL: z.string(),
   maxTTL: z.string().nullable().optional(),
-  inputIV: z.string().nullable().optional(),
-  inputCiphertext: z.string().nullable().optional(),
-  inputTag: z.string().nullable().optional(),
+  inputIV: z.string(),
+  inputCiphertext: z.string(),
+  inputTag: z.string(),
   algorithm: z.string().default("aes-256-gcm"),
   keyEncoding: z.string().default("utf8"),
   folderId: z.string().uuid(),
   status: z.string().nullable().optional(),
   statusDetails: z.string().nullable().optional(),
   createdAt: z.date(),
-  updatedAt: z.date(),
-  encryptedInput: zodBuffer,
-  projectGatewayId: z.string().uuid().nullable().optional()
+  updatedAt: z.date()
 });
 
 export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const GatewaysSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  serialNumber: z.string(),
  keyAlgorithm: z.string(),
  issuedAt: z.date(),
  expiration: z.date(),
  heartbeat: z.date().nullable().optional(),
  relayAddress: zodBuffer,
  orgGatewayRootCaId: z.string().uuid(),
  identityId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TGateways = z.infer<typeof GatewaysSchema>;
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;
@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
   updatedAt: z.date(),
   identityId: z.string().uuid(),
   type: z.string(),
-  allowedServiceAccounts: z.string().nullable().optional(),
-  allowedProjects: z.string().nullable().optional(),
-  allowedZones: z.string().nullable().optional()
+  allowedServiceAccounts: z.string(),
+  allowedProjects: z.string(),
+  allowedZones: z.string()
 });
 
 export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;
@ -5,8 +5,6 @@
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const IdentityKubernetesAuthsSchema = z.object({
@ -19,17 +17,15 @@ export const IdentityKubernetesAuthsSchema = z.object({
   updatedAt: z.date(),
   identityId: z.string().uuid(),
   kubernetesHost: z.string(),
-  encryptedCaCert: z.string().nullable().optional(),
-  caCertIV: z.string().nullable().optional(),
-  caCertTag: z.string().nullable().optional(),
-  encryptedTokenReviewerJwt: z.string().nullable().optional(),
-  tokenReviewerJwtIV: z.string().nullable().optional(),
-  tokenReviewerJwtTag: z.string().nullable().optional(),
+  encryptedCaCert: z.string(),
+  caCertIV: z.string(),
+  caCertTag: z.string(),
+  encryptedTokenReviewerJwt: z.string(),
+  tokenReviewerJwtIV: z.string(),
+  tokenReviewerJwtTag: z.string(),
   allowedNamespaces: z.string(),
   allowedNames: z.string(),
-  allowedAudience: z.string(),
-  encryptedKubernetesTokenReviewerJwt: zodBuffer,
-  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
+  allowedAudience: z.string()
 });
 
 export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
@ -5,8 +5,6 @@
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const IdentityOidcAuthsSchema = z.object({
@ -17,17 +15,15 @@ export const IdentityOidcAuthsSchema = z.object({
   accessTokenTrustedIps: z.unknown(),
   identityId: z.string().uuid(),
   oidcDiscoveryUrl: z.string(),
-  encryptedCaCert: z.string().nullable().optional(),
-  caCertIV: z.string().nullable().optional(),
-  caCertTag: z.string().nullable().optional(),
+  encryptedCaCert: z.string(),
+  caCertIV: z.string(),
+  caCertTag: z.string(),
   boundIssuer: z.string(),
   boundAudiences: z.string(),
   boundClaims: z.unknown(),
   boundSubject: z.string().nullable().optional(),
   createdAt: z.date(),
-  updatedAt: z.date(),
-  encryptedCaCertificate: zodBuffer.nullable().optional(),
-  claimMetadataMapping: z.unknown().nullable().optional()
+  updatedAt: z.date()
 });
 
 export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
@ -20,7 +20,6 @@ export * from "./certificates";
 export * from "./dynamic-secret-leases";
 export * from "./dynamic-secrets";
 export * from "./external-kms";
-export * from "./gateways";
 export * from "./git-app-install-sessions";
 export * from "./git-app-org";
 export * from "./group-project-membership-roles";
@ -46,10 +45,6 @@ export * from "./incident-contacts";
 export * from "./integration-auths";
 export * from "./integrations";
 export * from "./internal-kms";
-export * from "./kmip-client-certificates";
-export * from "./kmip-clients";
-export * from "./kmip-org-configs";
-export * from "./kmip-org-server-certificates";
 export * from "./kms-key-versions";
 export * from "./kms-keys";
 export * from "./kms-root-config";
@ -58,7 +53,6 @@ export * from "./ldap-group-maps";
 export * from "./models";
 export * from "./oidc-configs";
 export * from "./org-bots";
-export * from "./org-gateway-config";
 export * from "./org-memberships";
 export * from "./org-roles";
 export * from "./organizations";
@ -67,7 +61,6 @@ export * from "./pki-collection-items";
 export * from "./pki-collections";
 export * from "./project-bots";
 export * from "./project-environments";
-export * from "./project-gateways";
 export * from "./project-keys";
 export * from "./project-memberships";
 export * from "./project-roles";
@ -78,7 +71,6 @@ export * from "./project-user-additional-privilege";
 export * from "./project-user-membership-roles";
 export * from "./projects";
 export * from "./rate-limit";
-export * from "./resource-metadata";
 export * from "./saml-configs";
 export * from "./scim-tokens";
 export * from "./secret-approval-policies";
@ -115,11 +107,6 @@ export * from "./secrets";
 export * from "./secrets-v2";
 export * from "./service-tokens";
 export * from "./slack-integrations";
-export * from "./ssh-certificate-authorities";
-export * from "./ssh-certificate-authority-secrets";
-export * from "./ssh-certificate-bodies";
-export * from "./ssh-certificate-templates";
-export * from "./ssh-certificates";
 export * from "./super-admin";
 export * from "./totp-configs";
 export * from "./trusted-ips";
@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const KmipClientCertificatesSchema = z.object({
  id: z.string().uuid(),
  kmipClientId: z.string().uuid(),
  serialNumber: z.string(),
  keyAlgorithm: z.string(),
  issuedAt: z.date(),
  expiration: z.date()
});

export type TKmipClientCertificates = z.infer<typeof KmipClientCertificatesSchema>;
export type TKmipClientCertificatesInsert = Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>;
export type TKmipClientCertificatesUpdate = Partial<
  Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>
>;
@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const KmipClientsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  permissions: z.string().array().nullable().optional(),
  description: z.string().nullable().optional(),
  projectId: z.string()
});

export type TKmipClients = z.infer<typeof KmipClientsSchema>;
export type TKmipClientsInsert = Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>;
export type TKmipClientsUpdate = Partial<Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>>;
@ -1,39 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmipOrgConfigsSchema = z.object({
  id: z.string().uuid(),
  orgId: z.string().uuid(),
  caKeyAlgorithm: z.string(),
  rootCaIssuedAt: z.date(),
  rootCaExpiration: z.date(),
  rootCaSerialNumber: z.string(),
  encryptedRootCaCertificate: zodBuffer,
  encryptedRootCaPrivateKey: zodBuffer,
  serverIntermediateCaIssuedAt: z.date(),
  serverIntermediateCaExpiration: z.date(),
  serverIntermediateCaSerialNumber: z.string().nullable().optional(),
  encryptedServerIntermediateCaCertificate: zodBuffer,
  encryptedServerIntermediateCaChain: zodBuffer,
  encryptedServerIntermediateCaPrivateKey: zodBuffer,
  clientIntermediateCaIssuedAt: z.date(),
  clientIntermediateCaExpiration: z.date(),
  clientIntermediateCaSerialNumber: z.string(),
  encryptedClientIntermediateCaCertificate: zodBuffer,
  encryptedClientIntermediateCaChain: zodBuffer,
  encryptedClientIntermediateCaPrivateKey: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TKmipOrgConfigs = z.infer<typeof KmipOrgConfigsSchema>;
export type TKmipOrgConfigsInsert = Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>;
export type TKmipOrgConfigsUpdate = Partial<Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>>;
@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmipOrgServerCertificatesSchema = z.object({
  id: z.string().uuid(),
  orgId: z.string().uuid(),
  commonName: z.string(),
  altNames: z.string(),
  serialNumber: z.string(),
  keyAlgorithm: z.string(),
  issuedAt: z.date(),
  expiration: z.date(),
  encryptedCertificate: zodBuffer,
  encryptedChain: zodBuffer
});

export type TKmipOrgServerCertificates = z.infer<typeof KmipOrgServerCertificatesSchema>;
export type TKmipOrgServerCertificatesInsert = Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>;
export type TKmipOrgServerCertificatesUpdate = Partial<
  Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>
>;
@ -16,7 +16,8 @@ export const KmsKeysSchema = z.object({
   name: z.string(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  projectId: z.string().nullable().optional()
+  projectId: z.string().nullable().optional(),
+  slug: z.string().nullable().optional()
 });
 
 export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
@@ -5,8 +5,6 @@
 
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const LdapConfigsSchema = z.object({
@@ -14,25 +12,22 @@ export const LdapConfigsSchema = z.object({
   orgId: z.string().uuid(),
   isActive: z.boolean(),
   url: z.string(),
-  encryptedBindDN: z.string().nullable().optional(),
-  bindDNIV: z.string().nullable().optional(),
-  bindDNTag: z.string().nullable().optional(),
-  encryptedBindPass: z.string().nullable().optional(),
-  bindPassIV: z.string().nullable().optional(),
-  bindPassTag: z.string().nullable().optional(),
+  encryptedBindDN: z.string(),
+  bindDNIV: z.string(),
+  bindDNTag: z.string(),
+  encryptedBindPass: z.string(),
+  bindPassIV: z.string(),
+  bindPassTag: z.string(),
   searchBase: z.string(),
-  encryptedCACert: z.string().nullable().optional(),
-  caCertIV: z.string().nullable().optional(),
-  caCertTag: z.string().nullable().optional(),
+  encryptedCACert: z.string(),
+  caCertIV: z.string(),
+  caCertTag: z.string(),
   createdAt: z.date(),
   updatedAt: z.date(),
   groupSearchBase: z.string().default(""),
   groupSearchFilter: z.string().default(""),
   searchFilter: z.string().default(""),
-  uniqueUserAttribute: z.string().default(""),
-  encryptedLdapBindDN: zodBuffer,
-  encryptedLdapBindPass: zodBuffer,
-  encryptedLdapCaCertificate: zodBuffer.nullable().optional()
+  uniqueUserAttribute: z.string().default("")
 });
 
 export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
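The LdapConfigs hunk above reverts the bind credentials from single binary columns (zodBuffer) back to ciphertext/IV/auth-tag string triples. The diff does not show how zodBuffer is implemented in @app/lib/zod; a plausible sketch, assuming it is a simple z.custom wrapper over Node Buffers:

import { z } from "zod";

// Assumed shape of the zodBuffer helper (its real implementation is not shown in this diff).
const zodBuffer = z.custom<Buffer>((val) => Buffer.isBuffer(val));

// One binary column per encrypted value (the side being removed here)...
const BufferStyle = z.object({ encryptedLdapBindDN: zodBuffer });

// ...versus the ciphertext/IV/tag string triple the schema reverts to.
const TripleStyle = z.object({
  encryptedBindDN: z.string(),
  bindDNIV: z.string(),
  bindDNTag: z.string()
});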
@ -2,11 +2,6 @@ import { z } from "zod";
|
|||||||
|
|
||||||
export enum TableName {
|
export enum TableName {
|
||||||
Users = "users",
|
Users = "users",
|
||||||
SshCertificateAuthority = "ssh_certificate_authorities",
|
|
||||||
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
|
|
||||||
SshCertificateTemplate = "ssh_certificate_templates",
|
|
||||||
SshCertificate = "ssh_certificates",
|
|
||||||
SshCertificateBody = "ssh_certificate_bodies",
|
|
||||||
CertificateAuthority = "certificate_authorities",
|
CertificateAuthority = "certificate_authorities",
|
||||||
CertificateTemplateEstConfig = "certificate_template_est_configs",
|
CertificateTemplateEstConfig = "certificate_template_est_configs",
|
||||||
CertificateAuthorityCert = "certificate_authority_certs",
|
CertificateAuthorityCert = "certificate_authority_certs",
|
||||||
@ -80,7 +75,6 @@ export enum TableName {
|
|||||||
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
|
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
|
||||||
// used by both identity and users
|
// used by both identity and users
|
||||||
IdentityMetadata = "identity_metadata",
|
IdentityMetadata = "identity_metadata",
|
||||||
ResourceMetadata = "resource_metadata",
|
|
||||||
ScimToken = "scim_tokens",
|
ScimToken = "scim_tokens",
|
||||||
AccessApprovalPolicy = "access_approval_policies",
|
AccessApprovalPolicy = "access_approval_policies",
|
||||||
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
|
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
|
||||||
@ -113,10 +107,6 @@ export enum TableName {
|
|||||||
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
|
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
|
||||||
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
|
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
|
||||||
ProjectSplitBackfillIds = "project_split_backfill_ids",
|
ProjectSplitBackfillIds = "project_split_backfill_ids",
|
||||||
// Gateway
|
|
||||||
OrgGatewayConfig = "org_gateway_config",
|
|
||||||
Gateway = "gateways",
|
|
||||||
ProjectGateway = "project_gateways",
|
|
||||||
// junction tables with tags
|
// junction tables with tags
|
||||||
SecretV2JnTag = "secret_v2_tag_junction",
|
SecretV2JnTag = "secret_v2_tag_junction",
|
||||||
JnSecretTag = "secret_tag_junction",
|
JnSecretTag = "secret_tag_junction",
|
||||||
@ -134,13 +124,7 @@ export enum TableName {
|
|||||||
KmsKeyVersion = "kms_key_versions",
|
KmsKeyVersion = "kms_key_versions",
|
||||||
WorkflowIntegrations = "workflow_integrations",
|
WorkflowIntegrations = "workflow_integrations",
|
||||||
SlackIntegrations = "slack_integrations",
|
SlackIntegrations = "slack_integrations",
|
||||||
ProjectSlackConfigs = "project_slack_configs",
|
ProjectSlackConfigs = "project_slack_configs"
|
||||||
AppConnection = "app_connections",
|
|
||||||
SecretSync = "secret_syncs",
|
|
||||||
KmipClient = "kmip_clients",
|
|
||||||
KmipOrgConfig = "kmip_org_configs",
|
|
||||||
KmipOrgServerCertificates = "kmip_org_server_certificates",
|
|
||||||
KmipClientCertificates = "kmip_client_certificates"
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
||||||
@ -221,15 +205,5 @@ export enum IdentityAuthMethod {
|
|||||||
export enum ProjectType {
|
export enum ProjectType {
|
||||||
SecretManager = "secret-manager",
|
SecretManager = "secret-manager",
|
||||||
CertificateManager = "cert-manager",
|
CertificateManager = "cert-manager",
|
||||||
KMS = "kms",
|
KMS = "kms"
|
||||||
SSH = "ssh"
|
|
||||||
}
|
|
||||||
|
|
||||||
export enum ActionProjectType {
|
|
||||||
SecretManager = ProjectType.SecretManager,
|
|
||||||
CertificateManager = ProjectType.CertificateManager,
|
|
||||||
KMS = ProjectType.KMS,
|
|
||||||
SSH = ProjectType.SSH,
|
|
||||||
// project operations that happen on all types
|
|
||||||
Any = "any"
|
|
||||||
}
|
}
|
||||||
|
@@ -5,8 +5,6 @@
 
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const OidcConfigsSchema = z.object({
@@ -17,22 +15,19 @@ export const OidcConfigsSchema = z.object({
   jwksUri: z.string().nullable().optional(),
   tokenEndpoint: z.string().nullable().optional(),
   userinfoEndpoint: z.string().nullable().optional(),
-  encryptedClientId: z.string().nullable().optional(),
+  encryptedClientId: z.string(),
   configurationType: z.string(),
-  clientIdIV: z.string().nullable().optional(),
-  clientIdTag: z.string().nullable().optional(),
-  encryptedClientSecret: z.string().nullable().optional(),
-  clientSecretIV: z.string().nullable().optional(),
-  clientSecretTag: z.string().nullable().optional(),
+  clientIdIV: z.string(),
+  clientIdTag: z.string(),
+  encryptedClientSecret: z.string(),
+  clientSecretIV: z.string(),
+  clientSecretTag: z.string(),
   allowedEmailDomains: z.string().nullable().optional(),
   isActive: z.boolean(),
   createdAt: z.date(),
   updatedAt: z.date(),
   orgId: z.string().uuid(),
-  lastUsed: z.date().nullable().optional(),
-  manageGroupMemberships: z.boolean().default(false),
-  encryptedOidcClientId: zodBuffer,
-  encryptedOidcClientSecret: zodBuffer
+  lastUsed: z.date().nullable().optional()
 });
 
 export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
@@ -1,43 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { zodBuffer } from "@app/lib/zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const OrgGatewayConfigSchema = z.object({
-  id: z.string().uuid(),
-  rootCaKeyAlgorithm: z.string(),
-  rootCaIssuedAt: z.date(),
-  rootCaExpiration: z.date(),
-  rootCaSerialNumber: z.string(),
-  encryptedRootCaCertificate: zodBuffer,
-  encryptedRootCaPrivateKey: zodBuffer,
-  clientCaIssuedAt: z.date(),
-  clientCaExpiration: z.date(),
-  clientCaSerialNumber: z.string().nullable().optional(),
-  encryptedClientCaCertificate: zodBuffer,
-  encryptedClientCaPrivateKey: zodBuffer,
-  clientCertSerialNumber: z.string(),
-  clientCertKeyAlgorithm: z.string(),
-  clientCertIssuedAt: z.date(),
-  clientCertExpiration: z.date(),
-  encryptedClientCertificate: zodBuffer,
-  encryptedClientPrivateKey: zodBuffer,
-  gatewayCaIssuedAt: z.date(),
-  gatewayCaExpiration: z.date(),
-  gatewayCaSerialNumber: z.string(),
-  encryptedGatewayCaCertificate: zodBuffer,
-  encryptedGatewayCaPrivateKey: zodBuffer,
-  orgId: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date()
-});
-
-export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
-export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
-export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;
@@ -22,8 +22,7 @@ export const OrganizationsSchema = z.object({
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
   enforceMfa: z.boolean().default(false),
-  selectedMfaMethod: z.string().nullable().optional(),
-  allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
+  selectedMfaMethod: z.string().nullable().optional()
 });
 
 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
@@ -1,20 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const ProjectGatewaysSchema = z.object({
-  id: z.string().uuid(),
-  projectId: z.string(),
-  gatewayId: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date()
-});
-
-export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
-export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
-export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;
@@ -13,7 +13,7 @@ export const ProjectsSchema = z.object({
   id: z.string(),
   name: z.string(),
   slug: z.string(),
-  autoCapitalization: z.boolean().default(false).nullable().optional(),
+  autoCapitalization: z.boolean().default(true).nullable().optional(),
   orgId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
@@ -25,8 +25,7 @@ export const ProjectsSchema = z.object({
   kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
   kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
   description: z.string().nullable().optional(),
-  type: z.string(),
-  enforceCapitalization: z.boolean().default(false)
+  type: z.string()
 });
 
 export type TProjects = z.infer<typeof ProjectsSchema>;
@@ -1,24 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const ResourceMetadataSchema = z.object({
-  id: z.string().uuid(),
-  key: z.string(),
-  value: z.string(),
-  orgId: z.string().uuid(),
-  userId: z.string().uuid().nullable().optional(),
-  identityId: z.string().uuid().nullable().optional(),
-  secretId: z.string().uuid().nullable().optional(),
-  createdAt: z.date(),
-  updatedAt: z.date()
-});
-
-export type TResourceMetadata = z.infer<typeof ResourceMetadataSchema>;
-export type TResourceMetadataInsert = Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>;
-export type TResourceMetadataUpdate = Partial<Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>>;
@@ -5,8 +5,6 @@
 
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const SamlConfigsSchema = z.object({
@@ -25,10 +23,7 @@ export const SamlConfigsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   orgId: z.string().uuid(),
-  lastUsed: z.date().nullable().optional(),
-  encryptedSamlEntryPoint: zodBuffer,
-  encryptedSamlIssuer: zodBuffer,
-  encryptedSamlCertificate: zodBuffer
+  lastUsed: z.date().nullable().optional()
 });
 
 export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
@@ -13,8 +13,7 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
   requestId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  reviewerUserId: z.string().uuid(),
-  comment: z.string().nullable().optional()
+  reviewerUserId: z.string().uuid()
 });
 
 export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
@@ -24,8 +24,7 @@ export const SecretApprovalRequestsSecretsV2Schema = z.object({
   requestId: z.string().uuid(),
   op: z.string(),
   secretId: z.string().uuid().nullable().optional(),
-  secretVersion: z.string().uuid().nullable().optional(),
-  secretMetadata: z.unknown().nullable().optional()
+  secretVersion: z.string().uuid().nullable().optional()
 });
 
 export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;
@@ -15,8 +15,7 @@ export const SecretFoldersSchema = z.object({
   updatedAt: z.date(),
   envId: z.string().uuid(),
   parentId: z.string().uuid().nullable().optional(),
-  isReserved: z.boolean().default(false).nullable().optional(),
-  description: z.string().nullable().optional()
+  isReserved: z.boolean().default(false).nullable().optional()
 });
 
 export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
@@ -5,8 +5,6 @@
 
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const SecretRotationsSchema = z.object({
@@ -24,8 +22,7 @@ export const SecretRotationsSchema = z.object({
   keyEncoding: z.string().nullable().optional(),
   envId: z.string().uuid(),
   createdAt: z.date(),
-  updatedAt: z.date(),
-  encryptedRotationData: zodBuffer
+  updatedAt: z.date()
 });
 
 export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
@@ -26,8 +26,7 @@ export const SecretSharingSchema = z.object({
   lastViewedAt: z.date().nullable().optional(),
   password: z.string().nullable().optional(),
   encryptedSecret: zodBuffer.nullable().optional(),
-  identifier: z.string().nullable().optional(),
-  type: z.string().default("share")
+  identifier: z.string().nullable().optional()
 });
 
 export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
@@ -1,40 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const SecretSyncsSchema = z.object({
-  id: z.string().uuid(),
-  name: z.string(),
-  description: z.string().nullable().optional(),
-  destination: z.string(),
-  isAutoSyncEnabled: z.boolean().default(true),
-  version: z.number().default(1),
-  destinationConfig: z.unknown(),
-  syncOptions: z.unknown(),
-  projectId: z.string(),
-  folderId: z.string().uuid().nullable().optional(),
-  connectionId: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date(),
-  syncStatus: z.string().nullable().optional(),
-  lastSyncJobId: z.string().nullable().optional(),
-  lastSyncMessage: z.string().nullable().optional(),
-  lastSyncedAt: z.date().nullable().optional(),
-  importStatus: z.string().nullable().optional(),
-  lastImportJobId: z.string().nullable().optional(),
-  lastImportMessage: z.string().nullable().optional(),
-  lastImportedAt: z.date().nullable().optional(),
-  removeStatus: z.string().nullable().optional(),
-  lastRemoveJobId: z.string().nullable().optional(),
-  lastRemoveMessage: z.string().nullable().optional(),
-  lastRemovedAt: z.date().nullable().optional()
-});
-
-export type TSecretSyncs = z.infer<typeof SecretSyncsSchema>;
-export type TSecretSyncsInsert = Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>;
-export type TSecretSyncsUpdate = Partial<Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>>;
@@ -25,10 +25,7 @@ export const SecretVersionsV2Schema = z.object({
   folderId: z.string().uuid(),
   userId: z.string().uuid().nullable().optional(),
   createdAt: z.date(),
-  updatedAt: z.date(),
-  userActorId: z.string().uuid().nullable().optional(),
-  identityActorId: z.string().uuid().nullable().optional(),
-  actorType: z.string().nullable().optional()
+  updatedAt: z.date()
 });
 
 export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;
@@ -1,24 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const SshCertificateAuthoritiesSchema = z.object({
-  id: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date(),
-  projectId: z.string(),
-  status: z.string(),
-  friendlyName: z.string(),
-  keyAlgorithm: z.string()
-});
-
-export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;
-export type TSshCertificateAuthoritiesInsert = Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>;
-export type TSshCertificateAuthoritiesUpdate = Partial<
-  Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>
->;
@@ -1,27 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { zodBuffer } from "@app/lib/zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const SshCertificateAuthoritySecretsSchema = z.object({
-  id: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date(),
-  sshCaId: z.string().uuid(),
-  encryptedPrivateKey: zodBuffer
-});
-
-export type TSshCertificateAuthoritySecrets = z.infer<typeof SshCertificateAuthoritySecretsSchema>;
-export type TSshCertificateAuthoritySecretsInsert = Omit<
-  z.input<typeof SshCertificateAuthoritySecretsSchema>,
-  TImmutableDBKeys
->;
-export type TSshCertificateAuthoritySecretsUpdate = Partial<
-  Omit<z.input<typeof SshCertificateAuthoritySecretsSchema>, TImmutableDBKeys>
->;
@@ -1,22 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { zodBuffer } from "@app/lib/zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const SshCertificateBodiesSchema = z.object({
-  id: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date(),
-  sshCertId: z.string().uuid(),
-  encryptedCertificate: zodBuffer
-});
-
-export type TSshCertificateBodies = z.infer<typeof SshCertificateBodiesSchema>;
-export type TSshCertificateBodiesInsert = Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>;
-export type TSshCertificateBodiesUpdate = Partial<Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>>;
@@ -1,30 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const SshCertificateTemplatesSchema = z.object({
-  id: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date(),
-  sshCaId: z.string().uuid(),
-  status: z.string(),
-  name: z.string(),
-  ttl: z.string(),
-  maxTTL: z.string(),
-  allowedUsers: z.string().array(),
-  allowedHosts: z.string().array(),
-  allowUserCertificates: z.boolean(),
-  allowHostCertificates: z.boolean(),
-  allowCustomKeyIds: z.boolean()
-});
-
-export type TSshCertificateTemplates = z.infer<typeof SshCertificateTemplatesSchema>;
-export type TSshCertificateTemplatesInsert = Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>;
-export type TSshCertificateTemplatesUpdate = Partial<
-  Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>
->;
@@ -1,26 +0,0 @@
-// Code generated by automation script, DO NOT EDIT.
-// Automated by pulling database and generating zod schema
-// To update. Just run npm run generate:schema
-// Written by akhilmhdh.
-
-import { z } from "zod";
-
-import { TImmutableDBKeys } from "./models";
-
-export const SshCertificatesSchema = z.object({
-  id: z.string().uuid(),
-  createdAt: z.date(),
-  updatedAt: z.date(),
-  sshCaId: z.string().uuid(),
-  sshCertificateTemplateId: z.string().uuid().nullable().optional(),
-  serialNumber: z.string(),
-  certType: z.string(),
-  principals: z.string().array(),
-  keyId: z.string(),
-  notBefore: z.date(),
-  notAfter: z.date()
-});
-
-export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;
-export type TSshCertificatesInsert = Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>;
-export type TSshCertificatesUpdate = Partial<Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>>;
@@ -23,10 +23,7 @@ export const SuperAdminSchema = z.object({
   defaultAuthOrgId: z.string().uuid().nullable().optional(),
   enabledLoginMethods: z.string().array().nullable().optional(),
   encryptedSlackClientId: zodBuffer.nullable().optional(),
-  encryptedSlackClientSecret: zodBuffer.nullable().optional(),
-  authConsentContent: z.string().nullable().optional(),
-  pageFrameContent: z.string().nullable().optional(),
-  adminIdentityIds: z.string().array().nullable().optional()
+  encryptedSlackClientSecret: zodBuffer.nullable().optional()
 });
 
 export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -5,14 +5,12 @@
 
 import { z } from "zod";
 
-import { zodBuffer } from "@app/lib/zod";
-
 import { TImmutableDBKeys } from "./models";
 
 export const WebhooksSchema = z.object({
   id: z.string().uuid(),
   secretPath: z.string().default("/"),
-  url: z.string().nullable().optional(),
+  url: z.string(),
   lastStatus: z.string().nullable().optional(),
   lastRunErrorMessage: z.string().nullable().optional(),
   isDisabled: z.boolean().default(false),
@@ -27,9 +25,7 @@ export const WebhooksSchema = z.object({
   urlCipherText: z.string().nullable().optional(),
   urlIV: z.string().nullable().optional(),
   urlTag: z.string().nullable().optional(),
-  type: z.string().default("general").nullable().optional(),
-  encryptedPassKey: zodBuffer.nullable().optional(),
-  encryptedUrl: zodBuffer
+  type: z.string().default("general").nullable().optional()
 });
 
 export type TWebhooks = z.infer<typeof WebhooksSchema>;
@@ -1,10 +1,10 @@
+import ms from "ms";
 import { z } from "zod";
 
 import { DynamicSecretLeasesSchema } from "@app/db/schemas";
 import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
 import { daysToMillisecond } from "@app/lib/dates";
 import { removeTrailingSlash } from "@app/lib/fn";
-import { ms } from "@app/lib/ms";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
@@ -1,3 +1,4 @@
+import ms from "ms";
 import { z } from "zod";
 
 import { DynamicSecretLeasesSchema } from "@app/db/schemas";
@@ -5,7 +6,6 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
 import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
 import { daysToMillisecond } from "@app/lib/dates";
 import { removeTrailingSlash } from "@app/lib/fn";
-import { ms } from "@app/lib/ms";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
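Both dynamic-secret router hunks above make the same swap: the internal wrapper from @app/lib/ms is replaced by the default export of the ms npm package. That package converts human-readable duration strings to milliseconds and back, e.g.:

import ms from "ms";

const ttlMs = ms("4h");   // 14400000 (milliseconds)
const label = ms(60000);  // "1m"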
@@ -1,265 +0,0 @@
-import { z } from "zod";
-
-import { GatewaysSchema } from "@app/db/schemas";
-import { isValidIp } from "@app/lib/ip";
-import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
-import { slugSchema } from "@app/server/lib/schemas";
-import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { AuthMode } from "@app/services/auth/auth-type";
-
-const SanitizedGatewaySchema = GatewaysSchema.pick({
-  id: true,
-  identityId: true,
-  name: true,
-  createdAt: true,
-  updatedAt: true,
-  issuedAt: true,
-  serialNumber: true,
-  heartbeat: true
-});
-
-const isValidRelayAddress = (relayAddress: string) => {
-  const [ip, port] = relayAddress.split(":");
-  return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
-};
-
-export const registerGatewayRouter = async (server: FastifyZodProvider) => {
-  server.route({
-    method: "POST",
-    url: "/register-identity",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      response: {
-        200: z.object({
-          turnServerUsername: z.string(),
-          turnServerPassword: z.string(),
-          turnServerRealm: z.string(),
-          turnServerAddress: z.string(),
-          infisicalStaticIp: z.string().optional()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      const relayDetails = await server.services.gateway.getGatewayRelayDetails(
-        req.permission.id,
-        req.permission.orgId,
-        req.permission.authMethod
-      );
-      return relayDetails;
-    }
-  });
-
-  server.route({
-    method: "POST",
-    url: "/exchange-cert",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      body: z.object({
-        relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
-      }),
-      response: {
-        200: z.object({
-          serialNumber: z.string(),
-          privateKey: z.string(),
-          certificate: z.string(),
-          certificateChain: z.string()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
-        identityOrg: req.permission.orgId,
-        identityId: req.permission.id,
-        relayAddress: req.body.relayAddress,
-        identityOrgAuthMethod: req.permission.authMethod
-      });
-      return gatewayCertificates;
-    }
-  });
-
-  server.route({
-    method: "POST",
-    url: "/heartbeat",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      response: {
-        200: z.object({
-          message: z.string()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      await server.services.gateway.heartbeat({
-        orgPermission: req.permission
-      });
-      return { message: "Successfully registered heartbeat" };
-    }
-  });
-
-  server.route({
-    method: "GET",
-    url: "/",
-    config: {
-      rateLimit: readLimit
-    },
-    schema: {
-      querystring: z.object({
-        projectId: z.string().optional()
-      }),
-      response: {
-        200: z.object({
-          gateways: SanitizedGatewaySchema.extend({
-            identity: z.object({
-              name: z.string(),
-              id: z.string()
-            }),
-            projects: z
-              .object({
-                name: z.string(),
-                id: z.string(),
-                slug: z.string()
-              })
-              .array()
-          }).array()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
-    handler: async (req) => {
-      const gateways = await server.services.gateway.listGateways({
-        orgPermission: req.permission
-      });
-      return { gateways };
-    }
-  });
-
-  server.route({
-    method: "GET",
-    url: "/projects/:projectId",
-    config: {
-      rateLimit: readLimit
-    },
-    schema: {
-      params: z.object({
-        projectId: z.string()
-      }),
-      response: {
-        200: z.object({
-          gateways: SanitizedGatewaySchema.extend({
-            identity: z.object({
-              name: z.string(),
-              id: z.string()
-            }),
-            projectGatewayId: z.string()
-          }).array()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
-    handler: async (req) => {
-      const gateways = await server.services.gateway.getProjectGateways({
-        projectId: req.params.projectId,
-        projectPermission: req.permission
-      });
-      return { gateways };
-    }
-  });
-
-  server.route({
-    method: "GET",
-    url: "/:id",
-    config: {
-      rateLimit: readLimit
-    },
-    schema: {
-      params: z.object({
-        id: z.string()
-      }),
-      response: {
-        200: z.object({
-          gateway: SanitizedGatewaySchema.extend({
-            identity: z.object({
-              name: z.string(),
-              id: z.string()
-            })
-          })
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
-    handler: async (req) => {
-      const gateway = await server.services.gateway.getGatewayById({
-        orgPermission: req.permission,
-        id: req.params.id
-      });
-      return { gateway };
-    }
-  });
-
-  server.route({
-    method: "PATCH",
-    url: "/:id",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      params: z.object({
-        id: z.string()
-      }),
-      body: z.object({
-        name: slugSchema({ field: "name" }).optional(),
-        projectIds: z.string().array().optional()
-      }),
-      response: {
-        200: z.object({
-          gateway: SanitizedGatewaySchema
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
-    handler: async (req) => {
-      const gateway = await server.services.gateway.updateGatewayById({
-        orgPermission: req.permission,
-        id: req.params.id,
-        name: req.body.name,
-        projectIds: req.body.projectIds
-      });
-      return { gateway };
-    }
-  });
-
-  server.route({
-    method: "DELETE",
-    url: "/:id",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      params: z.object({
-        id: z.string()
-      }),
-      response: {
-        200: z.object({
-          gateway: SanitizedGatewaySchema
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
-    handler: async (req) => {
-      const gateway = await server.services.gateway.deleteGatewayById({
-        orgPermission: req.permission,
-        id: req.params.id
-      });
-      return { gateway };
-    }
-  });
-};
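The deleted gateway router above gates /exchange-cert bodies with z.string().refine(isValidRelayAddress, ...), accepting only ip:port pairs whose port falls between 40000 and 65535. A standalone sketch of that validation, with a loose hypothetical IPv4 check standing in for @app/lib/ip's isValidIp (whose implementation is not shown in this diff):

import { z } from "zod";

// Hypothetical stand-in for isValidIp from @app/lib/ip.
const isValidIp = (ip: string) => /^(\d{1,3}\.){3}\d{1,3}$/.test(ip);

// Same shape as the deleted router's helper: split "ip:port" and bound the port.
const isValidRelayAddress = (relayAddress: string) => {
  const [ip, port] = relayAddress.split(":");
  return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
};

const ExchangeCertBody = z.object({
  relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
});

ExchangeCertBody.parse({ relayAddress: "203.0.113.7:41000" }); // passes
// ExchangeCertBody.parse({ relayAddress: "203.0.113.7:8080" }); // throws: port below 40000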
@@ -1,11 +1,11 @@
 import slugify from "@sindresorhus/slugify";
+import ms from "ms";
 import { z } from "zod";
 
 import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
 import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
 import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
 import { UnauthorizedError } from "@app/lib/errors";
-import { ms } from "@app/lib/ms";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
@@ -7,11 +7,8 @@ import { registerCaCrlRouter } from "./certificate-authority-crl-router";
 import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
 import { registerDynamicSecretRouter } from "./dynamic-secret-router";
 import { registerExternalKmsRouter } from "./external-kms-router";
-import { registerGatewayRouter } from "./gateway-router";
 import { registerGroupRouter } from "./group-router";
 import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
-import { registerKmipRouter } from "./kmip-router";
-import { registerKmipSpecRouter } from "./kmip-spec-router";
 import { registerLdapRouter } from "./ldap-router";
 import { registerLicenseRouter } from "./license-router";
 import { registerOidcRouter } from "./oidc-router";
@@ -25,13 +22,9 @@ import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-rou
 import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router";
 import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router";
 import { registerSecretRotationRouter } from "./secret-rotation-router";
-import { registerSecretRouter } from "./secret-router";
 import { registerSecretScanningRouter } from "./secret-scanning-router";
 import { registerSecretVersionRouter } from "./secret-version-router";
 import { registerSnapshotRouter } from "./snapshot-router";
-import { registerSshCaRouter } from "./ssh-certificate-authority-router";
-import { registerSshCertRouter } from "./ssh-certificate-router";
-import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
 import { registerTrustedIpRouter } from "./trusted-ip-router";
 import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
 
@@ -68,8 +61,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
     { prefix: "/dynamic-secrets" }
   );
 
-  await server.register(registerGatewayRouter, { prefix: "/gateways" });
-
   await server.register(
     async (pkiRouter) => {
       await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
@@ -77,15 +68,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
     { prefix: "/pki" }
   );
 
-  await server.register(
-    async (sshRouter) => {
-      await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
-      await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
-      await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
-    },
-    { prefix: "/ssh" }
-  );
-
   await server.register(
     async (ssoRouter) => {
       await ssoRouter.register(registerSamlRouter);
@@ -98,7 +80,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
   await server.register(registerLdapRouter, { prefix: "/ldap" });
   await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
   await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
-  await server.register(registerSecretRouter, { prefix: "/secrets" });
   await server.register(registerSecretVersionRouter, { prefix: "/secret" });
   await server.register(registerGroupRouter, { prefix: "/groups" });
   await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
@@ -115,12 +96,4 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
   });
 
   await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
-
-  await server.register(
-    async (kmipRouter) => {
-      await kmipRouter.register(registerKmipRouter);
-      await kmipRouter.register(registerKmipSpecRouter, { prefix: "/spec" });
-    },
-    { prefix: "/kmip" }
-  );
 };
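The removed registrations above all use Fastify's nested-plugin pattern: an inline async plugin groups related routers, and inner prefixes compose with the outer one (so registerKmipSpecRouter would have served under /kmip/spec). A minimal runnable sketch of the same pattern with hypothetical stub routers:

import Fastify, { FastifyInstance } from "fastify";

// Stub routers standing in for registerKmipRouter / registerKmipSpecRouter.
const registerKmipRouter = async (r: FastifyInstance) => {
  r.get("/", async () => ({ at: "/kmip" }));
};
const registerKmipSpecRouter = async (r: FastifyInstance) => {
  r.get("/", async () => ({ at: "/kmip/spec" }));
};

const server = Fastify();

// Nested registration: the inner prefixes compose with the outer "/kmip".
await server.register(
  async (kmipRouter) => {
    await kmipRouter.register(registerKmipRouter);
    await kmipRouter.register(registerKmipSpecRouter, { prefix: "/spec" });
  },
  { prefix: "/kmip" }
);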
Some files were not shown because too many files have changed in this diff.