Mirror of https://github.com/Infisical/infisical.git, synced 2025-06-29 04:31:59 +00:00

Compare commits (6 commits): feat/add-m... → secret-syn...

Author | SHA1 | Date
---|---|---
 | 41484239c6 |
 | 06c1471f3f |
 | 2a987c61eb |
 | e26a67d545 |
 | 487a679aa9 |
 | b57bdd869c |
25 .env.example

@@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@@ -92,31 +91,17 @@ ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

# App Connections

# aws assume-role connection
# aws assume-role
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

# github oauth connection
# github oauth
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

#github app connection
#github app
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
INF_APP_CONNECTION_GITHUB_APP_ID=
3 .envrc

@@ -1,3 +0,0 @@

# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake
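A brief aside on what this now-deleted file did (a sketch, assuming direnv and Nix with flakes enabled): direnv evaluates `.envrc` whenever you enter the directory, and `use flake` loads the repo's Nix dev shell.

```sh
# Sketch (assumes direnv + Nix flakes installed): the workflow the deleted .envrc enabled.
cd infisical/   # entering the repo makes direnv evaluate .envrc
direnv allow    # trust the file once; direnv then runs `use flake`
                # and loads the flake's dev shell into the current environment
```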
@@ -32,23 +32,10 @@ jobs:

        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Start the server
        run: |
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

          echo "Examining built image:"
          docker image inspect infisical-api | grep -A 5 "Entrypoint"

          docker run --name infisical-api -d -p 4000:4000 \
            -e DB_CONNECTION_URI=$DB_CONNECTION_URI \
            -e REDIS_URL=$REDIS_URL \
            -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
            -e ENCRYPTION_KEY=$ENCRYPTION_KEY \
            --env-file .env \
            infisical-api

          echo "Container status right after creation:"
          docker ps -a | grep infisical-api
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable

@@ -56,48 +43,35 @@ jobs:

          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - uses: actions/setup-go@v5
        with:
          go-version: "1.21.5"
          go-version: '1.21.5'
      - name: Wait for container to be stable and check logs
        run: |
          SECONDS=0
          HEALTHY=0
          while [ $SECONDS -lt 60 ]; do
            # Check if container is running
            if docker ps | grep infisical-api; then
              # Try to access the API endpoint
              if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
                echo "API endpoint is responding. Container seems healthy."
                HEALTHY=1
                break
              fi
            else
              echo "Container is not running!"
              docker ps -a | grep infisical-api
            if docker ps | grep infisical-api | grep -q healthy; then
              echo "Container is healthy."
              HEALTHY=1
              break
            fi

            echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
            sleep 5
            SECONDS=$((SECONDS+5))

            docker logs infisical-api

            sleep 2
            SECONDS=$((SECONDS+2))
          done

          if [ $HEALTHY -ne 1 ]; then
            echo "Container did not become healthy in time"
            echo "Container status:"
            docker ps -a | grep infisical-api
            echo "Container logs (if any):"
            docker logs infisical-api || echo "No logs available"
            echo "Container inspection:"
            docker inspect infisical-api | grep -A 5 "State"
            exit 1
          fi
      - name: Install openapi-diff
        run: go install github.com/oasdiff/oasdiff@latest
        run: go install github.com/tufin/oasdiff@latest
      - name: Running OpenAPI Spec diff action
        run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
      - name: cleanup
        if: always()
        run: |
          docker compose -f "docker-compose.dev.yml" down
          docker stop infisical-api || true
          docker rm infisical-api || true
          docker stop infisical-api
          docker remove infisical-api
212 .github/workflows/deployment-pipeline.yml (vendored, new file)

@@ -0,0 +1,212 @@
name: Deployment pipeline
on: [workflow_dispatch]

permissions:
  id-token: write
  contents: read

jobs:
  infisical-tests:
    name: Integration tests
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile.standalone-infisical
          tags: |
            infisical/staging_infisical:${{ steps.commit.outputs.short }}
            infisical/staging_infisical:latest
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}

  gamma-deployment:
    name: Deploy to gamma
    runs-on: ubuntu-latest
    needs: [infisical-image]
    environment:
      name: Gamma
    steps:
      - uses: twingate/github-action@v1
        with:
          # The Twingate Service Key used to connect Twingate to the proper service
          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
          #
          # Required
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-gamma-stage
          cluster: infisical-gamma-stage
          wait-for-service-stability: true

  production-us:
    name: US production deploy
    runs-on: ubuntu-latest
    needs: [gamma-deployment]
    environment:
      name: Production
    steps:
      - uses: twingate/github-action@v1
        with:
          # The Twingate Service Key used to connect Twingate to the proper service
          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
          #
          # Required
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
          AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: us-east-1
          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true

  production-eu:
    name: EU production deploy
    runs-on: ubuntu-latest
    needs: [production-us]
    environment:
      name: production-eu
    steps:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          wait-for-service-stability: true
@@ -1,4 +1,4 @@

name: Release Infisical Core Helm chart
name: Release Helm Charts

on: [workflow_dispatch]

@@ -17,6 +17,6 @@ jobs:

      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
        run: cd helm-charts && sh upload-to-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
27 .github/workflows/release-k8-operator-helm.yml (vendored)

@@ -1,27 +0,0 @@

name: Release K8 Operator Helm Chart
on:
  workflow_dispatch:

jobs:
  release-helm:
    name: Release Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: Install python
        uses: actions/setup-python@v4

      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli

      - name: Build and push helm package to CloudSmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
252 .github/workflows/release_build_infisical_cli.yml (vendored)

@@ -1,147 +1,131 @@

name: Build and release CLI

on:
  workflow_dispatch:
  workflow_dispatch:

  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"
  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"

permissions:
  contents: write
  contents: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
      CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
      CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-latest
    env:
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Print version
        run: echo ${{ env.CLI_VERSION }}
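The `sed` in the Extract version step above simply strips the tag prefix. A quick illustration with a hypothetical tag name matching the `infisical-cli/v*.*.*` trigger:

```sh
# Illustration only (hypothetical tag name): the same sed the workflow uses.
echo "infisical-cli/v1.2.3" | sed 's/infisical-cli\/v//'   # prints: 1.2.3
```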
      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
  npm-release:
    runs-on: ubuntu-20.04
    env:
      NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack
      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Print version
        run: echo ${{ env.CLI_VERSION }}

  goreleaser:
    runs-on: ubuntu-latest
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: Setup for libssl1.0-dev
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
          sudo apt update
          sudo apt-get install -y libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: v1.26.2-pro
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
        with:
          ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
      - name: Install deb-s3
        run: gem install deb-s3
      - name: Configure GPG Key
        run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
        env:
          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
          GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
          INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
          INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
          AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-20.04
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: libssl1.1 => libssl1.0-dev for OSXCross
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt update && apt-cache policy libssl1.0-dev
          sudo apt-get install libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: v1.26.2-pro
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
130 .github/workflows/release_docker_k8_operator.yaml (vendored)

@@ -1,107 +1,37 @@

name: Release K8 Operator Docker Image
name: Release Docker image for K8 operator
on:
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"

permissions:
  contents: write
  pull-requests: write
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"

jobs:
  release-image:
    name: Generate Helm Chart PR
    runs-on: ubuntu-latest
    outputs:
      pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
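The `${GITHUB_REF_NAME#infisical-k8-operator/}` expansion used in both variants of the extract_version step strips the shortest matching prefix from the tag. Illustrated with a hypothetical tag that matches the `infisical-k8-operator/v*.*.*` trigger:

```sh
# Illustration only (hypothetical tag): shell prefix removal, as in extract_version.
GITHUB_REF_NAME="infisical-k8-operator/v0.5.0"
echo "${GITHUB_REF_NAME#infisical-k8-operator/}"   # prints: v0.5.0
```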
      - uses: actions/checkout@v2

      - name: Checkout code
        uses: actions/checkout@v2
      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v1

      # Dependency for helm generation
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      # Dependency for helm generation
      - name: Install Go
        uses: actions/setup-go@v4
        with:
          go-version: 1.21
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      # Install binaries for helm generation
      - name: Install dependencies
        working-directory: k8-operator
        run: |
          make helmify
          make kustomize
          make controller-gen

      - name: Generate Helm Chart
        working-directory: k8-operator
        run: make helm

      - name: Update Helm Chart Version
        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}

      - name: Debug - Check file changes
        run: |
          echo "Current git status:"
          git status
          echo ""
          echo "Modified files:"
          git diff --name-only

          # If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
          if [ -z "$(git diff --name-only)" ]; then
            echo "No helm changes or version changes. Invalid release detected, Exiting."
            exit 1
          fi

      - name: Create Helm Chart PR
        id: create-pr
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
          committer: GitHub <noreply@github.com>
          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
          branch: helm-update-${{ steps.extract_version.outputs.version }}
          delete-branch: true
          title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
          body: |
            This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
            Additionally the helm chart has been updated to match the latest operator code changes.

            Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

            Once you have approved this PR, you can trigger the helm release workflow manually.
          base: main

      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
8 .github/workflows/run-backend-tests.yml (vendored)

@@ -34,10 +34,7 @@ jobs:

        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Run unit test
        run: npm run test:unit
        working-directory: backend
      - name: Run integration test
      - name: Start integration test
        run: npm run test:e2e
        working-directory: backend
        env:

@@ -47,5 +44,4 @@ jobs:

          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - name: cleanup
        run: |
          docker compose -f "docker-compose.dev.yml" down

          docker compose -f "docker-compose.dev.yml" down
@@ -162,24 +162,6 @@ scoop:

    description: "The official Infisical CLI"
    license: MIT

winget:
  - name: infisical
    publisher: infisical
    license: MIT
    homepage: https://infisical.com
    short_description: "The official Infisical CLI"
    repository:
      owner: infisical
      name: winget-pkgs
      branch: "infisical-{{.Version}}"
      pull_request:
        enabled: true
        draft: false
        base:
          owner: microsoft
          name: winget-pkgs
          branch: master

aurs:
  - name: infisical-bin
    homepage: "https://infisical.com"
@@ -7,10 +7,3 @@ docs/self-hosting/configuration/envars.mdx:generic-api-key:106

frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
@@ -161,9 +161,6 @@ COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
@@ -3,10 +3,13 @@ ARG POSTHOG_API_KEY=posthog-api-key

ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base
FROM node:20-alpine AS base

FROM base AS frontend-dependencies

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./

@@ -42,8 +45,8 @@ RUN npm run build

FROM base AS frontend-runner
WORKDIR /app

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

@@ -53,23 +56,21 @@ USER non-root-user

## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

WORKDIR /app

# Install all required dependencies for build
RUN apt-get update && apt-get install -y \
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
    freetds-dev

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -85,19 +86,18 @@ FROM base AS backend-runner

WORKDIR /app

# Install all required dependencies for runtime
RUN apt-get update && apt-get install -y \
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    && rm -rf /var/lib/apt/lists/*
    freetds-dev

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
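For readability: the two printf one-liners above write equivalent /etc/odbcinst.ini files, differing only in driver paths (Debian's multiarch path on the slim image vs /usr/lib on Alpine). The Alpine-side command, shown unescaped:

```sh
# Equivalent to the Alpine-side printf above, with the escapes expanded for readability.
cat > /etc/odbcinst.ini <<'EOF'
[FreeTDS]
Description = FreeTDS Driver
Driver = /usr/lib/libtdsodbc.so
Setup = /usr/lib/libtdsodbc.so
FileUsage = 1
EOF
```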
COPY backend/package*.json ./
RUN npm ci --only-production

@@ -109,36 +109,34 @@ RUN mkdir frontend-build

# Production stage
FROM base AS production

RUN apt-get update && apt-get install -y \
    ca-certificates \
    bash \
    curl \
    git \
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

# Install all required runtime dependencies
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    wget \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /
    bash \
    curl \
    git \
    openssh

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
    && useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs

@@ -156,11 +154,11 @@ ENV INTERCOM_ID=$INTERCOM_ID

ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0

@@ -168,7 +166,6 @@ ENV HTTPS_ENABLED false

ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true

WORKDIR /backend

ENV TELEMETRY_ENABLED true
3 Makefile

@@ -30,6 +30,3 @@ reviewable-api:

	npm run type:check

reviewable: reviewable-ui reviewable-api

up-dev-sso:
	docker compose -f docker-compose.dev.yml --profile sso up --build
11 README.md

@@ -56,7 +56,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus

- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.

### Infisical (Internal) PKI:
### Internal PKI:

- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.

@@ -64,17 +64,12 @@ We're on a mission to make security tooling more accessible to everyone, not jus

- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.

### Infisical Key Management System (KMS):
### Key Management (KMS):

- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.

### Infisical SSH

- **[Signed SSH Certificates](https://infisical.com/docs/documentation/platform/ssh)**: Issue ephemeral SSH credentials for secure, short-lived, and centralized access to infrastructure.

### General Platform:

- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.

@@ -125,7 +120,7 @@ Install pre commit hook to scan each commit before you push to your repository

infisical scan install --pre-commit-hook
```

Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)

## Open-source vs. paid
@@ -1,22 +1,23 @@

# Build stage
FROM node:20-slim AS build
FROM node:20-alpine AS build

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    openssh-client
RUN apk --update add \
    python3 \
    make \
    g++ \
    openssh

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds-bin \
    freetds-dev \
    freetds \
    unixodbc-dev \
    libc-dev
    libc-dev \
    freetds-dev

COPY package*.json ./
RUN npm ci --only-production

@@ -25,36 +26,36 @@ COPY . .

RUN npm run build

# Production stage
FROM node:20-slim
FROM node:20-alpine
WORKDIR /app

ENV npm_config_cache /home/node/.npm

COPY package*.json ./

RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++
RUN apk --update add \
    python3 \
    make \
    g++

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds-bin \
    freetds-dev \
    freetds \
    unixodbc-dev \
    libc-dev
    libc-dev \
    freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .

# Install Infisical CLI
RUN apt-get install -y curl bash && \
    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && apt-get install -y infisical=0.8.1 git
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
    CMD node healthcheck.js
@@ -1,4 +1,4 @@

FROM node:20-slim
FROM node:20-alpine

# ? Setup a test SoftHSM module. In production a real HSM is used.

@@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
    build-essential \
    autoconf \
    automake \
    git \
    libtool \
    libssl-dev \
    python3 \
    make \
    g++ \
    openssh-client \
    curl \
    pkg-config
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
    alpine-sdk \
    autoconf \
    automake \
    git \
    libtool \
    openssl-dev \
    python3 \
    make \
    g++ \
    openssh

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc
    libc-dev \
    freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Build and install SoftHSM2
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# build and install SoftHSM2

RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

@@ -45,18 +45,16 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \

WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# Install pkcs11-tool
RUN apt-get install -y opensc
# install pkcs11-tool
RUN apk --update add opensc

RUN mkdir -p /etc/softhsm2/tokens && \
    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

# ? App setup

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && \
    apt-get install -y infisical=0.8.1
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git

WORKDIR /app
@@ -1,85 +0,0 @@

FROM node:20-slim

# ? Setup a test SoftHSM module. In production a real HSM is used.

ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
    build-essential \
    autoconf \
    automake \
    git \
    libtool \
    libssl-dev \
    python3 \
    make \
    g++ \
    openssh-client \
    curl \
    pkg-config \
    perl \
    wget

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
    && sh autogen.sh \
    && ./configure --prefix=/usr/local --disable-gost \
    && make \
    && make install

WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# Install pkcs11-tool
RUN apt-get install -y opensc

RUN mkdir -p /etc/softhsm2/tokens && \
    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
    && tar -xf openssl-3.1.2.tar.gz \
    && cd openssl-3.1.2 \
    && ./Configure enable-fips \
    && make \
    && make install_fips

# ? App setup

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && \
    apt-get install -y infisical=0.8.1

WORKDIR /app

COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm install

COPY . .

ENV HOST=0.0.0.0
ENV OPENSSL_CONF=/app/nodejs.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV NODE_OPTIONS=--force-fips

CMD ["npm", "run", "dev:docker"]
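As context for the deleted file above: with `OPENSSL_CONF` pointing at the FIPS-enabled config and `NODE_OPTIONS=--force-fips`, a quick way to confirm the provider was actually active inside such a container is Node's built-in check (a sketch; `crypto.getFips()` returns 1 when FIPS mode is on):

```sh
# Sketch: verify FIPS mode inside the (now removed) FIPS image.
node -e 'console.log("FIPS:", require("crypto").getFips())'   # expect: FIPS: 1
```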
@@ -11,7 +11,6 @@ export const mockQueue = (): TQueueServiceFactory => {

      job[name] = jobData;
    },
    queuePg: async () => {},
    schedulePg: async () => {},
    initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
@@ -535,107 +535,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]

      );
    });

    test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          mode: "upsert",
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            secretComment: secret.comment
          }))
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test("Bulk upsert secrets in path multiple paths", async () => {
      const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
        secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
        secretValue: "update-value",
        secretComment: "comment",
        secretPath: secretTestCases[0].path
      }));
      const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
        secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
        secretValue: "update-value",
        secretComment: "comment",
        secretPath: secretTestCases[1].path
      }));
      const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];

      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          mode: "upsert",
          secrets: testSecrets
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
      expect(firstBatchSecretsOnInfisical).toEqual(
        expect.arrayContaining(
          firstBatchSecrets.map((el) =>
            expect.objectContaining({
              secretKey: el.secretKey,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
      expect(secondBatchSecretsOnInfisical).toEqual(
        expect.arrayContaining(
          secondBatchSecrets.map((el) =>
            expect.objectContaining({
              secretKey: el.secretKey,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
    });

    test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
@@ -23,14 +23,14 @@ export default {

  name: "knex-env",
  transformMode: "ssr",
  async setup() {
    const logger = initLogger();
    const envConfig = initEnvConfig(logger);
    const logger = await initLogger();
    const cfg = initEnvConfig(logger);
    const db = initDbConnection({
      dbConnectionUri: envConfig.DB_CONNECTION_URI,
      dbRootCert: envConfig.DB_ROOT_CERT
      dbConnectionUri: cfg.DB_CONNECTION_URI,
      dbRootCert: cfg.DB_ROOT_CERT
    });

    const redis = new Redis(envConfig.REDIS_URL);
    const redis = new Redis(cfg.REDIS_URL);
    await redis.flushdb("SYNC");

    try {

@@ -42,7 +42,6 @@ export default {

      },
      true
    );

    await db.migrate.latest({
      directory: path.join(__dirname, "../src/db/migrations"),
      extension: "ts",

@@ -53,24 +52,14 @@ export default {

      directory: path.join(__dirname, "../src/db/seeds"),
      extension: "ts"
    });

    const smtp = mockSmtpServer();
    const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
    const keyStore = keyStoreFactory(envConfig.REDIS_URL);
    const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
    const keyStore = keyStoreFactory(cfg.REDIS_URL);

    const hsmModule = initializeHsmModule(envConfig);
    const hsmModule = initializeHsmModule();
    hsmModule.initialize();

    const server = await main({
      db,
      smtp,
      logger,
      queue,
      keyStore,
      hsmModule: hsmModule.getModule(),
      redis,
      envConfig
    });
    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });

    // @ts-expect-error type
    globalThis.testServer = server;

@@ -84,8 +73,8 @@ export default {

      organizationId: seedData1.organization.id,
      accessVersion: 1
    },
    envConfig.AUTH_SECRET,
    { expiresIn: envConfig.JWT_AUTH_LIFETIME }
    cfg.AUTH_SECRET,
    { expiresIn: cfg.JWT_AUTH_LIFETIME }
    );
  } catch (error) {
    // eslint-disable-next-line
@ -1,16 +0,0 @@
nodejs_conf = nodejs_init

.include /usr/local/ssl/fipsmodule.cnf

[nodejs_init]
providers = provider_sect

[provider_sect]
default = default_sect
fips = fips_sect

[default_sect]
activate = 1

[algorithm_sect]
default_properties = fips=yes
2506 backend/package-lock.json (generated; diff suppressed because it is too large)
@ -40,38 +40,29 @@
    "type:check": "tsc --noEmit",
    "lint:fix": "eslint --fix --ext js,ts ./src",
    "lint": "eslint 'src/**/*.ts'",
    "test:unit": "vitest run -c vitest.unit.config.ts",
    "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
    "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
    "generate:component": "tsx ./scripts/create-backend-file.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
    "migration:new": "tsx ./scripts/create-migration.ts",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migrate:org": "tsx ./scripts/migrate-organization.ts",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
    "seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
    "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
  },
  "keywords": [],
@ -138,7 +129,7 @@
    "@fastify/etag": "^5.1.0",
    "@fastify/formbody": "^7.4.0",
    "@fastify/helmet": "^11.1.1",
    "@fastify/multipart": "8.3.1",
    "@fastify/multipart": "8.3.0",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
    "@fastify/request-context": "^5.1.0",
@ -147,18 +138,17 @@
    "@fastify/swagger": "^8.14.0",
    "@fastify/swagger-ui": "^2.1.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
    "@node-saml/passport-saml": "^5.0.1",
    "@node-saml/passport-saml": "^4.0.4",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/plugin-retry": "^5.0.5",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
    "@opentelemetry/exporter-prometheus": "^0.55.0",
    "@opentelemetry/instrumentation": "^0.55.0",
    "@opentelemetry/instrumentation-http": "^0.57.2",
    "@opentelemetry/resources": "^1.28.0",
    "@opentelemetry/sdk-metrics": "^1.28.0",
    "@opentelemetry/semantic-conventions": "^1.27.0",
@ -166,8 +156,8 @@
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
    "@sindresorhus/slugify": "1.1.0",
    "@slack/oauth": "^3.0.2",
    "@slack/web-api": "^7.8.0",
    "@slack/oauth": "^3.0.1",
    "@slack/web-api": "^7.3.4",
    "@ucast/mongo2js": "^1.3.4",
    "ajv": "^8.12.0",
    "argon2": "^0.31.2",
@ -179,7 +169,6 @@
    "cassandra-driver": "^4.7.2",
    "connect-redis": "^7.1.1",
    "cron": "^3.1.7",
    "dd-trace": "^5.40.0",
    "dotenv": "^16.4.1",
    "fastify": "^4.28.1",
    "fastify-plugin": "^4.5.1",
@ -188,7 +177,6 @@
    "handlebars": "^4.7.8",
    "hdb": "^0.19.10",
    "ioredis": "^5.3.2",
    "isomorphic-dompurify": "^2.22.0",
    "jmespath": "^0.16.0",
    "jsonwebtoken": "^9.0.2",
    "jsrp": "^0.2.4",
@ -201,7 +189,7 @@
    "mongodb": "^6.8.1",
    "ms": "^2.1.3",
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.8",
    "nanoid": "^3.3.4",
    "nodemailer": "^6.9.9",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
7 backend/src/@types/fastify-request-context.d.ts (vendored, new file)
@ -0,0 +1,7 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
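For context on the new file above: augmenting RequestContextData via declaration merging is what makes reads and writes of reqId type-safe at call sites. A minimal, hypothetical usage sketch (the plugin registration, hook, and route are assumed for illustration, not taken from this diff):

import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";
import { randomUUID } from "node:crypto";

const app = Fastify();
app.register(fastifyRequestContext);

// stamp every request with an id early in the lifecycle;
// the "reqId" key type-checks thanks to the RequestContextData merge above
app.addHook("onRequest", async () => {
  requestContext.set("reqId", randomUUID());
});

app.get("/ping", async () => {
  const reqId = requestContext.get("reqId"); // typed as string
  return { reqId };
});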
28 backend/src/@types/fastify.d.ts (vendored)
@ -13,13 +13,9 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@ -33,7 +29,6 @@ import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
@ -98,19 +93,6 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
    identityAuthInfo?: {
      identityId: string;
      oidc?: {
        claims: Record<string, string>;
      };
    };
    identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
  }
}

declare module "fastify" {
  interface Session {
    callbackPort: string;
@ -138,11 +120,6 @@ declare module "fastify" {
      isUserCompleted: string;
      providerAuthToken: string;
    };
    kmipUser: {
      projectId: string;
      clientId: string;
      name: string;
    };
    auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
    ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
    ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
@ -235,16 +212,11 @@ declare module "fastify" {
      totp: TTotpServiceFactory;
      appConnection: TAppConnectionServiceFactory;
      secretSync: TSecretSyncServiceFactory;
      kmip: TKmipServiceFactory;
      kmipOperation: TKmipOperationServiceFactory;
      gateway: TGatewayServiceFactory;
      secretRotationV2: TSecretRotationV2ServiceFactory;
    };
    // this is exclusively for middlewares in which we need to inject data;
    // everywhere else, access it through the service layer
    store: {
      user: Pick<TUserDALFactory, "findById">;
      kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
    };
  }
}
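The store block above is reserved for middlewares that need direct data access; everywhere else the service layer is the entry point. A hypothetical sketch of such a middleware, assuming store is exposed on the Fastify instance as typed above (the req auth shape is invented purely for illustration):

import type { FastifyInstance } from "fastify";

// a minimal sketch, not the project's actual middleware
export const registerUserLoaderHook = (server: FastifyInstance) => {
  server.addHook("preHandler", async (req) => {
    // the auth property and its shape are assumptions for this example
    const userId = (req as { auth?: { userId?: string } }).auth?.userId;
    if (!userId) return;
    const user = await server.store.user.findById(userId);
    if (!user) throw new Error("user not found");
  });
};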
108 backend/src/@types/knex.d.ts (vendored)
@ -17,9 +17,6 @@ import {
  TApiKeys,
  TApiKeysInsert,
  TApiKeysUpdate,
  TAppConnections,
  TAppConnectionsInsert,
  TAppConnectionsUpdate,
  TAuditLogs,
  TAuditLogsInsert,
  TAuditLogStreams,
@ -68,15 +65,9 @@ import {
  TDynamicSecrets,
  TDynamicSecretsInsert,
  TDynamicSecretsUpdate,
  TExternalGroupOrgRoleMappings,
  TExternalGroupOrgRoleMappingsInsert,
  TExternalGroupOrgRoleMappingsUpdate,
  TExternalKms,
  TExternalKmsInsert,
  TExternalKmsUpdate,
  TGateways,
  TGatewaysInsert,
  TGatewaysUpdate,
  TGitAppInstallSessions,
  TGitAppInstallSessionsInsert,
  TGitAppInstallSessionsUpdate,
@ -152,18 +143,6 @@ import {
  TInternalKms,
  TInternalKmsInsert,
  TInternalKmsUpdate,
  TKmipClientCertificates,
  TKmipClientCertificatesInsert,
  TKmipClientCertificatesUpdate,
  TKmipClients,
  TKmipClientsInsert,
  TKmipClientsUpdate,
  TKmipOrgConfigs,
  TKmipOrgConfigsInsert,
  TKmipOrgConfigsUpdate,
  TKmipOrgServerCertificates,
  TKmipOrgServerCertificatesInsert,
  TKmipOrgServerCertificatesUpdate,
  TKmsKeys,
  TKmsKeysInsert,
  TKmsKeysUpdate,
@ -188,9 +167,6 @@ import {
  TOrgBots,
  TOrgBotsInsert,
  TOrgBotsUpdate,
  TOrgGatewayConfig,
  TOrgGatewayConfigInsert,
  TOrgGatewayConfigUpdate,
  TOrgMemberships,
  TOrgMembershipsInsert,
  TOrgMembershipsUpdate,
@ -212,9 +188,6 @@ import {
  TProjectEnvironments,
  TProjectEnvironmentsInsert,
  TProjectEnvironmentsUpdate,
  TProjectGateways,
  TProjectGatewaysInsert,
  TProjectGatewaysUpdate,
  TProjectKeys,
  TProjectKeysInsert,
  TProjectKeysUpdate,
@ -245,9 +218,6 @@ import {
  TRateLimit,
  TRateLimitInsert,
  TRateLimitUpdate,
  TResourceMetadata,
  TResourceMetadataInsert,
  TResourceMetadataUpdate,
  TSamlConfigs,
  TSamlConfigsInsert,
  TSamlConfigsUpdate,
@ -305,12 +275,6 @@ import {
  TSecretRotations,
  TSecretRotationsInsert,
  TSecretRotationsUpdate,
  TSecretRotationsV2,
  TSecretRotationsV2Insert,
  TSecretRotationsV2Update,
  TSecretRotationV2SecretMappings,
  TSecretRotationV2SecretMappingsInsert,
  TSecretRotationV2SecretMappingsUpdate,
  TSecrets,
  TSecretScanningGitRisks,
  TSecretScanningGitRisksInsert,
@ -332,27 +296,15 @@ import {
  TSecretSnapshotsInsert,
  TSecretSnapshotsUpdate,
  TSecretsUpdate,
  TSecretsV2,
  TSecretsV2Insert,
  TSecretsV2Update,
  TSecretSyncs,
  TSecretSyncsInsert,
  TSecretSyncsUpdate,
  TSecretTagJunction,
  TSecretTagJunctionInsert,
  TSecretTagJunctionUpdate,
  TSecretTags,
  TSecretTagsInsert,
  TSecretTagsUpdate,
  TSecretV2TagJunction,
  TSecretV2TagJunctionInsert,
  TSecretV2TagJunctionUpdate,
  TSecretVersions,
  TSecretVersionsInsert,
  TSecretVersionsUpdate,
  TSecretVersionsV2,
  TSecretVersionsV2Insert,
  TSecretVersionsV2Update,
  TSecretVersionTagJunction,
  TSecretVersionTagJunctionInsert,
  TSecretVersionTagJunctionUpdate,
@ -411,6 +363,24 @@ import {
  TWorkflowIntegrationsInsert,
  TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
  TExternalGroupOrgRoleMappings,
  TExternalGroupOrgRoleMappingsInsert,
  TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
  TSecretV2TagJunction,
  TSecretV2TagJunctionInsert,
  TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import {
  TSecretVersionsV2,
  TSecretVersionsV2Insert,
  TSecretVersionsV2Update
} from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";

declare module "knex" {
  namespace Knex {
@ -918,53 +888,11 @@ declare module "knex/types/tables" {
      TProjectSplitBackfillIdsInsert,
      TProjectSplitBackfillIdsUpdate
    >;
    [TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
      TResourceMetadata,
      TResourceMetadataInsert,
      TResourceMetadataUpdate
    >;
    [TableName.AppConnection]: KnexOriginal.CompositeTableType<
      TAppConnections,
      TAppConnectionsInsert,
      TAppConnectionsUpdate
    >;
    [TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
    [TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
    [TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
      TKmipOrgConfigs,
      TKmipOrgConfigsInsert,
      TKmipOrgConfigsUpdate
    >;
    [TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
      TKmipOrgServerCertificates,
      TKmipOrgServerCertificatesInsert,
      TKmipOrgServerCertificatesUpdate
    >;
    [TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
      TKmipClientCertificates,
      TKmipClientCertificatesInsert,
      TKmipClientCertificatesUpdate
    >;
    [TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
    [TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
      TProjectGateways,
      TProjectGatewaysInsert,
      TProjectGatewaysUpdate
    >;
    [TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
      TOrgGatewayConfig,
      TOrgGatewayConfigInsert,
      TOrgGatewayConfigUpdate
    >;
    [TableName.SecretRotationV2]: KnexOriginal.CompositeTableType<
      TSecretRotationsV2,
      TSecretRotationsV2Insert,
      TSecretRotationsV2Update
    >;
    [TableName.SecretRotationV2SecretMapping]: KnexOriginal.CompositeTableType<
      TSecretRotationV2SecretMappings,
      TSecretRotationV2SecretMappingsInsert,
      TSecretRotationV2SecretMappingsUpdate
    >;
  }
}
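For readers unfamiliar with the pattern: CompositeTableType registrations like the ones above are what give knex(tableName) calls typed select, insert, and update shapes. A generic, self-contained sketch of the mechanism, with an invented widgets table standing in for the generated schema types:

import { knex, Knex } from "knex";

// invented example types, standing in for the generated schema types above
type TWidgets = { id: string; name: string; createdAt: Date };
type TWidgetsInsert = { name: string };
type TWidgetsUpdate = Partial<TWidgetsInsert>;

declare module "knex/types/tables" {
  interface Tables {
    // select type, insert type, update type
    widgets: Knex.CompositeTableType<TWidgets, TWidgetsInsert, TWidgetsUpdate>;
  }
}

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

async function demo() {
  // row is typed as TWidgets; the insert payload is checked against TWidgetsInsert
  const row = await db("widgets").where({ name: "a" }).first();
  await db("widgets").insert({ name: "b" });
  return row;
}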
@ -1,105 +0,0 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
  auditLogDb?: Knex;
  applicationDb: Knex;
  logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
  directory: path.join(__dirname, "./db/migrations"),
  loadExtensions: [".mjs", ".ts"],
  tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
  // happens on first boot, when the migration table itself has not been created yet
  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
  if (err?.message?.includes("does not exist")) {
    return true;
  }
  throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
  try {
    // akhilmhdh(Feb 10 2025): 2 years from now remove this
    if (isProduction) {
      const migrationTable = migrationConfig.tableName;
      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
      if (hasMigrationTable) {
        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
        if (firstFile?.name?.includes(".ts")) {
          await applicationDb(migrationTable).update({
            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
          });
        }
      }
      if (auditLogDb) {
        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
        if (hasMigrationTableInAuditLog) {
          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
          if (firstFile?.name?.includes(".ts")) {
            await auditLogDb(migrationTable).update({
              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
            });
          }
        }
      }
    }

    const shouldRunMigration = Boolean(
      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
    ); // db.length - code.length
    if (!shouldRunMigration) {
      logger.info("No migrations pending: Skipping migration process.");
      return;
    }

    if (auditLogDb) {
      await auditLogDb.transaction(async (tx) => {
        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
        logger.info("Running audit log migrations.");

        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
          .status(migrationConfig)
          .catch(migrationStatusCheckErrorHandler));
        if (didPreviousInstanceRunMigration) {
          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
          return;
        }

        await auditLogDb.migrate.latest(migrationConfig);
        logger.info("Finished audit log migrations.");
      });
    }

    await applicationDb.transaction(async (tx) => {
      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
      logger.info("Running application migrations.");

      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
        .status(migrationConfig)
        .catch(migrationStatusCheckErrorHandler));
      if (didPreviousInstanceRunMigration) {
        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
        return;
      }

      await applicationDb.migrate.latest(migrationConfig);
      logger.info("Finished application migrations.");
    });
  } catch (err) {
    logger.error(err, "Boot up migration failed");
    process.exit(1);
  }
};
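The deleted runner above serializes concurrent boot-ups with pg_advisory_xact_lock before re-checking migration status, so only one instance applies migrations and late arrivals skip the work a peer already did. A minimal standalone sketch of that locking pattern (the lock key is an assumption; any stable integer works):

import { Knex } from "knex";

// only one instance at a time enters the critical section; the lock is
// released automatically when the wrapping transaction ends
const BOOT_UP_MIGRATION_LOCK = 2001; // assumed key for illustration

export const runOnce = async (db: Knex, migrate: () => Promise<void>) => {
  await db.transaction(async (tx) => {
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [BOOT_UP_MIGRATION_LOCK]);
    const [, pending] = await db.migrate.list();
    if (pending.length === 0) return; // a previous lock holder already migrated
    await migrate();
  });
};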
@ -49,9 +49,6 @@ export const initDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
  },
  migrations: {
    tableName: "infisical_migrations"
  }
});

@ -67,9 +64,6 @@ export const initDbConnection = ({
            ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
          }
        : false
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  });
});
@ -104,9 +98,6 @@ export const initAuditLogDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
  },
  migrations: {
    tableName: "infisical_migrations"
  }
});
@ -38,8 +38,7 @@ export default {
    seeds: {
      directory: "./seeds"
    },
    migrations: {
      tableName: "infisical_migrations",
      loadExtensions: [".mjs", ".ts"]
      tableName: "infisical_migrations"
    }
  },
  production: {
@ -63,8 +62,7 @@ export default {
      max: 10
    },
    migrations: {
      tableName: "infisical_migrations",
      loadExtensions: [".mjs", ".ts"]
      tableName: "infisical_migrations"
    }
  }
} as Knex.Config;
@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
  const startDateStr = formatPartitionDate(startDate);
  const endDateStr = formatPartitionDate(endDate);

  const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
  const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;

  await knex.schema.raw(
    `CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
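The swap from replaceAll to replace(/-/g, ...) above is behavior-preserving: a global regex replaces every hyphen, and it also runs on older compilation targets without String.prototype.replaceAll. A quick check:

const d = "2025-02-10";
console.log(d.replaceAll("-", "")); // "20250210"
console.log(d.replace(/-/g, ""));   // "20250210" (same result)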
@ -1,40 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
    await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("key").notNullable();
      tb.string("value", 1020).notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.uuid("userId");
      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      tb.uuid("identityId");
      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      tb.uuid("secretId");
      tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
      tb.timestamps(true, true, true);
    });
  }

  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
  if (!hasSecretMetadataField) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
      t.jsonb("secretMetadata");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ResourceMetadata);

  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
  if (hasSecretMetadataField) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
      t.dropColumn("secretMetadata");
    });
  }
}
@ -10,34 +10,30 @@ export async function up(knex: Knex): Promise<void> {
      t.string("name", 32).notNullable();
      t.string("description");
      t.string("destination").notNullable();
      t.boolean("isAutoSyncEnabled").notNullable().defaultTo(true);
      t.boolean("isEnabled").notNullable().defaultTo(true);
      t.integer("version").defaultTo(1).notNullable();
      t.jsonb("destinationConfig").notNullable();
      t.jsonb("syncOptions").notNullable();
      // we're including projectId in addition to folderId because we allow folderId to be null (if the folder
      // is deleted), to preserve sync configuration
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("folderId");
      t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("SET NULL");
      t.uuid("folderId").notNullable();
      t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
      t.uuid("connectionId").notNullable();
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.timestamps(true, true, true);
      // sync secrets to destination
      // sync
      t.string("syncStatus");
      t.string("lastSyncJobId");
      t.string("lastSyncMessage");
      t.datetime("lastSyncedAt");
      // import secrets from destination
      // import
      t.string("importStatus");
      t.string("lastImportJobId");
      t.string("lastImportMessage");
      t.datetime("lastImportedAt");
      // remove secrets from destination
      t.string("removeStatus");
      t.string("lastRemoveJobId");
      t.string("lastRemoveMessage");
      t.datetime("lastRemovedAt");
      // erase
      t.string("eraseStatus");
      t.string("lastEraseJobId");
      t.string("lastEraseMessage");
      t.datetime("lastErasedAt");
    });

  await createOnUpdateTrigger(knex, TableName.SecretSync);
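The hunk above moves the folderId foreign key from a non-nullable CASCADE to a nullable SET NULL, which is what lets a sync row outlive its folder. A small hypothetical sketch contrasting the two behaviors (table names invented for illustration):

import { Knex } from "knex";

// with CASCADE, deleting the parent folder also deletes the sync row:
//   t.foreign("folderId").references("id").inTable("secret_folders").onDelete("CASCADE");
// with SET NULL, the sync row survives and only loses its folder pointer:
export async function up(knex: Knex): Promise<void> {
  await knex.schema.createTable("demo_syncs", (t) => {
    t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
    t.uuid("folderId"); // nullable on purpose, so SET NULL has somewhere to go
    t.foreign("folderId").references("id").inTable("secret_folders").onDelete("SET NULL");
  });
}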
@ -1,49 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  // find any duplicate group names within organizations
  const duplicates = await knex(TableName.Groups)
    .select("orgId", "name")
    .count("* as count")
    .groupBy("orgId", "name")
    .having(knex.raw("count(*) > 1"));

  // for each set of duplicates, update all but one with a numbered suffix
  for await (const duplicate of duplicates) {
    const groups = await knex(TableName.Groups)
      .select("id", "name")
      .where({
        orgId: duplicate.orgId,
        name: duplicate.name
      })
      .orderBy("createdAt", "asc"); // keep original name for oldest group

    // skip the first (oldest) group, rename others with numbered suffix
    for (let i = 1; i < groups.length; i += 1) {
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.Groups)
        .where("id", groups[i].id)
        .update({
          name: `${groups[i].name} (${i})`,

          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore TS doesn't know about Knex's timestamp types
          updatedAt: new Date()
        });
    }
  }

  // add the unique constraint
  await knex.schema.alterTable(TableName.Groups, (t) => {
    t.unique(["orgId", "name"]);
  });
}

export async function down(knex: Knex): Promise<void> {
  // Remove the unique constraint
  await knex.schema.alterTable(TableName.Groups, (t) => {
    t.dropUnique(["orgId", "name"]);
  });
}
@ -1,33 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
  const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");

  await knex.schema.alterTable(TableName.Project, (t) => {
    if (!hasEnforceCapitalizationCol) {
      t.boolean("enforceCapitalization").defaultTo(false).notNullable();
    }

    if (hasAutoCapitalizationCol) {
      t.boolean("autoCapitalization").defaultTo(false).alter();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
  const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");

  await knex.schema.alterTable(TableName.Project, (t) => {
    if (hasEnforceCapitalizationCol) {
      t.dropColumn("enforceCapitalization");
    }

    if (hasAutoCapitalizationCol) {
      t.boolean("autoCapitalization").defaultTo(true).alter();
    }
  });
}
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");

  await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
    if (!hasManageGroupMembershipsCol) {
      tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");

  await knex.schema.alterTable(TableName.OidcConfig, (t) => {
    if (hasManageGroupMembershipsCol) {
      t.dropColumn("manageGroupMemberships");
    }
  });
}
@ -1,108 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
  if (!hasKmipClientTable) {
    await knex.schema.createTable(TableName.KmipClient, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.specificType("permissions", "text[]");
      t.string("description");
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
    });
  }

  const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
  if (!hasKmipOrgPkiConfig) {
    await knex.schema.createTable(TableName.KmipOrgConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.unique("orgId");

      t.string("caKeyAlgorithm").notNullable();

      t.datetime("rootCaIssuedAt").notNullable();
      t.datetime("rootCaExpiration").notNullable();
      t.string("rootCaSerialNumber").notNullable();
      t.binary("encryptedRootCaCertificate").notNullable();
      t.binary("encryptedRootCaPrivateKey").notNullable();

      t.datetime("serverIntermediateCaIssuedAt").notNullable();
      t.datetime("serverIntermediateCaExpiration").notNullable();
      t.string("serverIntermediateCaSerialNumber");
      t.binary("encryptedServerIntermediateCaCertificate").notNullable();
      t.binary("encryptedServerIntermediateCaChain").notNullable();
      t.binary("encryptedServerIntermediateCaPrivateKey").notNullable();

      t.datetime("clientIntermediateCaIssuedAt").notNullable();
      t.datetime("clientIntermediateCaExpiration").notNullable();
      t.string("clientIntermediateCaSerialNumber").notNullable();
      t.binary("encryptedClientIntermediateCaCertificate").notNullable();
      t.binary("encryptedClientIntermediateCaChain").notNullable();
      t.binary("encryptedClientIntermediateCaPrivateKey").notNullable();

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.KmipOrgConfig);
  }

  const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
  if (!hasKmipOrgServerCertTable) {
    await knex.schema.createTable(TableName.KmipOrgServerCertificates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.string("commonName").notNullable();
      t.string("altNames").notNullable();
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
      t.binary("encryptedCertificate").notNullable();
      t.binary("encryptedChain").notNullable();
    });
  }

  const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
  if (!hasKmipClientCertTable) {
    await knex.schema.createTable(TableName.KmipClientCertificates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("kmipClientId").notNullable();
      t.foreign("kmipClientId").references("id").inTable(TableName.KmipClient).onDelete("CASCADE");
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
  if (hasKmipOrgPkiConfig) {
    await knex.schema.dropTable(TableName.KmipOrgConfig);
    await dropOnUpdateTrigger(knex, TableName.KmipOrgConfig);
  }

  const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
  if (hasKmipOrgServerCertTable) {
    await knex.schema.dropTable(TableName.KmipOrgServerCertificates);
  }

  const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
  if (hasKmipClientCertTable) {
    await knex.schema.dropTable(TableName.KmipClientCertificates);
  }

  const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
  if (hasKmipClientTable) {
    await knex.schema.dropTable(TableName.KmipClient);
  }
}
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.unique(["orgId", "name"]);
  });

  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.unique(["projectId", "name"]);
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.AppConnection, (t) => {
    t.dropUnique(["orgId", "name"]);
  });

  await knex.schema.alterTable(TableName.SecretSync, (t) => {
    t.dropUnique(["projectId", "name"]);
  });
}
@ -1,37 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
  const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
  const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
    TableName.IdentityGcpAuth,
    "allowedServiceAccounts"
  );
  const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
  if (hasTable) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
      if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
      if (hasAllowedZones) t.string("allowedZones", 2500).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
  const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
  const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
    TableName.IdentityGcpAuth,
    "allowedServiceAccounts"
  );
  const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
  if (hasTable) {
    await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
      if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
      if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
      if (hasAllowedZones) t.string("allowedZones").alter();
    });
  }
}
@ -1,27 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.dropColumn("slug");
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (!hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.string("slug", 32);
      });
    }
  }
}
@ -1,31 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
      if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
      if (hasLastImportMessage) t.string("lastImportMessage").alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
    });
  }
}
@ -1,130 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
  const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedKey) t.binary("encryptedPassKey");
      if (!hasEncryptedUrl) t.binary("encryptedUrl");
      if (hasUrl) t.string("url").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const webhooks = await knex(TableName.Webhook)
    .where({})
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
    .select(
      "url",
      "encryptedSecretKey",
      "iv",
      "tag",
      "keyEncoding",
      "urlCipherText",
      "urlIV",
      "urlTag",
      knex.ref("id").withSchema(TableName.Webhook),
      "envId"
    )
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedWebhooks = await Promise.all(
    webhooks.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId: el.projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      let encryptedSecretKey = null;
      if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
        const decyptedSecretKey = infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.iv,
          tag: el.tag,
          ciphertext: el.encryptedSecretKey
        });
        encryptedSecretKey = projectKmsService.encryptor({
          plainText: Buffer.from(decyptedSecretKey, "utf8")
        }).cipherTextBlob;
      }

      const decryptedUrl =
        el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.urlIV,
              tag: el.urlTag,
              ciphertext: el.urlCipherText
            })
          : null;

      const encryptedUrl = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedUrl || el.url || "")
      }).cipherTextBlob;
      return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
    })
  );

  for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.Webhook)
      .insert(
        updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
          id: el.id,
          envId: el.envId,
          url: "",
          encryptedUrl: el.encryptedUrl,
          encryptedPassKey: el.encryptedSecretKey
        }))
      )
      .onConflict("id")
      .merge();
  }

  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
      if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
    });
  }
}
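Several of these re-encryption migrations share the createCircularCache helper to avoid re-deriving a KMS cipher pair for every row. Its implementation is not part of this diff, so the following is only a hypothetical sketch that matches the getItem/push interface used above:

// a fixed-capacity key/value cache: once full, new entries overwrite the oldest slot
export const createCircularCache = <T>(size = 25) => {
  const keys: string[] = [];
  const items = new Map<string, T>();
  let cursor = 0;

  const getItem = (key: string): T | undefined => items.get(key);

  const push = (key: string, value: T) => {
    if (items.has(key)) {
      items.set(key, value); // refresh an existing entry in place
      return;
    }
    if (keys.length === size) {
      items.delete(keys[cursor]); // evict whatever this slot previously held
      keys[cursor] = key;
    } else {
      keys.push(key);
    }
    cursor = (cursor + 1) % size;
    items.set(key, value);
  };

  return { getItem, push };
};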
@ -1,111 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
  const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
  const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
  const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput");
      if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
      if (hasInputIVColumn) t.string("inputIV").nullable().alter();
      if (hasInputTagColumn) t.string("inputTag").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
    .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
    .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
    .select(selectAllTableCols(TableName.DynamicSecret))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedDynamicSecrets = await Promise.all(
    dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedInputData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.inputIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.inputTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.inputCiphertext
            })
          : "";

      const encryptedInput = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedInputData)
      }).cipherTextBlob;

      return { ...el, encryptedInput };
    })
  );

  for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.DynamicSecret)
      .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
    });
  }
}
@ -1,103 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const secretRotations = await knex(TableName.SecretRotation)
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
    .select(selectAllTableCols(TableName.SecretRotation))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedRotationData = await Promise.all(
    secretRotations.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedRotationData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.encryptedDataIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.encryptedDataTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.encryptedData
            })
          : "";

      const encryptedRotationData = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedRotationData)
      }).cipherTextBlob;
      return { ...el, encryptedRotationData };
    })
  );

  for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SecretRotation)
      .insert(updatedRotationData.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
    });
  }
}
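These re-encryption migrations cache one KMS cipher pair per project or organization in a small ring buffer from ./utils/ring-buffer; because the rows are ordered by projectId/orgId, a capacity of 25 yields near-perfect cache hits while bounding memory. The utility itself is not shown in this diff, so the following is only a plausible shape consistent with how it is called (getItem/push, fixed capacity, oldest entry evicted); the real implementation may differ:

  // Fixed-capacity key/value cache; evicts the oldest entry once full.
  // Illustrative sketch only: the actual ./utils/ring-buffer may differ.
  function createCircularCache<T>(capacity = 25) {
    const entries: { key: string; value: T }[] = [];
    return {
      getItem(key: string): T | undefined {
        return entries.find((e) => e.key === key)?.value;
      },
      push(key: string, value: T): void {
        if (entries.length >= capacity) entries.shift(); // drop oldest
        entries.push({ key, value });
      }
    };
  }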
@ -1,200 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
  const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedTokenReviewerJwt"
  );
  const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtIV"
  );
  const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtTag"
  );

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
      if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
      if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();

      if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityKubernetesAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityKubernetesConfigs = [];

  for await (const {
    encryptedSymmetricKey,
    symmetricKeyKeyEncoding,
    symmetricKeyTag,
    symmetricKeyIV,
    orgId,
    ...el
  } of identityKubernetesConfigs) {
    let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);

    if (!orgKmsService) {
      orgKmsService = await kmsService.createCipherPairWithDataKey(
        {
          type: KmsDataKey.Organization,
          orgId
        },
        knex
      );
      orgEncryptionRingBuffer.push(orgId, orgKmsService);
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const decryptedTokenReviewerJwt =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.tokenReviewerJwtIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.tokenReviewerJwtTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedTokenReviewerJwt
          })
        : "";

    const decryptedCertificate =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedCaCert && el.caCertIV && el.caCertTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.caCertIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.caCertTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedCaCert
          })
        : "";

    const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedTokenReviewerJwt)
    }).cipherTextBlob;
    const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedCertificate)
    }).cipherTextBlob;

    updatedIdentityKubernetesConfigs.push({
      ...el,
      accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
      encryptedKubernetesCaCertificate,
      encryptedKubernetesTokenReviewerJwt
    });
  }

  for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityKubernetesAuth)
      .insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (!hasEncryptedKubernetesTokenReviewerJwt)
        t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityK8sAuth(knex);
}

const dropIdentityK8sColumns = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityK8sColumns(knex);
}
@ -1,141 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");

  if (hasidentityOidcAuthTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();

      if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityOidcAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityOidcAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityOidcConfigs = await Promise.all(
    identityOidcConfig.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCaCert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCaCert
              })
            : "";

        const encryptedCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;

        return {
          ...el,
          accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
          encryptedCaCertificate
        };
      }
    )
  );

  for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityOidcAuth)
      .insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityOidcAuth(knex);
}

const dropIdentityOidcColumns = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  if (hasidentityOidcTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityOidcColumns(knex);
}
@ -1,493 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const samlConfigs = await knex(TableName.SamlConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
    .select(selectAllTableCols(TableName.SamlConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedSamlConfigs = await Promise.all(
    samlConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedEntryPoint =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.entryPointIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.entryPointTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedEntryPoint
              })
            : "";

        const decryptedIssuer =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedIssuer && el.issuerIV && el.issuerTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.issuerIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.issuerTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedIssuer
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCert && el.certIV && el.certTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.certIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.certTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCert
              })
            : "";

        const encryptedSamlIssuer = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedIssuer)
        }).cipherTextBlob;
        const encryptedSamlCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        const encryptedSamlEntryPoint = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedEntryPoint)
        }).cipherTextBlob;
        return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
      }
    )
  );

  for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SamlConfig)
      .insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
    });
  }
};

const reencryptLdapConfig = async (knex: Knex) => {
  const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
  const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
  const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
  const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
  const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
  const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
  const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
      if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
      if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
      if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
      if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
      if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();

      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const ldapConfigs = await knex(TableName.LdapConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
    .select(selectAllTableCols(TableName.LdapConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedLdapConfigs = await Promise.all(
    ldapConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedBindDN =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindDN && el.bindDNIV && el.bindDNTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindDNIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindDNTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindDN
              })
            : "";

        const decryptedBindPass =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindPass && el.bindPassIV && el.bindPassTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindPassIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindPassTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindPass
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCACert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCACert
              })
            : "";

        const encryptedLdapBindDN = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindDN)
        }).cipherTextBlob;
        const encryptedLdapBindPass = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindPass)
        }).cipherTextBlob;
        const encryptedLdapCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
      }
    )
  );

  for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.LdapConfig)
      .insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
    });
  }
};

const reencryptOidcConfig = async (knex: Knex) => {
  const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
    TableName.OidcConfig,
    "encryptedOidcClientSecret"
  );

  const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
  const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
  const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
  const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
  const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
  const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");

  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
      if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
      if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
      if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
      if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
      if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();

      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const oidcConfigs = await knex(TableName.OidcConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
    .select(selectAllTableCols(TableName.OidcConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedOidcConfigs = await Promise.all(
    oidcConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedClientId =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientId && el.clientIdIV && el.clientIdTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientIdIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientIdTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientId
              })
            : "";

        const decryptedClientSecret =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientSecretIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientSecretTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientSecret
              })
            : "";

        const encryptedOidcClientId = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientId)
        }).cipherTextBlob;
        const encryptedOidcClientSecret = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientSecret)
        }).cipherTextBlob;
        return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
      }
    )
  );

  for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.OidcConfig)
      .insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptSamlConfig(knex);
  await reencryptLdapConfig(knex);
  await reencryptOidcConfig(knex);
}

const dropSamlConfigColumns = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
      if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
    });
  }
};

const dropLdapConfigColumns = async (knex: Knex) => {
  const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
      if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
    });
  }
};

const dropOidcConfigColumns = async (knex: Knex) => {
  const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
      if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropSamlConfigColumns(knex);
  await dropLdapConfigColumns(knex);
  await dropOidcConfigColumns(knex);
}
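All of the re-encryption migrations above share one flow: recover the legacy org-bot symmetric key with infisicalSymmetricDecrypt, use it to decrypt each old ciphertext/IV/tag column triple, then re-encrypt the plaintext under the organization's KMS data key. Condensed to its core and stripped of the per-field @ts-ignore noise, the step looks like this sketch; legacyField is a hypothetical grouping of one old column triple, not a real column:

  const key = infisicalSymmetricDecrypt({
    ciphertext: encryptedSymmetricKey, // org-bot key material from the OrgBot row
    iv: symmetricKeyIV,
    tag: symmetricKeyTag,
    keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
  });

  // Decrypt the legacy triple when present; absent values fall back to "".
  const plaintext =
    legacyField.ciphertext && legacyField.iv && legacyField.tag
      ? decryptSymmetric({ key, ...legacyField })
      : "";

  // Re-encrypt under the org's KMS data key; the blob is what gets persisted.
  const encryptedBlob = orgKmsService.encryptor({
    plainText: Buffer.from(plaintext)
  }).cipherTextBlob;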
@ -1,115 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
    await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("rootCaKeyAlgorithm").notNullable();

      t.datetime("rootCaIssuedAt").notNullable();
      t.datetime("rootCaExpiration").notNullable();
      t.string("rootCaSerialNumber").notNullable();
      t.binary("encryptedRootCaCertificate").notNullable();
      t.binary("encryptedRootCaPrivateKey").notNullable();

      t.datetime("clientCaIssuedAt").notNullable();
      t.datetime("clientCaExpiration").notNullable();
      t.string("clientCaSerialNumber");
      t.binary("encryptedClientCaCertificate").notNullable();
      t.binary("encryptedClientCaPrivateKey").notNullable();

      t.string("clientCertSerialNumber").notNullable();
      t.string("clientCertKeyAlgorithm").notNullable();
      t.datetime("clientCertIssuedAt").notNullable();
      t.datetime("clientCertExpiration").notNullable();
      t.binary("encryptedClientCertificate").notNullable();
      t.binary("encryptedClientPrivateKey").notNullable();

      t.datetime("gatewayCaIssuedAt").notNullable();
      t.datetime("gatewayCaExpiration").notNullable();
      t.string("gatewayCaSerialNumber").notNullable();
      t.binary("encryptedGatewayCaCertificate").notNullable();
      t.binary("encryptedGatewayCaPrivateKey").notNullable();

      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.unique("orgId");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
  }

  if (!(await knex.schema.hasTable(TableName.Gateway))) {
    await knex.schema.createTable(TableName.Gateway, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.string("name").notNullable();
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
      t.datetime("heartbeat");

      t.binary("relayAddress").notNullable();

      t.uuid("orgGatewayRootCaId").notNullable();
      t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");

      t.uuid("identityId").notNullable();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.Gateway);
  }

  if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
    await knex.schema.createTable(TableName.ProjectGateway, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");

      t.uuid("gatewayId").notNullable();
      t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.ProjectGateway);
  }

  if (await knex.schema.hasTable(TableName.DynamicSecret)) {
    const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      // not setting a foreign constraint so that cascade effects are not triggered
      if (!doesGatewayColExist) {
        t.uuid("projectGatewayId");
        t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.DynamicSecret)) {
    const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (doesGatewayColExist) t.dropColumn("projectGatewayId");
    });
  }

  await knex.schema.dropTableIfExists(TableName.ProjectGateway);
  await dropOnUpdateTrigger(knex, TableName.ProjectGateway);

  await knex.schema.dropTableIfExists(TableName.Gateway);
  await dropOnUpdateTrigger(knex, TableName.Gateway);

  await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
  await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}
@ -1,25 +0,0 @@
import { Knex } from "knex";

import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");

  await knex.schema.alterTable(TableName.SecretSharing, (table) => {
    if (!hasSharingTypeColumn) {
      table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");

  await knex.schema.alterTable(TableName.SecretSharing, (table) => {
    if (hasSharingTypeColumn) {
      table.dropColumn("type");
    }
  });
}
@ -1,31 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
  const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
  if (await knex.schema.hasTable(TableName.SuperAdmin)) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      if (!hasAuthConsentContentCol) {
        t.text("authConsentContent");
      }
      if (!hasPageFrameContentCol) {
        t.text("pageFrameContent");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
  const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    if (hasAuthConsentContentCol) {
      t.dropColumn("authConsentContent");
    }
    if (hasPageFrameContentCol) {
      t.dropColumn("pageFrameContent");
    }
  });
}
@ -1,35 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  for await (const tableName of [
    TableName.SecretV2,
    TableName.SecretVersionV2,
    TableName.SecretApprovalRequestSecretV2
  ]) {
    const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");

    if (hasReminderNoteCol) {
      await knex.schema.alterTable(tableName, (t) => {
        t.string("reminderNote", 1024).alter();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  for await (const tableName of [
    TableName.SecretV2,
    TableName.SecretVersionV2,
    TableName.SecretApprovalRequestSecretV2
  ]) {
    const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");

    if (hasReminderNoteCol) {
      await knex.schema.alterTable(tableName, (t) => {
        t.string("reminderNote").alter();
      });
    }
  }
}
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropColumn("description");
    });
  }
}
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.string("comment");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.dropColumn("comment");
    });
  }
}
@ -1,45 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
    const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
    const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
    const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");

    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      if (!hasSecretVersionV2UserActorId) {
        t.uuid("userActorId");
        t.foreign("userActorId").references("id").inTable(TableName.Users);
      }
      if (!hasSecretVersionV2IdentityActorId) {
        t.uuid("identityActorId");
        t.foreign("identityActorId").references("id").inTable(TableName.Identity);
      }
      if (!hasSecretVersionV2ActorType) {
        t.string("actorType");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
    const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
    const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
    const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");

    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      if (hasSecretVersionV2UserActorId) {
        t.dropColumn("userActorId");
      }
      if (hasSecretVersionV2IdentityActorId) {
        t.dropColumn("identityActorId");
      }
      if (hasSecretVersionV2ActorType) {
        t.dropColumn("actorType");
      }
    });
  }
}
@ -1,32 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
      TableName.Organization,
      "allowSecretSharingOutsideOrganization"
    );

    if (!hasSecretShareToAnyoneCol) {
      await knex.schema.alterTable(TableName.Organization, (t) => {
        t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
      TableName.Organization,
      "allowSecretSharingOutsideOrganization"
    );
    if (hasSecretShareToAnyoneCol) {
      await knex.schema.alterTable(TableName.Organization, (t) => {
        t.dropColumn("allowSecretSharingOutsideOrganization");
      });
    }
  }
}
@ -1,31 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.boolean("shouldUseNewPrivilegeSystem");
      t.string("privilegeUpgradeInitiatedByUsername");
      t.dateTime("privilegeUpgradeInitiatedAt");
    });

    await knex(TableName.Organization).update({
      shouldUseNewPrivilegeSystem: false
    });

    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.dropColumn("shouldUseNewPrivilegeSystem");
      t.dropColumn("privilegeUpgradeInitiatedByUsername");
      t.dropColumn("privilegeUpgradeInitiatedAt");
    });
  }
}
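The shouldUseNewPrivilegeSystem migration above deliberately takes three steps: add the column with no default (existing rows become NULL), backfill existing organizations with false so they keep the old behavior, and only then attach defaultTo(true) with NOT NULL so newly created organizations opt in. Collapsing this into one step would either stamp true onto existing rows or leave new rows without a default. A generic sketch of the pattern, with hypothetical table and column names:

  import { Knex } from "knex";

  async function addFlagWithGrandfathering(knex: Knex): Promise<void> {
    await knex.schema.alterTable("widgets", (t) => {
      t.boolean("newFlag"); // step 1: nullable, no default yet
    });
    await knex("widgets").update({ newFlag: false }); // step 2: existing rows keep legacy behavior
    await knex.schema.alterTable("widgets", (t) => {
      t.boolean("newFlag").defaultTo(true).notNullable().alter(); // step 3: future rows default on
    });
  }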
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
  if (!hasMappingField) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.jsonb("claimMetadataMapping");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
  if (hasMappingField) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      t.dropColumn("claimMetadataMapping");
    });
  }
}
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.specificType("adminIdentityIds", "text[]");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("adminIdentityIds");
    });
  }
}
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
  const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
  if (doesParentColumExist && doesNameColumnExist) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.index(["parentId", "name"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
  const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
  if (doesParentColumExist && doesNameColumnExist) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropIndex(["parentId", "name"]);
    });
  }
}
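For context, a hedged sketch of the lookup shape the composite ["parentId", "name"] index above serves; the helper name is illustrative:

import { Knex } from "knex";

import { TableName } from "../schemas";

// Resolving a child folder by its parent and name can be answered directly from
// the composite ["parentId", "name"] index instead of a sequential scan.
export const findFolderByParentAndName = async (knex: Knex, parentId: string, name: string) =>
  knex(TableName.SecretFolder).where({ parentId, name }).first();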
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasReviewerJwtCol = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  if (hasReviewerJwtCol) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
    });
  }
}

export async function down(): Promise<void> {
  // we can't change the column back to non-nullable; the alter would fail on rows that now hold null
}
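The empty down above is deliberate: re-adding NOT NULL fails once rows hold null. As a purely hypothetical sketch, a reversible variant would have to backfill a placeholder first:

import { Knex } from "knex";

import { TableName } from "../schemas";

// Hypothetical revert: give null rows a placeholder value before re-tightening the constraint.
export async function down(knex: Knex): Promise<void> {
  await knex(TableName.IdentityKubernetesAuth)
    .whereNull("encryptedKubernetesTokenReviewerJwt")
    .update({ encryptedKubernetesTokenReviewerJwt: Buffer.alloc(0) });

  await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
    t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
  });
}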
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas/models";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
    });
  }
  if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("allowedSelfApprovals");
    });
  }
  if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("allowedSelfApprovals");
    });
  }
}
@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials"))) {
    await knex.schema.alterTable(TableName.AppConnection, (t) => {
      t.boolean("isPlatformManagedCredentials").defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials")) {
    await knex.schema.alterTable(TableName.AppConnection, (t) => {
      t.dropColumn("isPlatformManagedCredentials");
    });
  }
}
@ -1,58 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretRotationV2))) {
    await knex.schema.createTable(TableName.SecretRotationV2, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name", 32).notNullable();
      t.string("description");
      t.string("type").notNullable();
      t.jsonb("parameters").notNullable();
      t.jsonb("secretsMapping").notNullable();
      t.binary("encryptedGeneratedCredentials").notNullable();
      t.boolean("isAutoRotationEnabled").notNullable().defaultTo(true);
      t.integer("activeIndex").notNullable().defaultTo(0);
      t.uuid("folderId").notNullable();
      t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
      t.uuid("connectionId").notNullable();
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.timestamps(true, true, true);
      t.integer("rotationInterval").notNullable();
      t.jsonb("rotateAtUtc").notNullable(); // { hours: number; minutes: number }
      t.string("rotationStatus").notNullable();
      t.datetime("lastRotationAttemptedAt").notNullable();
      t.datetime("lastRotatedAt").notNullable();
      t.binary("encryptedLastRotationMessage"); // we encrypt this because it may contain sensitive info (SQL errors showing credentials)
      t.string("lastRotationJobId");
      t.datetime("nextRotationAt");
      t.boolean("isLastRotationManual").notNullable().defaultTo(true); // creation is considered a "manual" rotation
    });

    await createOnUpdateTrigger(knex, TableName.SecretRotationV2);

    await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
      t.unique(["folderId", "name"]);
    });
  }

  if (!(await knex.schema.hasTable(TableName.SecretRotationV2SecretMapping))) {
    await knex.schema.createTable(TableName.SecretRotationV2SecretMapping, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("secretId").notNullable();
      // scott: this is deferred to block secret deletion but not prevent folder/environment/project deletion
      // i.e., if the rotation is being deleted as well we permit it, otherwise throw
      t.foreign("secretId").references("id").inTable(TableName.SecretV2).deferrable("deferred");
      t.uuid("rotationId").notNullable();
      t.foreign("rotationId").references("id").inTable(TableName.SecretRotationV2).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretRotationV2SecretMapping);
  await knex.schema.dropTableIfExists(TableName.SecretRotationV2);
  await dropOnUpdateTrigger(knex, TableName.SecretRotationV2);
}
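A short sketch of what the deferrable("deferred") foreign key on the mapping table buys, assuming both deletes run inside one transaction; the helper name and ids are illustrative:

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export const deleteRotationWithSecrets = async (knex: Knex, rotationId: string, secretIds: string[]) =>
  knex.transaction(async (tx) => {
    // This delete would violate the secretId FK immediately if it were not deferred...
    await tx(TableName.SecretV2).whereIn("id", secretIds).delete();
    // ...but the check only runs at commit, by which point the rotation delete has
    // cascaded away the mapping rows that referenced those secrets.
    await tx(TableName.SecretRotationV2).where({ id: rotationId }).delete();
  });

Deleting a mapped secret on its own, outside such a transaction, still fails at commit time, which is exactly the "block secret deletion" behavior the inline comment describes.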
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
  if (!hasCol) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.datetime("lastSecretModified");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
  if (hasCol) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropColumn("lastSecretModified");
    });
  }
}
@ -1,20 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.ResourceMetadata, "dynamicSecretId"))) {
    await knex.schema.alterTable(TableName.ResourceMetadata, (tb) => {
      tb.uuid("dynamicSecretId");
      tb.foreign("dynamicSecretId").references("id").inTable(TableName.DynamicSecret).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.ResourceMetadata, "dynamicSecretId")) {
    await knex.schema.alterTable(TableName.ResourceMetadata, (tb) => {
      tb.dropColumn("dynamicSecretId");
    });
  }
}
@ -1,53 +0,0 @@
import { z } from "zod";

import { zpStr } from "@app/lib/zod";

const envSchema = z
  .object({
    DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
      `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
    ),
    DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
    DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
    DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
    DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
    DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
    DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
    // TODO(akhilmhdh): will be changed to one
    ENCRYPTION_KEY: zpStr(z.string().optional()),
    ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
  }));

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
    console.error("Invalid environment variables. Check the error below");
    // eslint-disable-next-line no-console
    console.error(
      "Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
    );
    // eslint-disable-next-line no-console
    console.error(parsedEnv.error.issues);
    process.exit(-1);
  }

  return Object.freeze(parsedEnv.data);
};
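A brief usage sketch of the loader above; the environment values here are illustrative only:

process.env.DB_CONNECTION_URI = "postgresql://user:pass@localhost:5432/infisical"; // illustrative
process.env.ENCRYPTION_KEY = "example-utf8-encryption-key"; // illustrative

const envConfig = getMigrationEnvConfig();
// The refine() above has already guaranteed that ENCRYPTION_KEY or
// ROOT_ENCRYPTION_KEY is set, and the returned object is frozen against mutation.
if (envConfig.isHsmConfigured) {
  // all of HSM_LIB_PATH, HSM_PIN, HSM_KEY_LABEL and HSM_SLOT are present
}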
105
backend/src/db/migrations/utils/kms.ts
Normal file
@ -0,0 +1,105 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";

const getInstanceRootKey = async (knex: Knex) => {
  const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
  // if the fallback root key is used, it is base64 encoded
  const isBase64 = !process.env.ENCRYPTION_KEY;
  if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
  const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");

  const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
  const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  if (kmsRootConfig) {
    const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
    // set the flag so that other instance nodes can start
    return decryptedRootKey;
  }

  const newRootKey = randomSecureBytes(32);
  const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
  await knex(TableName.KmsServerRootConfig).insert({
    encryptedRootKey,
    // eslint-disable-next-line
    // @ts-ignore id is kept as fixed for idempotence and to avoid race condition
    id: KMS_ROOT_CONFIG_UUID
  });
  return newRootKey;
};

export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
  const KMS_VERSION = "v01";
  const KMS_VERSION_BLOB_LENGTH = 3;
  const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
  const project = await knex(TableName.Project).where({ id: projectId }).first();
  if (!project) throw new Error("Missing project id");

  const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);

  let secretManagerKmsKey;
  const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
  if (projectSecretManagerKmsId) {
    const kmsDoc = await knex(TableName.KmsKey)
      .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
      .where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
      .first();
    if (!kmsDoc) throw new Error("missing kms");
    secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
  } else {
    const [kmsDoc] = await knex(TableName.KmsKey)
      .insert({
        name: slugify(alphaNumericNanoId(8).toLowerCase()),
        orgId: project.orgId,
        isReserved: false
      })
      .returning("*");

    secretManagerKmsKey = randomSecureBytes(32);
    const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
    await knex(TableName.InternalKms).insert({
      version: 1,
      encryptedKey: encryptedKeyMaterial,
      encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
      kmsKeyId: kmsDoc.id
    });
  }

  const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
  let dataKey: Buffer;
  if (!encryptedSecretManagerDataKey) {
    dataKey = randomSecureBytes();
    // the versioning below is done automatically in the kms service
    const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
    const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
    await knex(TableName.Project)
      .where({ id: projectId })
      .update({
        kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
      });
  } else {
    const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
    dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
  }

  return {
    encryptor: ({ plainText }: { plainText: Buffer }) => {
      const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);

      // Buffer#1 encrypted text + Buffer#2 version number
      const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
      const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
      return { cipherTextBlob };
    },
    decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
      const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
      const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
      return decryptedBlob;
    }
  };
};
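A round-trip sketch of the encryptor/decryptor pair returned above, assuming a Knex handle and an existing project id; the helper name is illustrative:

import { Knex } from "knex";

import { getSecretManagerDataKey } from "./kms";

export const roundTrip = async (knex: Knex, projectId: string) => {
  const { encryptor, decryptor } = await getSecretManagerDataKey(knex, projectId);

  // encryptor appends the 3-byte "v01" version blob; decryptor strips it again.
  const { cipherTextBlob } = encryptor({ plainText: Buffer.from("super-secret-value") });
  const plainText = decryptor({ cipherTextBlob });
  return plainText.toString() === "super-secret-value"; // true
};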
@ -1,19 +0,0 @@
export const createCircularCache = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
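Usage sketch for the removed helper above; with a buffer of two slots, the third push wraps around and overwrites the oldest entry:

const cache = createCircularCache<number>(2);

cache.push("a", 1);
cache.push("b", 2);
cache.push("c", 3); // wraps to slot 0, evicting "a"

cache.getItem("a"); // undefined
cache.getItem("b"); // 2
cache.getItem("c"); // 3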
@ -1,52 +0,0 @@
import { Knex } from "knex";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";

import { TMigrationEnvConfig } from "./env-config";

type TDependencies = {
  envConfig: TMigrationEnvConfig;
  db: Knex;
  keyStore: TKeyStoreFactory;
};

export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
  // eslint-disable-next-line no-param-reassign
  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const projectDAL = projectDALFactory(db);

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  return { kmsService };
};
@ -1,56 +0,0 @@
import path from "node:path";

import dotenv from "dotenv";

import { initAuditLogDbConnection, initDbConnection } from "./instance";

const isProduction = process.env.NODE_ENV === "production";

// Update with your config settings.
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

const runRename = async () => {
  if (!isProduction) return;
  const migrationTable = "infisical_migrations";
  const applicationDb = initDbConnection({
    dbConnectionUri: process.env.DB_CONNECTION_URI as string,
    dbRootCert: process.env.DB_ROOT_CERT
  });

  const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
    ? initAuditLogDbConnection({
        dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
        dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
      })
    : undefined;

  const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
  if (hasMigrationTable) {
    const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
    if (firstFile?.name?.includes(".ts")) {
      await applicationDb(migrationTable).update({
        name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
      });
    }
  }
  if (auditLogDb) {
    const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
    if (hasMigrationTableInAuditLog) {
      const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
      if (firstFile?.name?.includes(".ts")) {
        await auditLogDb(migrationTable).update({
          name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
        });
      }
    }
  }
  await applicationDb.destroy();
  await auditLogDb?.destroy();
};

void runRename();
@ -16,8 +16,7 @@ export const AccessApprovalPoliciesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
  deletedAt: z.date().nullable().optional()
});

export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
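These generated schemas are parsed against raw DB rows, and their default() values apply when a field is absent. A hedged sketch of that behavior, picking just the defaulted column so the example stays self-contained:

const defaults = AccessApprovalPoliciesSchema.pick({ enforcementLevel: true }).parse({});
// defaults => { enforcementLevel: "hard" }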
@ -19,8 +19,7 @@ export const AppConnectionsSchema = z.object({
  version: z.number().default(1),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isPlatformManagedCredentials: z.boolean().default(false).nullable().optional()
  updatedAt: z.date()
});

export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const DynamicSecretsSchema = z.object({
@ -16,18 +14,16 @@ export const DynamicSecretsSchema = z.object({
  type: z.string(),
  defaultTTL: z.string(),
  maxTTL: z.string().nullable().optional(),
  inputIV: z.string().nullable().optional(),
  inputCiphertext: z.string().nullable().optional(),
  inputTag: z.string().nullable().optional(),
  inputIV: z.string(),
  inputCiphertext: z.string(),
  inputTag: z.string(),
  algorithm: z.string().default("aes-256-gcm"),
  keyEncoding: z.string().default("utf8"),
  folderId: z.string().uuid(),
  status: z.string().nullable().optional(),
  statusDetails: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedInput: zodBuffer,
  projectGatewayId: z.string().uuid().nullable().optional()
  updatedAt: z.date()
});

export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const GatewaysSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  serialNumber: z.string(),
  keyAlgorithm: z.string(),
  issuedAt: z.date(),
  expiration: z.date(),
  heartbeat: z.date().nullable().optional(),
  relayAddress: zodBuffer,
  orgGatewayRootCaId: z.string().uuid(),
  identityId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TGateways = z.infer<typeof GatewaysSchema>;
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;
@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  type: z.string(),
  allowedServiceAccounts: z.string().nullable().optional(),
  allowedProjects: z.string().nullable().optional(),
  allowedZones: z.string().nullable().optional()
  allowedServiceAccounts: z.string(),
  allowedProjects: z.string(),
  allowedZones: z.string()
});

export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityKubernetesAuthsSchema = z.object({
@ -19,17 +17,15 @@ export const IdentityKubernetesAuthsSchema = z.object({
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  kubernetesHost: z.string(),
  encryptedCaCert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
  encryptedTokenReviewerJwt: z.string().nullable().optional(),
  tokenReviewerJwtIV: z.string().nullable().optional(),
  tokenReviewerJwtTag: z.string().nullable().optional(),
  encryptedCaCert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  encryptedTokenReviewerJwt: z.string(),
  tokenReviewerJwtIV: z.string(),
  tokenReviewerJwtTag: z.string(),
  allowedNamespaces: z.string(),
  allowedNames: z.string(),
  allowedAudience: z.string(),
  encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
  allowedAudience: z.string()
});

export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityOidcAuthsSchema = z.object({
@ -17,17 +15,15 @@ export const IdentityOidcAuthsSchema = z.object({
  accessTokenTrustedIps: z.unknown(),
  identityId: z.string().uuid(),
  oidcDiscoveryUrl: z.string(),
  encryptedCaCert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
  encryptedCaCert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  boundIssuer: z.string(),
  boundAudiences: z.string(),
  boundClaims: z.unknown(),
  boundSubject: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedCaCertificate: zodBuffer.nullable().optional(),
  claimMetadataMapping: z.unknown().nullable().optional()
  updatedAt: z.date()
});

export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
@ -3,7 +3,6 @@ export * from "./access-approval-policies-approvers";
export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers";
export * from "./api-keys";
export * from "./app-connections";
export * from "./audit-log-streams";
export * from "./audit-logs";
export * from "./auth-token-sessions";
@ -20,9 +19,7 @@ export * from "./certificate-templates";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
@ -48,10 +45,6 @@ export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./internal-kms";
export * from "./kmip-client-certificates";
export * from "./kmip-clients";
export * from "./kmip-org-configs";
export * from "./kmip-org-server-certificates";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
@ -60,7 +53,6 @@ export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-gateway-config";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
@ -69,7 +61,6 @@ export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
@ -80,7 +71,6 @@ export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./resource-metadata";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@ -99,16 +89,13 @@ export * from "./secret-references";
export * from "./secret-references-v2";
export * from "./secret-rotation-output-v2";
export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-git-risks";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";
export * from "./secret-snapshot-secrets-v2";
export * from "./secret-snapshots";
export * from "./secret-syncs";
export * from "./secret-tag-junction";
export * from "./secret-tags";
export * from "./secret-v2-tag-junction";
@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const KmipClientCertificatesSchema = z.object({
  id: z.string().uuid(),
  kmipClientId: z.string().uuid(),
  serialNumber: z.string(),
  keyAlgorithm: z.string(),
  issuedAt: z.date(),
  expiration: z.date()
});

export type TKmipClientCertificates = z.infer<typeof KmipClientCertificatesSchema>;
export type TKmipClientCertificatesInsert = Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>;
export type TKmipClientCertificatesUpdate = Partial<
  Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>
>;
@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const KmipClientsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  permissions: z.string().array().nullable().optional(),
  description: z.string().nullable().optional(),
  projectId: z.string()
});

export type TKmipClients = z.infer<typeof KmipClientsSchema>;
export type TKmipClientsInsert = Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>;
export type TKmipClientsUpdate = Partial<Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>>;
@ -1,39 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmipOrgConfigsSchema = z.object({
  id: z.string().uuid(),
  orgId: z.string().uuid(),
  caKeyAlgorithm: z.string(),
  rootCaIssuedAt: z.date(),
  rootCaExpiration: z.date(),
  rootCaSerialNumber: z.string(),
  encryptedRootCaCertificate: zodBuffer,
  encryptedRootCaPrivateKey: zodBuffer,
  serverIntermediateCaIssuedAt: z.date(),
  serverIntermediateCaExpiration: z.date(),
  serverIntermediateCaSerialNumber: z.string().nullable().optional(),
  encryptedServerIntermediateCaCertificate: zodBuffer,
  encryptedServerIntermediateCaChain: zodBuffer,
  encryptedServerIntermediateCaPrivateKey: zodBuffer,
  clientIntermediateCaIssuedAt: z.date(),
  clientIntermediateCaExpiration: z.date(),
  clientIntermediateCaSerialNumber: z.string(),
  encryptedClientIntermediateCaCertificate: zodBuffer,
  encryptedClientIntermediateCaChain: zodBuffer,
  encryptedClientIntermediateCaPrivateKey: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TKmipOrgConfigs = z.infer<typeof KmipOrgConfigsSchema>;
export type TKmipOrgConfigsInsert = Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>;
export type TKmipOrgConfigsUpdate = Partial<Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>>;
@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmipOrgServerCertificatesSchema = z.object({
  id: z.string().uuid(),
  orgId: z.string().uuid(),
  commonName: z.string(),
  altNames: z.string(),
  serialNumber: z.string(),
  keyAlgorithm: z.string(),
  issuedAt: z.date(),
  expiration: z.date(),
  encryptedCertificate: zodBuffer,
  encryptedChain: zodBuffer
});

export type TKmipOrgServerCertificates = z.infer<typeof KmipOrgServerCertificatesSchema>;
export type TKmipOrgServerCertificatesInsert = Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>;
export type TKmipOrgServerCertificatesUpdate = Partial<
  Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>
>;
@ -16,7 +16,8 @@ export const KmsKeysSchema = z.object({
  name: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string().nullable().optional()
  projectId: z.string().nullable().optional(),
  slug: z.string().nullable().optional()
});

export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const LdapConfigsSchema = z.object({
@ -14,25 +12,22 @@ export const LdapConfigsSchema = z.object({
  orgId: z.string().uuid(),
  isActive: z.boolean(),
  url: z.string(),
  encryptedBindDN: z.string().nullable().optional(),
  bindDNIV: z.string().nullable().optional(),
  bindDNTag: z.string().nullable().optional(),
  encryptedBindPass: z.string().nullable().optional(),
  bindPassIV: z.string().nullable().optional(),
  bindPassTag: z.string().nullable().optional(),
  encryptedBindDN: z.string(),
  bindDNIV: z.string(),
  bindDNTag: z.string(),
  encryptedBindPass: z.string(),
  bindPassIV: z.string(),
  bindPassTag: z.string(),
  searchBase: z.string(),
  encryptedCACert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
  encryptedCACert: z.string(),
  caCertIV: z.string(),
  caCertTag: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  groupSearchBase: z.string().default(""),
  groupSearchFilter: z.string().default(""),
  searchFilter: z.string().default(""),
  uniqueUserAttribute: z.string().default(""),
  encryptedLdapBindDN: zodBuffer,
  encryptedLdapBindPass: zodBuffer,
  encryptedLdapCaCertificate: zodBuffer.nullable().optional()
  uniqueUserAttribute: z.string().default("")
});

export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
@ -80,7 +80,6 @@ export enum TableName {
  IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
  // used by both identity and users
  IdentityMetadata = "identity_metadata",
  ResourceMetadata = "resource_metadata",
  ScimToken = "scim_tokens",
  AccessApprovalPolicy = "access_approval_policies",
  AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@ -113,10 +112,6 @@ export enum TableName {
  SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
  SnapshotSecretV2 = "secret_snapshot_secrets_v2",
  ProjectSplitBackfillIds = "project_split_backfill_ids",
  // Gateway
  OrgGatewayConfig = "org_gateway_config",
  Gateway = "gateways",
  ProjectGateway = "project_gateways",
  // junction tables with tags
  SecretV2JnTag = "secret_v2_tag_junction",
  JnSecretTag = "secret_tag_junction",
@ -136,13 +131,7 @@ export enum TableName {
  SlackIntegrations = "slack_integrations",
  ProjectSlackConfigs = "project_slack_configs",
  AppConnection = "app_connections",
  SecretSync = "secret_syncs",
  KmipClient = "kmip_clients",
  KmipOrgConfig = "kmip_org_configs",
  KmipOrgServerCertificates = "kmip_org_server_certificates",
  KmipClientCertificates = "kmip_client_certificates",
  SecretRotationV2 = "secret_rotations_v2",
  SecretRotationV2SecretMapping = "secret_rotation_v2_secret_mappings"
  SecretSync = "secret_syncs"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

@ -226,17 +215,3 @@ export enum ProjectType {
  KMS = "kms",
  SSH = "ssh"
}

export enum ActionProjectType {
  SecretManager = ProjectType.SecretManager,
  CertificateManager = ProjectType.CertificateManager,
  KMS = ProjectType.KMS,
  SSH = ProjectType.SSH,
  // project operations that happen on all types
  Any = "any"
}

export enum SortDirection {
  ASC = "asc",
  DESC = "desc"
}
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const OidcConfigsSchema = z.object({
@ -17,22 +15,19 @@ export const OidcConfigsSchema = z.object({
  jwksUri: z.string().nullable().optional(),
  tokenEndpoint: z.string().nullable().optional(),
  userinfoEndpoint: z.string().nullable().optional(),
  encryptedClientId: z.string().nullable().optional(),
  encryptedClientId: z.string(),
  configurationType: z.string(),
  clientIdIV: z.string().nullable().optional(),
  clientIdTag: z.string().nullable().optional(),
  encryptedClientSecret: z.string().nullable().optional(),
  clientSecretIV: z.string().nullable().optional(),
  clientSecretTag: z.string().nullable().optional(),
  clientIdIV: z.string(),
  clientIdTag: z.string(),
  encryptedClientSecret: z.string(),
  clientSecretIV: z.string(),
  clientSecretTag: z.string(),
  allowedEmailDomains: z.string().nullable().optional(),
  isActive: z.boolean(),
  createdAt: z.date(),
  updatedAt: z.date(),
  orgId: z.string().uuid(),
  lastUsed: z.date().nullable().optional(),
  manageGroupMemberships: z.boolean().default(false),
  encryptedOidcClientId: zodBuffer,
  encryptedOidcClientSecret: zodBuffer
  lastUsed: z.date().nullable().optional()
});

export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
@ -1,43 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const OrgGatewayConfigSchema = z.object({
  id: z.string().uuid(),
  rootCaKeyAlgorithm: z.string(),
  rootCaIssuedAt: z.date(),
  rootCaExpiration: z.date(),
  rootCaSerialNumber: z.string(),
  encryptedRootCaCertificate: zodBuffer,
  encryptedRootCaPrivateKey: zodBuffer,
  clientCaIssuedAt: z.date(),
  clientCaExpiration: z.date(),
  clientCaSerialNumber: z.string().nullable().optional(),
  encryptedClientCaCertificate: zodBuffer,
  encryptedClientCaPrivateKey: zodBuffer,
  clientCertSerialNumber: z.string(),
  clientCertKeyAlgorithm: z.string(),
  clientCertIssuedAt: z.date(),
  clientCertExpiration: z.date(),
  encryptedClientCertificate: zodBuffer,
  encryptedClientPrivateKey: zodBuffer,
  gatewayCaIssuedAt: z.date(),
  gatewayCaExpiration: z.date(),
  gatewayCaSerialNumber: z.string(),
  encryptedGatewayCaCertificate: zodBuffer,
  encryptedGatewayCaPrivateKey: zodBuffer,
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;
@ -22,11 +22,7 @@ export const OrganizationsSchema = z.object({
  kmsEncryptedDataKey: zodBuffer.nullable().optional(),
  defaultMembershipRole: z.string().default("member"),
  enforceMfa: z.boolean().default(false),
  selectedMfaMethod: z.string().nullable().optional(),
  allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional(),
  shouldUseNewPrivilegeSystem: z.boolean().default(true),
  privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
  privilegeUpgradeInitiatedAt: z.date().nullable().optional()
  selectedMfaMethod: z.string().nullable().optional()
});

export type TOrganizations = z.infer<typeof OrganizationsSchema>;
@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ProjectGatewaysSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  gatewayId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;
@ -13,7 +13,7 @@ export const ProjectsSchema = z.object({
  id: z.string(),
  name: z.string(),
  slug: z.string(),
  autoCapitalization: z.boolean().default(false).nullable().optional(),
  autoCapitalization: z.boolean().default(true).nullable().optional(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
@ -25,8 +25,7 @@ export const ProjectsSchema = z.object({
  kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
  description: z.string().nullable().optional(),
  type: z.string(),
  enforceCapitalization: z.boolean().default(false)
  type: z.string()
});

export type TProjects = z.infer<typeof ProjectsSchema>;
@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ResourceMetadataSchema = z.object({
  id: z.string().uuid(),
  key: z.string(),
  value: z.string(),
  orgId: z.string().uuid(),
  userId: z.string().uuid().nullable().optional(),
  identityId: z.string().uuid().nullable().optional(),
  secretId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  dynamicSecretId: z.string().uuid().nullable().optional()
});

export type TResourceMetadata = z.infer<typeof ResourceMetadataSchema>;
export type TResourceMetadataInsert = Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>;
export type TResourceMetadataUpdate = Partial<Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>>;
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SamlConfigsSchema = z.object({
@ -25,10 +23,7 @@ export const SamlConfigsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  orgId: z.string().uuid(),
  lastUsed: z.date().nullable().optional(),
  encryptedSamlEntryPoint: zodBuffer,
  encryptedSamlIssuer: zodBuffer,
  encryptedSamlCertificate: zodBuffer
  lastUsed: z.date().nullable().optional()
});

export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
@ -16,8 +16,7 @@ export const SecretApprovalPoliciesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  enforcementLevel: z.string().default("hard"),
  deletedAt: z.date().nullable().optional(),
  allowedSelfApprovals: z.boolean().default(true)
  deletedAt: z.date().nullable().optional()
});

export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
@ -13,8 +13,7 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
  requestId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  reviewerUserId: z.string().uuid(),
  comment: z.string().nullable().optional()
  reviewerUserId: z.string().uuid()
});

export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
@ -24,8 +24,7 @@ export const SecretApprovalRequestsSecretsV2Schema = z.object({
  requestId: z.string().uuid(),
  op: z.string(),
  secretId: z.string().uuid().nullable().optional(),
  secretVersion: z.string().uuid().nullable().optional(),
  secretMetadata: z.unknown().nullable().optional()
  secretVersion: z.string().uuid().nullable().optional()
});

export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;
@ -15,9 +15,7 @@ export const SecretFoldersSchema = z.object({
  updatedAt: z.date(),
  envId: z.string().uuid(),
  parentId: z.string().uuid().nullable().optional(),
  isReserved: z.boolean().default(false).nullable().optional(),
  description: z.string().nullable().optional(),
  lastSecretModified: z.date().nullable().optional()
  isReserved: z.boolean().default(false).nullable().optional()
});

export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretRotationV2SecretMappingsSchema = z.object({
  id: z.string().uuid(),
  secretId: z.string().uuid(),
  rotationId: z.string().uuid()
});

export type TSecretRotationV2SecretMappings = z.infer<typeof SecretRotationV2SecretMappingsSchema>;
export type TSecretRotationV2SecretMappingsInsert = Omit<
  z.input<typeof SecretRotationV2SecretMappingsSchema>,
  TImmutableDBKeys
>;
export type TSecretRotationV2SecretMappingsUpdate = Partial<
  Omit<z.input<typeof SecretRotationV2SecretMappingsSchema>, TImmutableDBKeys>
>;
@ -1,39 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretRotationsV2Schema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().nullable().optional(),
  type: z.string(),
  parameters: z.unknown(),
  secretsMapping: z.unknown(),
  encryptedGeneratedCredentials: zodBuffer,
  isAutoRotationEnabled: z.boolean().default(true),
  activeIndex: z.number().default(0),
  folderId: z.string().uuid(),
  connectionId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  rotationInterval: z.number(),
  rotateAtUtc: z.unknown(),
  rotationStatus: z.string(),
  lastRotationAttemptedAt: z.date(),
  lastRotatedAt: z.date(),
  encryptedLastRotationMessage: zodBuffer.nullable().optional(),
  lastRotationJobId: z.string().nullable().optional(),
  nextRotationAt: z.date().nullable().optional(),
  isLastRotationManual: z.boolean().default(true)
});

export type TSecretRotationsV2 = z.infer<typeof SecretRotationsV2Schema>;
export type TSecretRotationsV2Insert = Omit<z.input<typeof SecretRotationsV2Schema>, TImmutableDBKeys>;
export type TSecretRotationsV2Update = Partial<Omit<z.input<typeof SecretRotationsV2Schema>, TImmutableDBKeys>>;
@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretRotationsSchema = z.object({
@ -24,8 +22,7 @@ export const SecretRotationsSchema = z.object({
  keyEncoding: z.string().nullable().optional(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedRotationData: zodBuffer
  updatedAt: z.date()
});

export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
@ -26,8 +26,7 @@ export const SecretSharingSchema = z.object({
  lastViewedAt: z.date().nullable().optional(),
  password: z.string().nullable().optional(),
  encryptedSecret: zodBuffer.nullable().optional(),
  identifier: z.string().nullable().optional(),
  type: z.string().default("share")
  identifier: z.string().nullable().optional()
});

export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
Some files were not shown because too many files have changed in this diff.