mirror of https://github.com/Infisical/infisical.git (synced 2025-07-11 12:11:38 +00:00)

Compare commits: offline-do...daniel/add (1 commit, d38243a1c6)

.env.example (65 lines changed)
@@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080
 # Mail/SMTP
 SMTP_HOST=
 SMTP_PORT=
-SMTP_FROM_ADDRESS=
-SMTP_FROM_NAME=
+SMTP_NAME=
 SMTP_USERNAME=
 SMTP_PASSWORD=
 
@@ -37,22 +36,16 @@ CLIENT_ID_HEROKU=
 CLIENT_ID_VERCEL=
 CLIENT_ID_NETLIFY=
 CLIENT_ID_GITHUB=
-CLIENT_ID_GITHUB_APP=
-CLIENT_SLUG_GITHUB_APP=
 CLIENT_ID_GITLAB=
 CLIENT_ID_BITBUCKET=
 CLIENT_SECRET_HEROKU=
 CLIENT_SECRET_VERCEL=
 CLIENT_SECRET_NETLIFY=
 CLIENT_SECRET_GITHUB=
-CLIENT_SECRET_GITHUB_APP=
 CLIENT_SECRET_GITLAB=
 CLIENT_SECRET_BITBUCKET=
 CLIENT_SLUG_VERCEL=
 
-CLIENT_PRIVATE_KEY_GITHUB_APP=
-CLIENT_APP_ID_GITHUB_APP=
-
 # Sentry (optional) for monitoring errors
 SENTRY_DSN=
 
@@ -75,63 +68,7 @@ CAPTCHA_SECRET=
 
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=
-
-OTEL_TELEMETRY_COLLECTION_ENABLED=false
-OTEL_EXPORT_TYPE=prometheus
-OTEL_EXPORT_OTLP_ENDPOINT=
-OTEL_OTLP_PUSH_INTERVAL=
-
-OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
-OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
 
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
 
 SSL_CLIENT_CERTIFICATE_HEADER_KEY=
-
-ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
-
-# App Connections
-
-# aws assume-role connection
-INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
-INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
-
-# github oauth connection
-INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
-
-#github app connection
-INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
-INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
-INF_APP_CONNECTION_GITHUB_APP_SLUG=
-INF_APP_CONNECTION_GITHUB_APP_ID=
-
-#gitlab app connection
-INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID=
-INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET=
-
-#github radar app connection
-INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
-INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
-
-#gcp app connection
-INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
-
-# azure app connection
-INF_APP_CONNECTION_AZURE_CLIENT_ID=
-INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
-
-# datadog
-SHOULD_USE_DATADOG_TRACER=
-DATADOG_PROFILING_ENABLED=
-DATADOG_ENV=
-DATADOG_SERVICE=
-DATADOG_HOSTNAME=
-
-# kubernetes
-KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false

@@ -1,2 +1 @@
 DB_CONNECTION_URI=
-AUDIT_LOGS_DB_CONNECTION_URI=
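Usage note (not part of the diff): a template like .env.example is normally copied to .env and exported into the shell before the server is started. A minimal sketch, assuming a POSIX shell:

    cp .env.example .env       # then fill in SMTP_*, CLIENT_*, and friends
    set -a; . ./.env; set +a   # auto-export every assignment in .env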
.envrc (3 lines deleted)

@@ -1,3 +0,0 @@
-# Learn more at https://direnv.net
-# We instruct direnv to use our Nix flake for a consistent development environment.
-use flake
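Aside on the deleted .envrc: a `use flake` line only takes effect once direnv is told to trust the directory. A minimal sketch, assuming direnv and Nix flakes are installed:

    cd <repo-root>    # hypothetical path: the directory containing .envrc
    direnv allow .    # trust it; direnv then loads the flake dev shell on entry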
.github/pull_request_template.md (vendored; 1 line changed)

@@ -6,7 +6,6 @@
 
 - [ ] Bug fix
 - [ ] New feature
-- [ ] Improvement
 - [ ] Breaking change
 - [ ] Documentation
 
.github/workflows/build-binaries.yml (vendored; 91 lines changed)

@@ -7,6 +7,7 @@ on:
         description: "Version number"
         required: true
         type: string
 
 defaults:
   run:
     working-directory: ./backend

@@ -48,9 +49,9 @@ jobs:
       - name: Package into node binary
         run: |
           if [ "${{ matrix.os }}" != "linux" ]; then
-            pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
+            pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
           else
-            pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
+            pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
           fi
 
       # Set up .deb package structure (Debian/Ubuntu only)
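For concreteness, one branch of the packaging step above expands roughly as follows for a hypothetical matrix entry (target=node20-linux and arch=x64 are assumed values; the actual matrix is defined outside the captured hunks):

    # assumed: matrix.target=node20-linux, matrix.arch=x64 => pkg target node20-linux-x64
    pkg --no-bytecode --public-packages "*" --public \
        --target node20-linux-x64 \
        --output ./binary/infisical-core .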
@@ -82,86 +83,6 @@ jobs:
           dpkg-deb --build infisical-core
           mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
 
-      ### RPM
-
-      # Set up .rpm package structure
-      - name: Set up .rpm package structure
-        if: matrix.os == 'linux'
-        run: |
-          mkdir -p infisical-core-rpm/usr/local/bin
-          cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
-          chmod +x infisical-core-rpm/usr/local/bin/infisical-core
-
-      # Install RPM build tools
-      - name: Install RPM build tools
-        if: matrix.os == 'linux'
-        run: sudo apt-get update && sudo apt-get install -y rpm
-
-      # Create .spec file for RPM
-      - name: Create .spec file for RPM
-        if: matrix.os == 'linux'
-        run: |
-          cat <<EOF > infisical-core.spec
-
-          %global _enable_debug_package 0
-          %global debug_package %{nil}
-          %global __os_install_post /usr/lib/rpm/brp-compress %{nil}
-
-          Name: infisical-core
-          Version: ${{ github.event.inputs.version }}
-          Release: 1%{?dist}
-          Summary: Infisical Core standalone executable
-          License: Proprietary
-          URL: https://app.infisical.com
-
-          %description
-          Infisical Core standalone executable (app.infisical.com)
-
-          %install
-          mkdir -p %{buildroot}/usr/local/bin
-          cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
-
-          %files
-          /usr/local/bin/infisical-core
-
-          %pre
-
-          %post
-
-          %preun
-
-          %postun
-          EOF
-
-      # Build .rpm file
-      - name: Build .rpm package
-        if: matrix.os == 'linux'
-        run: |
-          # Create necessary directories
-          mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
-
-          # Copy the binary directly to SOURCES
-          cp ./binary/infisical-core rpmbuild/SOURCES/
-
-          # Run rpmbuild with verbose output
-          rpmbuild -vv -bb \
-            --define "_topdir $(pwd)/rpmbuild" \
-            --define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
-            --define "_rpmdir $(pwd)/rpmbuild/RPMS" \
-            --target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
-            infisical-core.spec
-
-          # Try to find the RPM file
-          find rpmbuild -name "*.rpm"
-
-          # Move the RPM file if found
-          if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
-            mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
-          else
-            echo "RPM file not found!"
-            exit 1
-          fi
-
       - uses: actions/setup-python@v4
         with:
           python-version: "3.x" # Specify the Python version you need

@@ -176,12 +97,6 @@ jobs:
         working-directory: ./backend
         run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
 
-      # Publish .rpm file to Cloudsmith (Red Hat-based systems only)
-      - name: Publish .rpm to Cloudsmith
-        if: matrix.os == 'linux'
-        working-directory: ./backend
-        run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm
 
       # Publish .exe file to Cloudsmith (Windows only)
       - name: Publish to Cloudsmith (Windows)
         if: matrix.os == 'win'
.github/workflows/build-staging-and-deploy-aws.yml (vendored; new file, 154 lines)

@@ -0,0 +1,154 @@
+name: Deployment pipeline
+on: [workflow_dispatch]
+
+permissions:
+  id-token: write
+  contents: read
+
+jobs:
+  infisical-image:
+    name: Build backend image
+    runs-on: ubuntu-latest
+    steps:
+      - name: ☁️ Checkout source
+        uses: actions/checkout@v3
+      - name: 📦 Install dependencies to test all dependencies
+        run: npm ci --only-production
+        working-directory: backend
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: 🔧 Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: 🐋 Login to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Set up Depot CLI
+        uses: depot/setup-action@v1
+      - name: 🏗️ Build backend and push to docker hub
+        uses: depot/build-push-action@v1
+        with:
+          project: 64mmf0n610
+          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
+          push: true
+          context: .
+          file: Dockerfile.standalone-infisical
+          tags: |
+            infisical/staging_infisical:${{ steps.commit.outputs.short }}
+            infisical/staging_infisical:latest
+          platforms: linux/amd64,linux/arm64
+          build-args: |
+            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
+            INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
+
+  gamma-deployment:
+    name: Deploy to gamma
+    runs-on: ubuntu-latest
+    needs: [infisical-image]
+    environment:
+      name: Gamma
+    steps:
+      - uses: twingate/github-action@v1
+        with:
+          # The Twingate Service Key used to connect Twingate to the proper service
+          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
+          #
+          # Required
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: "20"
+      - name: Change directory to backend and install dependencies
+        env:
+          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
+        run: |
+          cd backend
+          npm install
+          npm run migration:latest
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          audience: sts.amazonaws.com
+          aws-region: us-east-1
+          role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: Download task definition
+        run: |
+          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
+      - name: Render Amazon ECS task definition
+        id: render-web-container
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: task-definition.json
+          container-name: infisical-core
+          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
+          environment-variables: "LOG_LEVEL=info"
+      - name: Deploy to Amazon ECS service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
+          service: infisical-core-gamma-stage
+          cluster: infisical-gamma-stage
+          wait-for-service-stability: true
+
+  production-postgres-deployment:
+    name: Deploy to production
+    runs-on: ubuntu-latest
+    needs: [gamma-deployment]
+    environment:
+      name: Production
+    steps:
+      - uses: twingate/github-action@v1
+        with:
+          # The Twingate Service Key used to connect Twingate to the proper service
+          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
+          #
+          # Required
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: "20"
+      - name: Change directory to backend and install dependencies
+        env:
+          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
+        run: |
+          cd backend
+          npm install
+          npm run migration:latest
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          audience: sts.amazonaws.com
+          aws-region: us-east-1
+          role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: Download task definition
+        run: |
+          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
+      - name: Render Amazon ECS task definition
+        id: render-web-container
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: task-definition.json
+          container-name: infisical-core-platform
+          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
+          environment-variables: "LOG_LEVEL=info"
+      - name: Deploy to Amazon ECS service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
+          service: infisical-core-platform
+          cluster: infisical-core-platform
+          wait-for-service-stability: true
@@ -32,23 +32,10 @@ jobs:
         run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
       - name: Start the server
         run: |
           echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
           echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
           echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
-
-          echo "Examining built image:"
-          docker image inspect infisical-api | grep -A 5 "Entrypoint"
-
-          docker run --name infisical-api -d -p 4000:4000 \
-            -e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-            -e REDIS_URL=$REDIS_URL \
-            -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-            -e ENCRYPTION_KEY=$ENCRYPTION_KEY \
-            --env-file .env \
-            infisical-api
-
-          echo "Container status right after creation:"
-          docker ps -a | grep infisical-api
+          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
         env:
           REDIS_URL: redis://172.17.0.1:6379
           DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable

@@ -56,48 +43,35 @@ jobs:
           ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
       - uses: actions/setup-go@v5
         with:
-          go-version: "1.21.5"
+          go-version: '1.21.5'
       - name: Wait for container to be stable and check logs
         run: |
           SECONDS=0
           HEALTHY=0
           while [ $SECONDS -lt 60 ]; do
-            # Check if container is running
-            if docker ps | grep infisical-api; then
-              # Try to access the API endpoint
-              if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
-                echo "API endpoint is responding. Container seems healthy."
-                HEALTHY=1
-                break
-              fi
-            else
-              echo "Container is not running!"
-              docker ps -a | grep infisical-api
+            if docker ps | grep infisical-api | grep -q healthy; then
+              echo "Container is healthy."
+              HEALTHY=1
               break
             fi
 
             echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
-            sleep 5
-            SECONDS=$((SECONDS+5))
+            docker logs infisical-api
 
+            sleep 2
+            SECONDS=$((SECONDS+2))
           done
 
           if [ $HEALTHY -ne 1 ]; then
             echo "Container did not become healthy in time"
-            echo "Container status:"
-            docker ps -a | grep infisical-api
-            echo "Container logs (if any):"
-            docker logs infisical-api || echo "No logs available"
-            echo "Container inspection:"
-            docker inspect infisical-api | grep -A 5 "State"
             exit 1
           fi
       - name: Install openapi-diff
-        run: go install github.com/oasdiff/oasdiff@latest
+        run: go install github.com/tufin/oasdiff@latest
       - name: Running OpenAPI Spec diff action
         run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
       - name: cleanup
-        if: always()
         run: |
           docker compose -f "docker-compose.dev.yml" down
-          docker stop infisical-api || true
-          docker rm infisical-api || true
+          docker stop infisical-api
+          docker remove infisical-api
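A note on the `grep -q healthy` probe in the hunk above: it can only match when the image defines a Docker HEALTHCHECK, because that is what makes `docker ps` print a `(healthy)` status. A more direct probe of the same state, shown only as a sketch and not as part of this diff:

    # Prints healthy / unhealthy / starting when a HEALTHCHECK is defined
    docker inspect --format '{{.State.Health.Status}}' infisical-api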
.github/workflows/check-fe-ts-and-lint.yml (vendored; 4 lines changed)

@@ -18,10 +18,10 @@ jobs:
     steps:
       - name: ☁️ Checkout source
        uses: actions/checkout@v3
-      - name: 🔧 Setup Node 20
+      - name: 🔧 Setup Node 16
        uses: actions/setup-node@v3
        with:
-          node-version: "20"
+          node-version: "16"
          cache: "npm"
          cache-dependency-path: frontend/package-lock.json
      - name: 📦 Install dependencies
.github/workflows/check-non-re2-regex.yml (vendored; 53 lines deleted)

@@ -1,53 +0,0 @@
-name: Detect Non-RE2 Regex
-on:
-  pull_request:
-    types: [opened, synchronize]
-
-jobs:
-  check-non-re2-regex:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Get diff of backend/*
-        run: |
-          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt
-
-      - name: Scan backend diff for non-RE2 regex
-        run: |
-          # Extract only added lines (excluding file headers)
-          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt
-
-          if [ ! -s added_lines.txt ]; then
-            echo "✅ No added lines in backend/ to check for regex usage."
-            exit 0
-          fi
-
-          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'
-
-          # Find all added lines that contain regex patterns
-          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
-            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
-            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
-              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
-              echo ""
-              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
-              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
-              echo ""
-              echo "Offending lines:"
-              cat actual_violations.txt
-              exit 1
-            else
-              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
-            fi
-          else
-            echo "✅ No regex patterns found in added/modified backend lines."
-          fi
-
-      - name: Cleanup temporary files
-        if: always()
-        run: |
-          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
@@ -1,77 +0,0 @@
-name: Release Infisical Core Helm chart
-
-on: [workflow_dispatch]
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Add Helm repositories
-        run: |
-          helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
-          helm repo add bitnami https://charts.bitnami.com/bitnami
-          helm repo update
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-standalone-postgres
-
-      - name: Create Infisical secrets
-        run: |
-          kubectl create secret generic infisical-secrets \
-            --namespace infisical-standalone-postgres \
-            --from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=SITE_URL=http://localhost:8080
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-standalone-postgres \
-            --helm-extra-args="--timeout=300s" \
-            --helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
-            --namespace infisical-standalone-postgres
-
-  release:
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-      - name: Install python
-        uses: actions/setup-python@v4
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-      - name: Build and push helm package to Cloudsmith
-        run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/helm_chart_release.yml (vendored; new file, 22 lines)

@@ -0,0 +1,22 @@
+name: Release Helm Charts
+
+on: [workflow_dispatch]
+
+jobs:
+  release:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install Helm
+        uses: azure/setup-helm@v3
+        with:
+          version: v3.10.0
+      - name: Install python
+        uses: actions/setup-python@v4
+      - name: Install Cloudsmith CLI
+        run: pip install --upgrade cloudsmith-cli
+      - name: Build and push helm package to Cloudsmith
+        run: cd helm-charts && sh upload-to-cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/release-k8-operator-helm.yml (vendored; 59 lines deleted)

@@ -1,59 +0,0 @@
-name: Release K8 Operator Helm Chart
-on:
-  workflow_dispatch:
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Run chart-testing (install)
-        run: ct install --config ct.yaml --charts helm-charts/secrets-operator
-
-  release-helm:
-    name: Release Helm Chart
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-
-      - name: Install python
-        uses: actions/setup-python@v4
-
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-
-      - name: Build and push helm package to CloudSmith
-        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,115 +1,62 @@
 name: Release standalone docker image
 on:
   push:
     tags:
       - "infisical/v*.*.*-postgres"
 
 jobs:
   infisical-tests:
     name: Run tests before deployment
     # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
     uses: ./.github/workflows/run-backend-tests.yml
-
   infisical-standalone:
     name: Build infisical standalone image postgres
     runs-on: ubuntu-latest
     needs: [infisical-tests]
     steps:
       - name: Extract version from tag
         id: extract_version
         run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
       - name: ☁️ Checkout source
         uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: 📦 Install dependencies to test all dependencies
         run: npm ci --only-production
         working-directory: backend
       - name: version output
         run: |
           echo "Output Value: ${{ steps.version.outputs.major }}"
           echo "Output Value: ${{ steps.version.outputs.minor }}"
           echo "Output Value: ${{ steps.version.outputs.patch }}"
           echo "Output Value: ${{ steps.version.outputs.version }}"
           echo "Output Value: ${{ steps.version.outputs.version_type }}"
           echo "Output Value: ${{ steps.version.outputs.increment }}"
       - name: Save commit hashes for tag
         id: commit
         uses: pr-mpt/actions-commit-hash@v2
       - name: 🔧 Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
       - name: 🐋 Login to Docker Hub
         uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Set up Depot CLI
         uses: depot/setup-action@v1
       - name: 📦 Build backend and export to Docker
         uses: depot/build-push-action@v1
         with:
           project: 64mmf0n610
           token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
           push: true
           context: .
           tags: |
             infisical/infisical:latest-postgres
             infisical/infisical:${{ steps.commit.outputs.short }}
             infisical/infisical:${{ steps.extract_version.outputs.version }}
           platforms: linux/amd64,linux/arm64
           file: Dockerfile.standalone-infisical
           build-args: |
             POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
             INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
-
-  infisical-fips-standalone:
-    name: Build infisical standalone image postgres
-    runs-on: ubuntu-latest
-    needs: [infisical-tests]
-    steps:
-      - name: Extract version from tag
-        id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
-      - name: ☁️ Checkout source
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: 📦 Install dependencies to test all dependencies
-        run: npm ci --only-production
-        working-directory: backend
-      - name: version output
-        run: |
-          echo "Output Value: ${{ steps.version.outputs.major }}"
-          echo "Output Value: ${{ steps.version.outputs.minor }}"
-          echo "Output Value: ${{ steps.version.outputs.patch }}"
-          echo "Output Value: ${{ steps.version.outputs.version }}"
-          echo "Output Value: ${{ steps.version.outputs.version_type }}"
-          echo "Output Value: ${{ steps.version.outputs.increment }}"
-      - name: Save commit hashes for tag
-        id: commit
-        uses: pr-mpt/actions-commit-hash@v2
-      - name: 🔧 Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: 🐋 Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Set up Depot CLI
-        uses: depot/setup-action@v1
-      - name: 📦 Build backend and export to Docker
-        uses: depot/build-push-action@v1
-        with:
-          project: 64mmf0n610
-          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
-          push: true
-          context: .
-          tags: |
-            infisical/infisical-fips:latest-postgres
-            infisical/infisical-fips:${{ steps.commit.outputs.short }}
-            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
-          platforms: linux/amd64,linux/arm64
-          file: Dockerfile.fips.standalone-infisical
-          build-args: |
-            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
-            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
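One aside on the version-extraction step that appears in both jobs above: `::set-output` is the legacy GitHub Actions output mechanism. A sketch of the equivalent using the newer environment-file form (an aside, not part of this diff):

    # Equivalent of: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
    echo "version=${GITHUB_REF_NAME#infisical/}" >> "$GITHUB_OUTPUT"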
.github/workflows/release_build_infisical_cli.yml (vendored; 212 lines changed)

@@ -1,153 +1,75 @@
 name: Build and release CLI
 
 on:
   workflow_dispatch:
 
   push:
     # run only against tags
     tags:
       - "infisical-cli/v*.*.*"
 
 permissions:
   contents: write
+  # packages: write
+  # issues: write
 jobs:
   cli-integration-tests:
     name: Run tests before deployment
     uses: ./.github/workflows/run-cli-tests.yml
     secrets:
       CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
       CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
       CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
       CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
       CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
       CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
       CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
       CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
 
-  npm-release:
-    runs-on: ubuntu-latest
-    env:
-      working-directory: ./npm
-    needs:
-      - cli-integration-tests
-      - goreleaser
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Extract version
-        run: |
-          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
-          echo "Version extracted: $VERSION"
-          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
-
-      - name: Print version
-        run: echo ${{ env.CLI_VERSION }}
-
-      - name: Setup Node
-        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
-        with:
-          node-version: 20
-          cache: "npm"
-          cache-dependency-path: ./npm/package-lock.json
-      - name: Install dependencies
-        working-directory: ${{ env.working-directory }}
-        run: npm install --ignore-scripts
-
-      - name: Set NPM version
-        working-directory: ${{ env.working-directory }}
-        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
-
-      - name: Setup NPM
-        working-directory: ${{ env.working-directory }}
-        run: |
-          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
-          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
-
-          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
-          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-
-      - name: Pack NPM
-        working-directory: ${{ env.working-directory }}
-        run: npm pack
-
-      - name: Publish NPM
-        working-directory: ${{ env.working-directory }}
-        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-
-  goreleaser:
-    runs-on: ubuntu-latest-8-cores
-    needs: [cli-integration-tests]
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: 🐋 Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: 🔧 Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - run: git fetch --force --tags
-      - run: echo "Ref name ${{github.ref_name}}"
-      - uses: actions/setup-go@v3
-        with:
-          go-version: ">=1.19.3"
-          cache: true
-          cache-dependency-path: cli/go.sum
-      - name: Setup for libssl1.0-dev
-        run: |
-          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
-          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
-          sudo apt update
-          sudo apt-get install -y libssl1.0-dev
-      - name: OSXCross for CGO Support
-        run: |
-          mkdir ../../osxcross
-          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
-      - uses: goreleaser/goreleaser-action@v4
-        with:
-          distribution: goreleaser-pro
-          version: v1.26.2-pro
-          args: release --clean
-        env:
-          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
-          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
-          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
-          AUR_KEY: ${{ secrets.AUR_KEY }}
-          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
-      - uses: actions/setup-python@v4
-      - run: pip install --upgrade cloudsmith-cli
-      - uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
-        with:
-          ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
-          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
-      - name: Install deb-s3
-        run: gem install deb-s3
-      - name: Configure GPG Key
-        run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
-        env:
-          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-          GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
-      - name: Publish to CloudSmith
-        run: sh cli/upload_to_cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
-          INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
-          INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
-          AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
-      - name: Invalidate Cloudfront cache
-        run: aws cloudfront create-invalidation --distribution-id $CLOUDFRONT_DISTRIBUTION_ID --paths '/deb/dists/stable/*'
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
-          CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}
+  goreleaser:
+    runs-on: ubuntu-20.04
+    needs: [cli-integration-tests]
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: 🐋 Login to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: 🔧 Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - run: git fetch --force --tags
+      - run: echo "Ref name ${{github.ref_name}}"
+      - uses: actions/setup-go@v3
+        with:
+          go-version: ">=1.19.3"
+          cache: true
+          cache-dependency-path: cli/go.sum
+      - name: libssl1.1 => libssl1.0-dev for OSXCross
+        run: |
+          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
+          sudo apt update && apt-cache policy libssl1.0-dev
+          sudo apt-get install libssl1.0-dev
+      - name: OSXCross for CGO Support
+        run: |
+          mkdir ../../osxcross
+          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
+      - uses: goreleaser/goreleaser-action@v4
+        with:
+          distribution: goreleaser-pro
+          version: v1.26.2-pro
+          args: release --clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
+          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
+          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
+          AUR_KEY: ${{ secrets.AUR_KEY }}
+          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
+      - uses: actions/setup-python@v4
+      - run: pip install --upgrade cloudsmith-cli
+      - name: Publish to CloudSmith
+        run: sh cli/upload_to_cloudsmith.sh
+        env:
+          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/release_docker_k8_operator.yaml (vendored; 130 lines changed)

@@ -1,107 +1,37 @@
-name: Release K8 Operator Docker Image
+name: Release Docker image for K8 operator
 on:
   push:
     tags:
       - "infisical-k8-operator/v*.*.*"
 
-permissions:
-  contents: write
-  pull-requests: write
-
 jobs:
-  release-image:
-    name: Generate Helm Chart PR
+  release:
     runs-on: ubuntu-latest
-    outputs:
-      pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
     steps:
       - name: Extract version from tag
         id: extract_version
         run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
-
-      - name: Checkout code
-        uses: actions/checkout@v2
-
-      # Dependency for helm generation
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-
-      # Dependency for helm generation
-      - name: Install Go
-        uses: actions/setup-go@v4
-        with:
-          go-version: 1.21
-
-      # Install binaries for helm generation
-      - name: Install dependencies
-        working-directory: k8-operator
-        run: |
-          make helmify
-          make kustomize
-          make controller-gen
-
-      - name: Generate Helm Chart
-        working-directory: k8-operator
-        run: make helm
-
-      - name: Update Helm Chart Version
-        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
-
-      - name: Debug - Check file changes
-        run: |
-          echo "Current git status:"
-          git status
-          echo ""
-          echo "Modified files:"
-          git diff --name-only
-
-          # If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
-          if [ -z "$(git diff --name-only)" ]; then
-            echo "No helm changes or version changes. Invalid release detected, Exiting."
-            exit 1
-          fi
-
-      - name: Create Helm Chart PR
-        id: create-pr
-        uses: peter-evans/create-pull-request@v5
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
-          committer: GitHub <noreply@github.com>
-          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
-          branch: helm-update-${{ steps.extract_version.outputs.version }}
-          delete-branch: true
-          title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
-          body: |
-            This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
-            Additionally the helm chart has been updated to match the latest operator code changes.
-
-            Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-
-            Once you have approved this PR, you can trigger the helm release workflow manually.
-          base: main
+      - uses: actions/checkout@v2
 
       - name: 🔧 Set up QEMU
         uses: docker/setup-qemu-action@v1
 
       - name: 🔧 Set up Docker Buildx
         uses: docker/setup-buildx-action@v1
 
       - name: 🐋 Login to Docker Hub
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Build and push
         id: docker_build
         uses: docker/build-push-action@v2
         with:
           context: k8-operator
           push: true
           platforms: linux/amd64,linux/arm64
           tags: |
             infisical/kubernetes-operator:latest
             infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
.github/workflows/release_helm_gateway.yaml (vendored; 70 lines deleted)

@@ -1,70 +0,0 @@
-name: Release Gateway Helm Chart
-on:
-  workflow_dispatch:
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-gateway
-
-      - name: Create gateway secret
-        run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-gateway \
-            --helm-extra-args="--timeout=300s" \
-            --namespace infisical-gateway
-
-  release-helm:
-    name: Release Helm Chart
-    needs: test-helm
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Install Helm
-        uses: azure/setup-helm@v3
-        with:
-          version: v3.10.0
-
-      - name: Install python
-        uses: actions/setup-python@v4
-
-      - name: Install Cloudsmith CLI
-        run: pip install --upgrade cloudsmith-cli
-
-      - name: Build and push helm package to CloudSmith
-        run: cd helm-charts && sh upload-gateway-cloudsmith.sh
-        env:
-          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/run-backend-tests.yml (vendored; 6 lines changed)

@@ -34,10 +34,7 @@ jobs:
         working-directory: backend
       - name: Start postgres and redis
         run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
-      - name: Run unit test
-        run: npm run test:unit
-        working-directory: backend
-      - name: Run integration test
+      - name: Start integration test
         run: npm run test:e2e
         working-directory: backend
         env:

@@ -48,4 +45,3 @@ jobs:
       - name: cleanup
         run: |
           docker compose -f "docker-compose.dev.yml" down
-
.github/workflows/run-helm-chart-tests-infisical-gateway.yml (49 lines deleted)

@@ -1,49 +0,0 @@
-name: Run Helm Chart Tests for Gateway
-on:
-  pull_request:
-    paths:
-      - "helm-charts/infisical-gateway/**"
-      - ".github/workflows/run-helm-chart-tests-infisical-gateway.yml"
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-gateway
-
-      - name: Create gateway secret
-        run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-gateway \
-            --helm-extra-args="--timeout=300s" \
-            --namespace infisical-gateway
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml (68 lines deleted)

@@ -1,68 +0,0 @@
-name: Run Helm Chart Tests for Infisical Standalone Postgres
-on:
-  pull_request:
-    paths:
-      - "helm-charts/infisical-standalone-postgres/**"
-      - ".github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml"
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Add Helm repositories
-        run: |
-          helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
-          helm repo add bitnami https://charts.bitnami.com/bitnami
-          helm repo update
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Create namespace
-        run: kubectl create namespace infisical-standalone-postgres
-
-      - name: Create Infisical secrets
-        run: |
-          kubectl create secret generic infisical-secrets \
-            --namespace infisical-standalone-postgres \
-            --from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
-            --from-literal=SITE_URL=http://localhost:8080
-
-      - name: Create bootstrap secret
-        run: |
-          kubectl create secret generic infisical-bootstrap-credentials \
-            --namespace infisical-standalone-postgres \
-            --from-literal=INFISICAL_ADMIN_EMAIL=admin@example.com \
-            --from-literal=INFISICAL_ADMIN_PASSWORD=admin-password
-
-      - name: Run chart-testing (install)
-        run: |
-          ct install \
-            --config ct.yaml \
-            --charts helm-charts/infisical-standalone-postgres \
-            --helm-extra-args="--timeout=300s" \
-            --helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres --set infisical.autoBootstrap.enabled=true" \
-            --namespace infisical-standalone-postgres
.github/workflows/run-helm-chart-tests-secret-operator.yml (deleted)
@@ -1,38 +0,0 @@
-name: Run Helm Chart Tests for Secret Operator
-
-on:
-  pull_request:
-    paths:
-      - "helm-charts/secrets-operator/**"
-      - ".github/workflows/run-helm-chart-tests-secret-operator.yml"
-
-jobs:
-  test-helm:
-    name: Test Helm Chart
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Helm
-        uses: azure/setup-helm@v4.2.0
-        with:
-          version: v3.17.0
-
-      - uses: actions/setup-python@v5.3.0
-        with:
-          python-version: "3.x"
-          check-latest: true
-
-      - name: Set up chart-testing
-        uses: helm/chart-testing-action@v2.7.0
-
-      - name: Run chart-testing (lint)
-        run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
-
-      - name: Create kind cluster
-        uses: helm/kind-action@v1.12.0
-
-      - name: Run chart-testing (install)
-        run: ct install --config ct.yaml --charts helm-charts/secrets-operator
.gitignore (vendored, 3 changed lines)
@@ -63,7 +63,6 @@ yarn-error.log*
 
 # Editor specific
 .vscode/*
-.idea/*
 
 frontend-build
 
@@ -71,5 +70,3 @@ frontend-build
 cli/infisical-merge
 cli/test/infisical-merge
 /backend/binary
-
-/npm/bin
@@ -162,24 +162,6 @@ scoop:
     description: "The official Infisical CLI"
     license: MIT
 
-winget:
-  - name: infisical
-    publisher: infisical
-    license: MIT
-    homepage: https://infisical.com
-    short_description: "The official Infisical CLI"
-    repository:
-      owner: infisical
-      name: winget-pkgs
-      branch: "infisical-{{.Version}}"
-      pull_request:
-        enabled: true
-        draft: false
-        base:
-          owner: microsoft
-          name: winget-pkgs
-          branch: master
-
 aurs:
   - name: infisical-bin
     homepage: "https://infisical.com"
@@ -1,12 +1,6 @@
 #!/usr/bin/env sh
 . "$(dirname -- "$0")/_/husky.sh"
 
-# Check if infisical is installed
-if ! command -v infisical >/dev/null 2>&1; then
-    echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
-    exit 1
-fi
-
 npx lint-staged
 
 infisical scan git-changes --staged -v
@@ -6,43 +6,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
 docs/self-hosting/configuration/envars.mdx:generic-api-key:106
 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
 docs/mint.json:generic-api-key:651
-backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
-docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
-docs/cli/commands/bootstrap.mdx:jwt:86
-docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
-docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
-frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
-k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
-k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
-backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts:generic-api-key:125
-frontend/src/components/permissions/AccessTree/nodes/RoleNode.tsx:generic-api-key:67
-frontend/src/components/secret-rotations-v2/RotateSecretRotationV2Modal.tsx:generic-api-key:14
-frontend/src/components/secret-rotations-v2/SecretRotationV2StatusBadge.tsx:generic-api-key:11
-frontend/src/components/secret-rotations-v2/ViewSecretRotationV2GeneratedCredentials/ViewSecretRotationV2GeneratedCredentials.tsx:generic-api-key:23
-frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:28
-frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:65
-frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretRotationListView/SecretRotationItem.tsx:generic-api-key:26
-docs/documentation/platform/kms/overview.mdx:generic-api-key:281
-docs/documentation/platform/kms/overview.mdx:generic-api-key:344
-frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:85
-docs/cli/commands/user.mdx:generic-api-key:51
-frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:76
-docs/integrations/app-connections/hashicorp-vault.mdx:generic-api-key:188
-cli/detect/config/gitleaks.toml:gcp-api-key:567
-cli/detect/config/gitleaks.toml:gcp-api-key:569
-cli/detect/config/gitleaks.toml:gcp-api-key:570
-cli/detect/config/gitleaks.toml:gcp-api-key:572
-cli/detect/config/gitleaks.toml:gcp-api-key:574
-cli/detect/config/gitleaks.toml:gcp-api-key:575
-cli/detect/config/gitleaks.toml:gcp-api-key:576
-cli/detect/config/gitleaks.toml:gcp-api-key:577
-cli/detect/config/gitleaks.toml:gcp-api-key:578
-cli/detect/config/gitleaks.toml:gcp-api-key:579
-cli/detect/config/gitleaks.toml:gcp-api-key:581
-cli/detect/config/gitleaks.toml:gcp-api-key:582
-.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:51
-.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
-.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
-.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
-backend/src/services/smtp/smtp-service.ts:generic-api-key:79
-frontend/src/components/secret-syncs/forms/SecretSyncDestinationFields/CloudflarePagesSyncFields.tsx:cloudflare-api-key:7
@@ -1,185 +0,0 @@
-ARG POSTHOG_HOST=https://app.posthog.com
-ARG POSTHOG_API_KEY=posthog-api-key
-ARG INTERCOM_ID=intercom-id
-ARG CAPTCHA_SITE_KEY=captcha-site-key
-
-FROM node:20-slim AS base
-
-FROM base AS frontend-dependencies
-WORKDIR /app
-
-COPY frontend/package.json frontend/package-lock.json ./
-
-# Install dependencies
-RUN npm ci --only-production --ignore-scripts
-
-# Rebuild the source code only when needed
-FROM base AS frontend-builder
-WORKDIR /app
-
-# Copy dependencies
-COPY --from=frontend-dependencies /app/node_modules ./node_modules
-# Copy all files
-COPY /frontend .
-
-ENV NODE_ENV production
-ARG POSTHOG_HOST
-ENV VITE_POSTHOG_HOST $POSTHOG_HOST
-ARG POSTHOG_API_KEY
-ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
-ARG INTERCOM_ID
-ENV VITE_INTERCOM_ID $INTERCOM_ID
-ARG INFISICAL_PLATFORM_VERSION
-ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
-ARG CAPTCHA_SITE_KEY
-ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
-
-# Build
-RUN npm run build
-
-# Production image
-FROM base AS frontend-runner
-WORKDIR /app
-
-RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
-
-COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
-
-USER non-root-user
-
-##
-## BACKEND
-##
-FROM base AS backend-build
-
-ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
-
-RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
-
-WORKDIR /app
-
-# Required for pkcs11js and ODBC
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc \
-    && rm -rf /var/lib/apt/lists/*
-
-# Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-COPY backend/package*.json ./
-RUN npm ci --only-production
-
-COPY /backend .
-COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
-RUN npm i -D tsconfig-paths
-RUN npm run build
-
-# Production stage
-FROM base AS backend-runner
-
-ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
-
-WORKDIR /app
-
-# Required for pkcs11js and ODBC
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc \
-    && rm -rf /var/lib/apt/lists/*
-
-# Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-COPY backend/package*.json ./
-RUN npm ci --only-production
-
-COPY --from=backend-build /app .
-
-RUN mkdir frontend-build
-
-# Production stage
-FROM base AS production
-
-# Install necessary packages including ODBC
-RUN apt-get update && apt-get install -y \
-    ca-certificates \
-    curl \
-    git \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc \
-    openssh-client \
-    && rm -rf /var/lib/apt/lists/*
-
-# Configure ODBC in production
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-# Install Infisical CLI
-RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.41.89 \
-    && rm -rf /var/lib/apt/lists/*
-
-RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
-
-# Give non-root-user permission to update SSL certs
-RUN chown -R non-root-user /etc/ssl/certs
-RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
-RUN chmod -R u+rwx /etc/ssl/certs
-RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
-RUN chown non-root-user /usr/sbin/update-ca-certificates
-RUN chmod u+rx /usr/sbin/update-ca-certificates
-
-## set pre baked keys
-ARG POSTHOG_API_KEY
-ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
-ARG INTERCOM_ID=intercom-id
-ENV INTERCOM_ID=$INTERCOM_ID
-ARG CAPTCHA_SITE_KEY
-ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
-
-WORKDIR /
-
-COPY --from=backend-runner /app /backend
-
-COPY --from=frontend-runner /app ./backend/frontend-build
-
-ARG INFISICAL_PLATFORM_VERSION
-ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
-
-ENV PORT 8080
-ENV HOST=0.0.0.0
-ENV HTTPS_ENABLED false
-ENV NODE_ENV production
-ENV STANDALONE_BUILD true
-ENV STANDALONE_MODE true
-ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
-ENV NODE_OPTIONS="--max-old-space-size=1024"
-
-WORKDIR /backend
-
-ENV TELEMETRY_ENABLED true
-
-EXPOSE 8080
-EXPOSE 443
-
-USER non-root-user
-
-CMD ["./standalone-entrypoint.sh"]
@@ -3,13 +3,16 @@ ARG POSTHOG_API_KEY=posthog-api-key
 ARG INTERCOM_ID=intercom-id
 ARG CAPTCHA_SITE_KEY=captcha-site-key
 
-FROM node:20-slim AS base
+FROM node:20-alpine AS base
 
 FROM base AS frontend-dependencies
 
+# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
+RUN apk add --no-cache libc6-compat
+
 WORKDIR /app
 
-COPY frontend/package.json frontend/package-lock.json ./
+COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
 
 # Install dependencies
 RUN npm ci --only-production --ignore-scripts
@@ -24,17 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
 COPY /frontend .
 
 ENV NODE_ENV production
+ENV NEXT_PUBLIC_ENV production
 ARG POSTHOG_HOST
-ENV VITE_POSTHOG_HOST $POSTHOG_HOST
+ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
 ARG POSTHOG_API_KEY
-ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
 ARG INTERCOM_ID
-ENV VITE_INTERCOM_ID $INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
 ARG INFISICAL_PLATFORM_VERSION
-ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
+ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
 ARG CAPTCHA_SITE_KEY
-ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
-ENV NODE_OPTIONS="--max-old-space-size=8192"
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
 
 # Build
 RUN npm run build
@@ -43,42 +46,38 @@ RUN npm run build
 FROM base AS frontend-runner
 WORKDIR /app
 
-RUN groupadd --system --gid 1001 nodejs
-RUN useradd --system --uid 1001 --gid nodejs non-root-user
+RUN addgroup --system --gid 1001 nodejs
+RUN adduser --system --uid 1001 non-root-user
 
-COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
+RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
+VOLUME /app/.next/cache/images
+
+COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
+COPY --from=frontend-builder /app/public ./public
+RUN chown non-root-user:nodejs ./public/data
+
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
 
 USER non-root-user
 
+ENV NEXT_TELEMETRY_DISABLED 1
+
 ##
 ## BACKEND
 ##
 FROM base AS backend-build
+RUN addgroup --system --gid 1001 nodejs \
+    && adduser --system --uid 1001 non-root-user
 
 WORKDIR /app
 
-# Install all required dependencies for build
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    freetds-bin \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev \
-    && rm -rf /var/lib/apt/lists/*
-
-RUN groupadd --system --gid 1001 nodejs
-RUN useradd --system --uid 1001 --gid nodejs non-root-user
-
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
 COPY /backend .
 COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
 RUN npm i -D tsconfig-paths
-ENV NODE_OPTIONS="--max-old-space-size=8192"
 RUN npm run build
 
 # Production stage
@@ -86,21 +85,6 @@ FROM base AS backend-runner
 
 WORKDIR /app
 
-# Install all required dependencies for runtime
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    freetds-bin \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev \
-    && rm -rf /var/lib/apt/lists/*
-
-# Configure ODBC
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
@@ -110,37 +94,9 @@ RUN mkdir frontend-build
 
 # Production stage
 FROM base AS production
-RUN apt-get update && apt-get install -y \
-    ca-certificates \
-    bash \
-    curl \
-    git \
-    python3 \
-    make \
-    g++ \
-    unixodbc \
-    freetds-bin \
-    unixodbc-dev \
-    libc-dev \
-    freetds-dev \
-    wget \
-    openssh-client \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install Infisical CLI
-RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.41.89 \
-    && rm -rf /var/lib/apt/lists/*
-
-WORKDIR /
-
-# Configure ODBC in production
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-# Setup user permissions
-RUN groupadd --system --gid 1001 nodejs \
-    && useradd --system --uid 1001 --gid nodejs non-root-user
+RUN apk add --upgrade --no-cache ca-certificates
+RUN addgroup --system --gid 1001 nodejs \
+    && adduser --system --uid 1001 non-root-user
 
 # Give non-root-user permission to update SSL certs
 RUN chown -R non-root-user /etc/ssl/certs
@@ -152,17 +108,21 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
 
 ## set pre baked keys
 ARG POSTHOG_API_KEY
-ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
+    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
 ARG INTERCOM_ID=intercom-id
-ENV INTERCOM_ID=$INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
+    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
 ARG CAPTCHA_SITE_KEY
-ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
+    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
 
+WORKDIR /
+
 COPY --from=backend-runner /app /backend
 
 COPY --from=frontend-runner /app ./backend/frontend-build
 
-ARG INFISICAL_PLATFORM_VERSION
-ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
-
 ENV PORT 8080
 ENV HOST=0.0.0.0
@@ -170,8 +130,6 @@ ENV HTTPS_ENABLED false
 ENV NODE_ENV production
 ENV STANDALONE_BUILD true
 ENV STANDALONE_MODE true
-ENV NODE_OPTIONS="--max-old-space-size=1024"
 
 WORKDIR /backend
 
 ENV TELEMETRY_ENABLED true
Makefile (5 changed lines)
@@ -10,9 +10,6 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build
 
-up-dev-metrics:
-	docker compose -f docker-compose.dev.yml --profile metrics up --build
-
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build
 
@@ -31,5 +28,3 @@ reviewable-api:
 
 reviewable: reviewable-ui reviewable-api
 
-up-dev-sso:
-	docker compose -f docker-compose.dev.yml --profile sso up --build
@@ -69,15 +69,6 @@ module.exports = {
           ["^\\."]
         ]
       }
-      ],
-      "import/extensions": [
-        "error",
-        "ignorePackages",
-        {
-          "": "never", // this is required to get the .tsx to work...
-          ts: "never",
-          tsx: "never"
-        }
     ]
   }
 };
@@ -1,24 +1,8 @@
 # Build stage
-FROM node:20-slim AS build
+FROM node:20-alpine AS build
 
 WORKDIR /app
 
-# Required for pkcs11js
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++ \
-    openssh-client \
-    openssl
-
-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
-    unixodbc \
-    freetds-bin \
-    freetds-dev \
-    unixodbc-dev \
-    libc-dev
-
 COPY package*.json ./
 RUN npm ci --only-production
 
@@ -26,39 +10,23 @@ COPY . .
 RUN npm run build
 
 # Production stage
-FROM node:20-slim
+FROM node:20-alpine
 
 WORKDIR /app
 
 ENV npm_config_cache /home/node/.npm
 
 COPY package*.json ./
 
-RUN apt-get update && apt-get install -y \
-    python3 \
-    make \
-    g++
-
-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
-    unixodbc \
-    freetds-bin \
-    freetds-dev \
-    unixodbc-dev \
-    libc-dev
-
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
 RUN npm ci --only-production && npm cache clean --force
 
 COPY --from=build /app .
 
-# Install Infisical CLI
-RUN apt-get install -y curl bash && \
-    curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.41.89 git
+RUN apk add --no-cache bash curl && curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
+    && apk add infisical=0.8.1 && apk add --no-cache git
 
 HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
     CMD node healthcheck.js
 
 ENV HOST=0.0.0.0
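
A note for readers on the TDS/ODBC dependencies that appear throughout these Dockerfile hunks: per the comments in the diff, FreeTDS and unixODBC are installed so the backend can reach SAP ASE for dynamic secrets, with the driver registered in /etc/odbcinst.ini by the RUN printf step. Below is a minimal, hedged sketch of a connection through such a driver, assuming the `odbc` npm package; the server host, port, and credentials are placeholders, not values from this repo.

import odbc from "odbc";

// Connects through the FreeTDS driver registered in /etc/odbcinst.ini.
// Every connection parameter here is a hypothetical placeholder.
async function pingSapAse(): Promise<void> {
  const connection = await odbc.connect(
    "Driver={FreeTDS};Server=example-ase-host;Port=5000;UID=app_user;PWD=app_password;"
  );
  const result = await connection.query("SELECT 1 AS ok");
  console.log(result); // e.g. [ { ok: 1 } ]
  await connection.close();
}

pingSapAse().catch(console.error);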
@@ -1,63 +1,8 @@
-FROM node:20-slim
+FROM node:20-alpine
 
-# ? Setup a test SoftHSM module. In production a real HSM is used.
-
-ARG SOFTHSM2_VERSION=2.5.0
-
-ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
-    SOFTHSM2_SOURCES=/tmp/softhsm2
-
-# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
-RUN apt-get update && apt-get install -y \
-    build-essential \
-    autoconf \
-    automake \
-    git \
-    libtool \
-    libssl-dev \
-    python3 \
-    make \
-    g++ \
-    openssh-client \
-    openssl \
-    curl \
-    pkg-config
-
-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc
-
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-# Build and install SoftHSM2
-RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
-WORKDIR ${SOFTHSM2_SOURCES}
-
-RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
-    && sh autogen.sh \
-    && ./configure --prefix=/usr/local --disable-gost \
-    && make \
-    && make install
-
-WORKDIR /root
-RUN rm -fr ${SOFTHSM2_SOURCES}
-
-# Install pkcs11-tool
-RUN apt-get install -y opensc
-
-RUN mkdir -p /etc/softhsm2/tokens && \
-    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
-
-# ? App setup
-
-# Install Infisical CLI
-RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
-    apt-get update && \
-    apt-get install -y infisical=0.41.89
+RUN apk add --no-cache bash curl && curl -1sLf \
+    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
+    && apk add infisical=0.8.1 && apk add --no-cache git
 
 WORKDIR /app
@@ -1,85 +0,0 @@
-FROM node:20-slim
-
-# ? Setup a test SoftHSM module. In production a real HSM is used.
-
-ARG SOFTHSM2_VERSION=2.5.0
-
-ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
-    SOFTHSM2_SOURCES=/tmp/softhsm2
-
-# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
-RUN apt-get update && apt-get install -y \
-    build-essential \
-    autoconf \
-    automake \
-    git \
-    libtool \
-    libssl-dev \
-    python3 \
-    make \
-    g++ \
-    openssh-client \
-    curl \
-    pkg-config \
-    perl \
-    wget
-
-# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
-RUN apt-get install -y \
-    unixodbc \
-    unixodbc-dev \
-    freetds-dev \
-    freetds-bin \
-    tdsodbc
-
-RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
-
-# Build and install SoftHSM2
-RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
-WORKDIR ${SOFTHSM2_SOURCES}
-
-RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
-    && sh autogen.sh \
-    && ./configure --prefix=/usr/local --disable-gost \
-    && make \
-    && make install
-
-WORKDIR /root
-RUN rm -fr ${SOFTHSM2_SOURCES}
-
-# Install pkcs11-tool
-RUN apt-get install -y opensc
-
-RUN mkdir -p /etc/softhsm2/tokens && \
-    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
-
-WORKDIR /openssl-build
-RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
-    && tar -xf openssl-3.1.2.tar.gz \
-    && cd openssl-3.1.2 \
-    && ./Configure enable-fips \
-    && make \
-    && make install_fips
-
-# ? App setup
-
-# Install Infisical CLI
-RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
-    apt-get update && \
-    apt-get install -y infisical=0.41.89
-
-WORKDIR /app
-
-COPY package.json package.json
-COPY package-lock.json package-lock.json
-
-RUN npm install
-
-COPY . .
-
-ENV HOST=0.0.0.0
-ENV OPENSSL_CONF=/app/nodejs.cnf
-ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
-ENV NODE_OPTIONS=--force-fips
-
-CMD ["npm", "run", "dev:docker"]
@@ -1,22 +1,14 @@
-import RE2 from "re2";
-
 import { TKeyStoreFactory } from "@app/keystore/keystore";
-import { applyJitter } from "@app/lib/dates";
-import { delay as delayMs } from "@app/lib/delay";
 import { Lock } from "@app/lib/red-lock";
 
 export const mockKeyStore = (): TKeyStoreFactory => {
   const store: Record<string, string | number | Buffer> = {};
 
-  const getRegex = (pattern: string) =>
-    new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
-
   return {
     setItem: async (key, value) => {
       store[key] = value;
       return "OK";
     },
-    setExpiry: async () => 0,
     setItemWithExpiry: async (key, value) => {
       store[key] = value;
       return "OK";
@@ -25,27 +17,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
       delete store[key];
       return 1;
     },
-    deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
-      const regex = getRegex(pattern);
-      let totalDeleted = 0;
-      const keys = Object.keys(store);
-
-      for (let i = 0; i < keys.length; i += batchSize) {
-        const batch = keys.slice(i, i + batchSize);
-
-        for (const key of batch) {
-          if (regex.test(key)) {
-            delete store[key];
-            totalDeleted += 1;
-          }
-        }
-
-        // eslint-disable-next-line no-await-in-loop
-        await delayMs(Math.max(0, applyJitter(delay, jitter)));
-      }
-
-      return totalDeleted;
-    },
    getItem: async (key) => {
      const value = store[key];
      if (typeof value === "string") {
@@ -56,27 +27,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
     incrementBy: async () => {
       return 1;
     },
-    getItems: async (keys) => {
-      const values = keys.map((key) => {
-        const value = store[key];
-        if (typeof value === "string") {
-          return value;
-        }
-        return null;
-      });
-      return values;
-    },
-    getKeysByPattern: async (pattern) => {
-      const regex = getRegex(pattern);
-      const keys = Object.keys(store);
-      return keys.filter((key) => regex.test(key));
-    },
-    deleteItemsByKeyIn: async (keys) => {
-      for (const key of keys) {
-        delete store[key];
-      }
-      return keys.length;
-    },
     acquireLock: () => {
       return Promise.resolve({
         release: () => {}
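
For context on the deleteItems/getKeysByPattern helpers removed in the hunks above: both matched store keys against a glob-style pattern compiled to a regex by getRegex. A self-contained sketch of that conversion, substituting Node's built-in RegExp for RE2 so it runs with no extra dependencies (the escaping and wildcard expansion are copied from the deleted code):

// Escape regex metacharacters, then expand "*" into ".*" and anchor the result.
const globToRegex = (pattern: string): RegExp =>
  new RegExp(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);

console.log(globToRegex("session:*").test("session:user-123")); // true: "*" matches any suffix
console.log(globToRegex("a.b").test("axb")); // false: "." is escaped, so only a literal dot matches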
@@ -10,24 +10,17 @@ export const mockQueue = (): TQueueServiceFactory => {
     queue: async (name, jobData) => {
       job[name] = jobData;
     },
-    queuePg: async () => {},
-    schedulePg: async () => {},
-    initialize: async () => {},
     shutdown: async () => undefined,
     stopRepeatableJob: async () => true,
     start: (name, jobFn) => {
       queues[name] = jobFn;
       workers[name] = jobFn;
     },
-    startPg: async () => {},
     listen: (name, event) => {
       events[name] = event;
     },
-    getRepeatableJobs: async () => [],
     clearQueue: async () => {},
     stopJobById: async () => {},
-    stopJobByIdPg: async () => {},
-    stopRepeatableJobByJobId: async () => true,
-    stopRepeatableJobByKey: async () => true
+    stopRepeatableJobByJobId: async () => true
   };
 };
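
The mock queue above only records handlers and payloads; nothing dispatches them automatically. A hedged sketch of how a test might drive it, where the queue name and payload are illustrative placeholders rather than names from the repo:

// start() stores the handler; queue() stores the payload; the test invokes them directly.
type JobFn = (data: unknown) => Promise<void>;
const queues: Record<string, JobFn> = {};
const job: Record<string, unknown> = {};

queues["example-job"] = async (data) => {
  console.log("processing", data); // stands in for the real worker body
};
job["example-job"] = { id: "abc" };

void queues["example-job"](job["example-job"]);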
@@ -5,9 +5,6 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
-    },
-    verify: async () => {
-      return true;
     }
   };
 };
@@ -34,7 +34,7 @@ describe("Identity v1", async () => {
   test("Create identity", async () => {
     const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
     expect(newIdentity.name).toBe("mac1");
-    expect(newIdentity.authMethods).toEqual([]);
+    expect(newIdentity.authMethod).toBeNull();
 
     await deleteIdentity(newIdentity.id);
   });
@@ -42,7 +42,7 @@ describe("Identity v1", async () => {
   test("Update identity", async () => {
     const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
     expect(newIdentity.name).toBe("mac1");
-    expect(newIdentity.authMethods).toEqual([]);
+    expect(newIdentity.authMethod).toBeNull();
 
     const updatedIdentity = await testServer.inject({
       method: "PATCH",
@@ -39,6 +39,8 @@ describe("Login V1 Router", async () => {
     });
     expect(res.statusCode).toBe(200);
     const payload = JSON.parse(res.payload);
+    expect(payload).toHaveProperty("mfaEnabled");
     expect(payload).toHaveProperty("token");
+    expect(payload.mfaEnabled).toBeFalsy();
   });
 });
@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
         id: deletedProjectEnvironment.id,
         name: mockProjectEnv.name,
         slug: mockProjectEnv.slug,
-        position: 5,
+        position: 4,
         createdAt: expect.any(String),
         updatedAt: expect.any(String)
       })
@@ -1,36 +0,0 @@
-import { seedData1 } from "@app/db/seed-data";
-import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
-
-const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
-  const res = await testServer.inject({
-    method: "POST",
-    url: `/api/v1/secret-approvals`,
-    headers: {
-      authorization: `Bearer ${jwtAuthToken}`
-    },
-    body: {
-      workspaceId: seedData1.project.id,
-      environment: seedData1.environment.slug,
-      name: dto.name,
-      secretPath: dto.secretPath,
-      approvers: dto.approvers,
-      approvals: dto.approvals
-    }
-  });
-
-  expect(res.statusCode).toBe(200);
-  return res.json().approval;
-};
-
-describe("Secret approval policy router", async () => {
-  test("Create policy", async () => {
-    const policy = await createPolicy({
-      secretPath: "/",
-      approvals: 1,
-      approvers: [{id:seedData1.id, type: ApproverType.User}],
-      name: "test-policy"
-    });
-
-    expect(policy.name).toBe("test-policy");
-  });
-});
@@ -1,61 +1,73 @@
-import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
-import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
-import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
-
 import { seedData1 } from "@app/db/seed-data";
 
+const createSecretImport = async (importPath: string, importEnv: string) => {
+  const res = await testServer.inject({
+    method: "POST",
+    url: `/api/v1/secret-imports`,
+    headers: {
+      authorization: `Bearer ${jwtAuthToken}`
+    },
+    body: {
+      workspaceId: seedData1.project.id,
+      environment: seedData1.environment.slug,
+      path: "/",
+      import: {
+        environment: importEnv,
+        path: importPath
+      }
+    }
+  });
+
+  expect(res.statusCode).toBe(200);
+  const payload = JSON.parse(res.payload);
+  expect(payload).toHaveProperty("secretImport");
+  return payload.secretImport;
+};
+
+const deleteSecretImport = async (id: string) => {
+  const res = await testServer.inject({
+    method: "DELETE",
+    url: `/api/v1/secret-imports/${id}`,
+    headers: {
+      authorization: `Bearer ${jwtAuthToken}`
+    },
+    body: {
+      workspaceId: seedData1.project.id,
+      environment: seedData1.environment.slug,
+      path: "/"
+    }
+  });
+
+  expect(res.statusCode).toBe(200);
+  const payload = JSON.parse(res.payload);
+  expect(payload).toHaveProperty("secretImport");
+  return payload.secretImport;
+};
+
 describe("Secret Import Router", async () => {
   test.each([
     { importEnv: "prod", importPath: "/" }, // one in root
     { importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
   ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
     // check for default environments
-    const payload = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath,
-      importEnv
-    });
+    const payload = await createSecretImport(importPath, importEnv);
     expect(payload).toEqual(
       expect.objectContaining({
         id: expect.any(String),
-        importPath,
+        importPath: expect.any(String),
         importEnv: expect.objectContaining({
           name: expect.any(String),
-          slug: importEnv,
+          slug: expect.any(String),
           id: expect.any(String)
         })
       })
     );
-    await deleteSecretImport({
-      id: payload.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
+    await deleteSecretImport(payload.id);
   });
 
   test("Get secret imports", async () => {
-    const createdImport1 = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath: "/",
-      importEnv: "prod"
-    });
-    const createdImport2 = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath: "/",
-      importEnv: "staging"
-    });
+    const createdImport1 = await createSecretImport("/", "prod");
+    const createdImport2 = await createSecretImport("/", "staging");
     const res = await testServer.inject({
       method: "GET",
       url: `/api/v1/secret-imports`,
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
       expect.arrayContaining([
         expect.objectContaining({
           id: expect.any(String),
-          importPath: "/",
+          importPath: expect.any(String),
           importEnv: expect.objectContaining({
             name: expect.any(String),
-            slug: "prod",
-            id: expect.any(String)
-          })
-        }),
-        expect.objectContaining({
-          id: expect.any(String),
-          importPath: "/",
-          importEnv: expect.objectContaining({
-            name: expect.any(String),
-            slug: "staging",
+            slug: expect.any(String),
             id: expect.any(String)
           })
         })
       ])
     );
-    await deleteSecretImport({
-      id: createdImport1.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
-    await deleteSecretImport({
-      id: createdImport2.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
+    await deleteSecretImport(createdImport1.id);
+    await deleteSecretImport(createdImport2.id);
   });
 
   test("Update secret import position", async () => {
     const prodImportDetails = { path: "/", envSlug: "prod" };
     const stagingImportDetails = { path: "/", envSlug: "staging" };
 
-    const createdImport1 = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath: prodImportDetails.path,
-      importEnv: prodImportDetails.envSlug
-    });
-    const createdImport2 = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath: stagingImportDetails.path,
-      importEnv: stagingImportDetails.envSlug
-    });
+    const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
+    const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
 
     const updateImportRes = await testServer.inject({
       method: "PATCH",
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
     expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
     expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
 
-    await deleteSecretImport({
-      id: createdImport1.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
-    await deleteSecretImport({
-      id: createdImport2.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
+    await deleteSecretImport(createdImport1.id);
+    await deleteSecretImport(createdImport2.id);
   });
 
   test("Delete secret import position", async () => {
-    const createdImport1 = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath: "/",
-      importEnv: "prod"
-    });
-    const createdImport2 = await createSecretImport({
-      authToken: jwtAuthToken,
-      secretPath: "/",
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: seedData1.project.id,
-      importPath: "/",
-      importEnv: "staging"
-    });
-    const deletedImport = await deleteSecretImport({
-      id: createdImport1.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
+    const createdImport1 = await createSecretImport("/", "prod");
+    const createdImport2 = await createSecretImport("/", "staging");
+    const deletedImport = await deleteSecretImport(createdImport1.id);
 
     // check for default environments
     expect(deletedImport).toEqual(
       expect.objectContaining({
         id: expect.any(String),
-        importPath: "/",
+        importPath: expect.any(String),
         importEnv: expect.objectContaining({
           name: expect.any(String),
-          slug: "prod",
+          slug: expect.any(String),
           id: expect.any(String)
         })
       })
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
     expect(secretImportList.secretImports.length).toEqual(1);
     expect(secretImportList.secretImports[0].position).toEqual(1);
 
-    await deleteSecretImport({
-      id: createdImport2.id,
-      workspaceId: seedData1.project.id,
-      environmentSlug: seedData1.environment.slug,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
+    await deleteSecretImport(createdImport2.id);
   });
 });
 
-// dev <- stage <- prod
-describe.each([{ path: "/" }, { path: "/deep" }])(
-  "Secret import waterfall pattern testing - %path",
-  ({ path: testSuitePath }) => {
-    beforeAll(async () => {
-      let prodFolder: { id: string };
-      let stagingFolder: { id: string };
-      let devFolder: { id: string };
-
-      if (testSuitePath !== "/") {
-        prodFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: "prod",
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-
-        stagingFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: "staging",
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-
-        devFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: seedData1.environment.slug,
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-      }
-
-      const devImportFromStage = await createSecretImport({
-        authToken: jwtAuthToken,
-        secretPath: testSuitePath,
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        importPath: testSuitePath,
-        importEnv: "staging"
-      });
-
-      const stageImportFromProd = await createSecretImport({
-        authToken: jwtAuthToken,
-        secretPath: testSuitePath,
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        importPath: testSuitePath,
-        importEnv: "prod"
-      });
-
-      return async () => {
-        await deleteSecretImport({
-          id: stageImportFromProd.id,
-          workspaceId: seedData1.projectV3.id,
-          environmentSlug: "staging",
-          secretPath: testSuitePath,
-          authToken: jwtAuthToken
-        });
-
-        await deleteSecretImport({
-          id: devImportFromStage.id,
-          workspaceId: seedData1.projectV3.id,
-          environmentSlug: seedData1.environment.slug,
-          secretPath: testSuitePath,
-          authToken: jwtAuthToken
-        });
-
-        if (prodFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: prodFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: "prod"
-          });
-        }
-
-        if (stagingFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: stagingFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: "staging"
-          });
-        }
-
-        if (devFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: devFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: seedData1.environment.slug
-          });
-        }
-      };
-    });
-
-    test("Check one level imported secret exist", async () => {
-      await createSecretV2({
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY",
-        value: "stage-value"
-      });
-
-      const secret = await getSecretByNameV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY"
-      });
-
-      expect(secret.secretKey).toBe("STAGING_KEY");
-      expect(secret.secretValue).toBe("stage-value");
-
-      const listSecrets = await getSecretsV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken
-      });
-      expect(listSecrets.imports).toEqual(
-        expect.arrayContaining([
-          expect.objectContaining({
-            secrets: expect.arrayContaining([
-              expect.objectContaining({
-                secretKey: "STAGING_KEY",
-                secretValue: "stage-value"
-              })
-            ])
-          })
-        ])
-      );
-
-      await deleteSecretV2({
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY"
-      });
-    });
-
-    test("Check two level imported secret exist", async () => {
-      await createSecretV2({
-        environmentSlug: "prod",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY",
-        value: "prod-value"
-      });
-
-      const secret = await getSecretByNameV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY"
-      });
-
-      expect(secret.secretKey).toBe("PROD_KEY");
-      expect(secret.secretValue).toBe("prod-value");
-
-      const listSecrets = await getSecretsV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken
|
|
||||||
});
|
|
||||||
expect(listSecrets.imports).toEqual(
|
|
||||||
expect.arrayContaining([
|
|
||||||
expect.objectContaining({
|
|
||||||
secrets: expect.arrayContaining([
|
|
||||||
expect.objectContaining({
|
|
||||||
secretKey: "PROD_KEY",
|
|
||||||
secretValue: "prod-value"
|
|
||||||
})
|
|
||||||
])
|
|
||||||
})
|
|
||||||
])
|
|
||||||
);
|
|
||||||
|
|
||||||
await deleteSecretV2({
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "PROD_KEY"
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
// dev <- stage, dev <- prod
|
|
||||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
|
||||||
"Secret import multiple destination to one source pattern testing - %path",
|
|
||||||
({ path: testSuitePath }) => {
|
|
||||||
beforeAll(async () => {
|
|
||||||
let prodFolder: { id: string };
|
|
||||||
let stagingFolder: { id: string };
|
|
||||||
let devFolder: { id: string };
|
|
||||||
|
|
||||||
if (testSuitePath !== "/") {
|
|
||||||
prodFolder = await createFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: "/",
|
|
||||||
name: "deep"
|
|
||||||
});
|
|
||||||
|
|
||||||
stagingFolder = await createFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
environmentSlug: "staging",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: "/",
|
|
||||||
name: "deep"
|
|
||||||
});
|
|
||||||
|
|
||||||
devFolder = await createFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: "/",
|
|
||||||
name: "deep"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const devImportFromStage = await createSecretImport({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
importPath: testSuitePath,
|
|
||||||
importEnv: "staging"
|
|
||||||
});
|
|
||||||
|
|
||||||
const devImportFromProd = await createSecretImport({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
importPath: testSuitePath,
|
|
||||||
importEnv: "prod"
|
|
||||||
});
|
|
||||||
|
|
||||||
return async () => {
|
|
||||||
await deleteSecretImport({
|
|
||||||
id: devImportFromProd.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken
|
|
||||||
});
|
|
||||||
|
|
||||||
await deleteSecretImport({
|
|
||||||
id: devImportFromStage.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken
|
|
||||||
});
|
|
||||||
|
|
||||||
if (prodFolder) {
|
|
||||||
await deleteFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: "/",
|
|
||||||
id: prodFolder.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: "prod"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (stagingFolder) {
|
|
||||||
await deleteFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: "/",
|
|
||||||
id: stagingFolder.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: "staging"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (devFolder) {
|
|
||||||
await deleteFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: "/",
|
|
||||||
id: devFolder.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: seedData1.environment.slug
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
test("Check imported secret exist", async () => {
|
|
||||||
await createSecretV2({
|
|
||||||
environmentSlug: "staging",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "STAGING_KEY",
|
|
||||||
value: "stage-value"
|
|
||||||
});
|
|
||||||
|
|
||||||
await createSecretV2({
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "PROD_KEY",
|
|
||||||
value: "prod-value"
|
|
||||||
});
|
|
||||||
|
|
||||||
const secret = await getSecretByNameV2({
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "STAGING_KEY"
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(secret.secretKey).toBe("STAGING_KEY");
|
|
||||||
expect(secret.secretValue).toBe("stage-value");
|
|
||||||
|
|
||||||
const listSecrets = await getSecretsV2({
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken
|
|
||||||
});
|
|
||||||
expect(listSecrets.imports).toEqual(
|
|
||||||
expect.arrayContaining([
|
|
||||||
expect.objectContaining({
|
|
||||||
secrets: expect.arrayContaining([
|
|
||||||
expect.objectContaining({
|
|
||||||
secretKey: "STAGING_KEY",
|
|
||||||
secretValue: "stage-value"
|
|
||||||
})
|
|
||||||
])
|
|
||||||
}),
|
|
||||||
expect.objectContaining({
|
|
||||||
secrets: expect.arrayContaining([
|
|
||||||
expect.objectContaining({
|
|
||||||
secretKey: "PROD_KEY",
|
|
||||||
secretValue: "prod-value"
|
|
||||||
})
|
|
||||||
])
|
|
||||||
})
|
|
||||||
])
|
|
||||||
);
|
|
||||||
|
|
||||||
await deleteSecretV2({
|
|
||||||
environmentSlug: "staging",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "STAGING_KEY"
|
|
||||||
});
|
|
||||||
await deleteSecretV2({
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "PROD_KEY"
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
// dev -> stage, prod
|
|
||||||
describe.each([{ path: "/" }, { path: "/deep" }])(
|
|
||||||
"Secret import one source to multiple destination pattern testing - %path",
|
|
||||||
({ path: testSuitePath }) => {
|
|
||||||
beforeAll(async () => {
|
|
||||||
let prodFolder: { id: string };
|
|
||||||
let stagingFolder: { id: string };
|
|
||||||
let devFolder: { id: string };
|
|
||||||
|
|
||||||
if (testSuitePath !== "/") {
|
|
||||||
prodFolder = await createFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: "/",
|
|
||||||
name: "deep"
|
|
||||||
});
|
|
||||||
|
|
||||||
stagingFolder = await createFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
environmentSlug: "staging",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: "/",
|
|
||||||
name: "deep"
|
|
||||||
});
|
|
||||||
|
|
||||||
devFolder = await createFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: "/",
|
|
||||||
name: "deep"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const stageImportFromDev = await createSecretImport({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
environmentSlug: "staging",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
importPath: testSuitePath,
|
|
||||||
importEnv: seedData1.environment.slug
|
|
||||||
});
|
|
||||||
|
|
||||||
const prodImportFromDev = await createSecretImport({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
importPath: testSuitePath,
|
|
||||||
importEnv: seedData1.environment.slug
|
|
||||||
});
|
|
||||||
|
|
||||||
return async () => {
|
|
||||||
await deleteSecretImport({
|
|
||||||
id: prodImportFromDev.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: "prod",
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken
|
|
||||||
});
|
|
||||||
|
|
||||||
await deleteSecretImport({
|
|
||||||
id: stageImportFromDev.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: "staging",
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken
|
|
||||||
});
|
|
||||||
|
|
||||||
if (prodFolder) {
|
|
||||||
await deleteFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: "/",
|
|
||||||
id: prodFolder.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: "prod"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (stagingFolder) {
|
|
||||||
await deleteFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: "/",
|
|
||||||
id: stagingFolder.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: "staging"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (devFolder) {
|
|
||||||
await deleteFolder({
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
secretPath: "/",
|
|
||||||
id: devFolder.id,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
environmentSlug: seedData1.environment.slug
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
test("Check imported secret exist", async () => {
|
|
||||||
await createSecretV2({
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "STAGING_KEY",
|
|
||||||
value: "stage-value"
|
|
||||||
});
|
|
||||||
|
|
||||||
await createSecretV2({
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "PROD_KEY",
|
|
||||||
value: "prod-value"
|
|
||||||
});
|
|
||||||
|
|
||||||
const stagingSecret = await getSecretByNameV2({
|
|
||||||
environmentSlug: "staging",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "STAGING_KEY"
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
|
|
||||||
expect(stagingSecret.secretValue).toBe("stage-value");
|
|
||||||
|
|
||||||
const prodSecret = await getSecretByNameV2({
|
|
||||||
environmentSlug: "prod",
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "PROD_KEY"
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(prodSecret.secretKey).toBe("PROD_KEY");
|
|
||||||
expect(prodSecret.secretValue).toBe("prod-value");
|
|
||||||
|
|
||||||
await deleteSecretV2({
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "STAGING_KEY"
|
|
||||||
});
|
|
||||||
await deleteSecretV2({
|
|
||||||
environmentSlug: seedData1.environment.slug,
|
|
||||||
workspaceId: seedData1.projectV3.id,
|
|
||||||
secretPath: testSuitePath,
|
|
||||||
authToken: jwtAuthToken,
|
|
||||||
key: "PROD_KEY"
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
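The hunk above loosens the delete-import assertions from exact literals to vitest's asymmetric matchers. As a minimal self-contained sketch of how those matchers behave (the payload object here is hypothetical, not taken from the suite):

import { describe, expect, test } from "vitest";

describe("asymmetric matcher sketch", () => {
  test("objectContaining ignores extra keys; any(String) ignores the exact value", () => {
    // hypothetical payload shaped like the deleted-import response asserted above
    const deletedImport = {
      id: "imp_123",
      importPath: "/deep",
      importEnv: { name: "Production", slug: "prod", id: "env_456" }
    };

    expect(deletedImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: expect.any(String), // now matches "/" as well as "/deep"
        importEnv: expect.objectContaining({ slug: expect.any(String) })
      })
    );
  });
});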
@@ -1,406 +0,0 @@
-import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
-import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
-import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
-
-import { seedData1 } from "@app/db/seed-data";
-
-// dev <- stage <- prod
-describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
-  "Secret replication waterfall pattern testing - %secretPath",
-  ({ secretPath: testSuitePath }) => {
-    beforeAll(async () => {
-      let prodFolder: { id: string };
-      let stagingFolder: { id: string };
-      let devFolder: { id: string };
-
-      if (testSuitePath !== "/") {
-        prodFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: "prod",
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-
-        stagingFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: "staging",
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-
-        devFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: seedData1.environment.slug,
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-      }
-
-      const devImportFromStage = await createSecretImport({
-        authToken: jwtAuthToken,
-        secretPath: testSuitePath,
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        importPath: testSuitePath,
-        importEnv: "staging",
-        isReplication: true
-      });
-
-      const stageImportFromProd = await createSecretImport({
-        authToken: jwtAuthToken,
-        secretPath: testSuitePath,
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        importPath: testSuitePath,
-        importEnv: "prod",
-        isReplication: true
-      });
-
-      return async () => {
-        await deleteSecretImport({
-          id: stageImportFromProd.id,
-          workspaceId: seedData1.projectV3.id,
-          environmentSlug: "staging",
-          secretPath: testSuitePath,
-          authToken: jwtAuthToken
-        });
-
-        await deleteSecretImport({
-          id: devImportFromStage.id,
-          workspaceId: seedData1.projectV3.id,
-          environmentSlug: seedData1.environment.slug,
-          secretPath: testSuitePath,
-          authToken: jwtAuthToken
-        });
-
-        if (prodFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: prodFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: "prod"
-          });
-        }
-
-        if (stagingFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: stagingFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: "staging"
-          });
-        }
-
-        if (devFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: devFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: seedData1.environment.slug
-          });
-        }
-      };
-    });
-
-    test("Check one level imported secret exist", async () => {
-      await createSecretV2({
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY",
-        value: "stage-value"
-      });
-
-      // wait for 10 second for replication to finish
-      await new Promise((resolve) => {
-        setTimeout(resolve, 10000); // time to breathe for db
-      });
-
-      const secret = await getSecretByNameV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY"
-      });
-
-      expect(secret.secretKey).toBe("STAGING_KEY");
-      expect(secret.secretValue).toBe("stage-value");
-
-      const listSecrets = await getSecretsV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken
-      });
-
-      expect(listSecrets.imports).toEqual(
-        expect.arrayContaining([
-          expect.objectContaining({
-            secrets: expect.arrayContaining([
-              expect.objectContaining({
-                secretKey: "STAGING_KEY",
-                secretValue: "stage-value"
-              })
-            ])
-          })
-        ])
-      );
-
-      await deleteSecretV2({
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY"
-      });
-    });
-
-    test("Check two level imported secret exist", async () => {
-      await createSecretV2({
-        environmentSlug: "prod",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY",
-        value: "prod-value"
-      });
-
-      // wait for 10 second for replication to finish
-      await new Promise((resolve) => {
-        setTimeout(resolve, 10000); // time to breathe for db
-      });
-
-      const secret = await getSecretByNameV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY"
-      });
-
-      expect(secret.secretKey).toBe("PROD_KEY");
-      expect(secret.secretValue).toBe("prod-value");
-
-      const listSecrets = await getSecretsV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken
-      });
-      expect(listSecrets.imports).toEqual(
-        expect.arrayContaining([
-          expect.objectContaining({
-            secrets: expect.arrayContaining([
-              expect.objectContaining({
-                secretKey: "PROD_KEY",
-                secretValue: "prod-value"
-              })
-            ])
-          })
-        ])
-      );
-
-      await deleteSecretV2({
-        environmentSlug: "prod",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY"
-      });
-    });
-  },
-  { timeout: 30000 }
-);
-
-// dev <- stage, dev <- prod
-describe.each([{ path: "/" }, { path: "/deep" }])(
-  "Secret replication 1-N pattern testing - %path",
-  ({ path: testSuitePath }) => {
-    beforeAll(async () => {
-      let prodFolder: { id: string };
-      let stagingFolder: { id: string };
-      let devFolder: { id: string };
-
-      if (testSuitePath !== "/") {
-        prodFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: "prod",
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-
-        stagingFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: "staging",
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-
-        devFolder = await createFolder({
-          authToken: jwtAuthToken,
-          environmentSlug: seedData1.environment.slug,
-          workspaceId: seedData1.projectV3.id,
-          secretPath: "/",
-          name: "deep"
-        });
-      }
-
-      const devImportFromStage = await createSecretImport({
-        authToken: jwtAuthToken,
-        secretPath: testSuitePath,
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        importPath: testSuitePath,
-        importEnv: "staging",
-        isReplication: true
-      });
-
-      const devImportFromProd = await createSecretImport({
-        authToken: jwtAuthToken,
-        secretPath: testSuitePath,
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        importPath: testSuitePath,
-        importEnv: "prod",
-        isReplication: true
-      });
-
-      return async () => {
-        await deleteSecretImport({
-          id: devImportFromProd.id,
-          workspaceId: seedData1.projectV3.id,
-          environmentSlug: seedData1.environment.slug,
-          secretPath: testSuitePath,
-          authToken: jwtAuthToken
-        });
-
-        await deleteSecretImport({
-          id: devImportFromStage.id,
-          workspaceId: seedData1.projectV3.id,
-          environmentSlug: seedData1.environment.slug,
-          secretPath: testSuitePath,
-          authToken: jwtAuthToken
-        });
-
-        if (prodFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: prodFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: "prod"
-          });
-        }
-
-        if (stagingFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: stagingFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: "staging"
-          });
-        }
-
-        if (devFolder) {
-          await deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id: devFolder.id,
-            workspaceId: seedData1.projectV3.id,
-            environmentSlug: seedData1.environment.slug
-          });
-        }
-      };
-    });
-
-    test("Check imported secret exist", async () => {
-      await createSecretV2({
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY",
-        value: "stage-value"
-      });
-
-      await createSecretV2({
-        environmentSlug: "prod",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY",
-        value: "prod-value"
-      });
-
-      // wait for 10 second for replication to finish
-      await new Promise((resolve) => {
-        setTimeout(resolve, 10000); // time to breathe for db
-      });
-
-      const secret = await getSecretByNameV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY"
-      });
-
-      expect(secret.secretKey).toBe("STAGING_KEY");
-      expect(secret.secretValue).toBe("stage-value");
-
-      const listSecrets = await getSecretsV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken
-      });
-      expect(listSecrets.imports).toEqual(
-        expect.arrayContaining([
-          expect.objectContaining({
-            secrets: expect.arrayContaining([
-              expect.objectContaining({
-                secretKey: "STAGING_KEY",
-                secretValue: "stage-value"
-              })
-            ])
-          }),
-          expect.objectContaining({
-            secrets: expect.arrayContaining([
-              expect.objectContaining({
-                secretKey: "PROD_KEY",
-                secretValue: "prod-value"
-              })
-            ])
-          })
-        ])
-      );
-
-      await deleteSecretV2({
-        environmentSlug: "staging",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "STAGING_KEY"
-      });
-      await deleteSecretV2({
-        environmentSlug: "prod",
-        workspaceId: seedData1.projectV3.id,
-        secretPath: testSuitePath,
-        authToken: jwtAuthToken,
-        key: "PROD_KEY"
-      });
-    });
-  },
-  { timeout: 30000 }
-);
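The replication suites deleted above gate every assertion on a fixed `setTimeout(resolve, 10000)` sleep. A polling wait is a common alternative; the sketch below is only an illustration (`waitFor` is a hypothetical helper, not part of these test utils), retrying an async assertion until it passes or a deadline expires:

// Hypothetical polling helper: retry an async check until it stops throwing
// or the deadline expires, instead of always sleeping the full 10 seconds.
const waitFor = async <T>(
  check: () => Promise<T>,
  timeoutMs = 10000,
  intervalMs = 500
): Promise<T> => {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    try {
      return await check(); // succeeds as soon as replication has landed
    } catch (err) {
      if (Date.now() >= deadline) throw err; // surface the last failure
      // eslint-disable-next-line no-await-in-loop
      await new Promise((resolve) => {
        setTimeout(resolve, intervalMs);
      });
    }
  }
};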
@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
|
|||||||
authorization: `Bearer ${serviceToken}`
|
authorization: `Bearer ${serviceToken}`
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
expect(fetchSecrets.statusCode).toBe(403);
|
expect(fetchSecrets.statusCode).toBe(401);
|
||||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||||
await deleteServiceToken();
|
await deleteServiceToken();
|
||||||
});
|
});
|
||||||
@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
|
|||||||
authorization: `Bearer ${serviceToken}`
|
authorization: `Bearer ${serviceToken}`
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
expect(fetchSecrets.statusCode).toBe(403);
|
expect(fetchSecrets.statusCode).toBe(401);
|
||||||
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
expect(fetchSecrets.json().error).toBe("PermissionDenied");
|
||||||
await deleteServiceToken();
|
await deleteServiceToken();
|
||||||
});
|
});
|
||||||
@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
|
|||||||
authorization: `Bearer ${serviceToken}`
|
authorization: `Bearer ${serviceToken}`
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
expect(writeSecrets.statusCode).toBe(403);
|
expect(writeSecrets.statusCode).toBe(401);
|
||||||
expect(writeSecrets.json().error).toBe("PermissionDenied");
|
expect(writeSecrets.json().error).toBe("PermissionDenied");
|
||||||
|
|
||||||
// but read access should still work fine
|
// but read access should still work fine
|
||||||
|
@@ -1,86 +0,0 @@
-import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
-import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
-
-import { seedData1 } from "@app/db/seed-data";
-
-describe("Secret Recursive Testing", async () => {
-  const projectId = seedData1.projectV3.id;
-  const folderAndSecretNames = [
-    { name: "deep1", path: "/", expectedSecretCount: 4 },
-    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
-    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
-    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
-  ];
-
-  beforeAll(async () => {
-    const rootFolderIds: string[] = [];
-    for (const folder of folderAndSecretNames) {
-      // eslint-disable-next-line no-await-in-loop
-      const createdFolder = await createFolder({
-        authToken: jwtAuthToken,
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: folder.path,
-        name: folder.name
-      });
-
-      if (folder.path === "/") {
-        rootFolderIds.push(createdFolder.id);
-      }
-      // eslint-disable-next-line no-await-in-loop
-      await createSecretV2({
-        secretPath: folder.path,
-        authToken: jwtAuthToken,
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        key: folder.name,
-        value: folder.name
-      });
-    }
-
-    return async () => {
-      await Promise.all(
-        rootFolderIds.map((id) =>
-          deleteFolder({
-            authToken: jwtAuthToken,
-            secretPath: "/",
-            id,
-            workspaceId: projectId,
-            environmentSlug: "prod"
-          })
-        )
-      );
-
-      await deleteSecretV2({
-        authToken: jwtAuthToken,
-        secretPath: "/",
-        workspaceId: projectId,
-        environmentSlug: "prod",
-        key: folderAndSecretNames[0].name
-      });
-    };
-  });
-
-  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
-    const secrets = await getSecretsV2({
-      authToken: jwtAuthToken,
-      secretPath: path,
-      workspaceId: projectId,
-      environmentSlug: "prod",
-      recursive: true
-    });
-
-    expect(secrets.secrets.length).toEqual(expectedSecretCount);
-    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
-      folderAndSecretNames
-        .filter((el) => el.path.startsWith(path))
-        .sort((a, b) => a.name.localeCompare(b.name))
-        .map((el) =>
-          expect.objectContaining({
-            secretKey: el.name,
-            secretValue: el.name
-          })
-        )
-    );
-  });
-});
@@ -1,344 +0,0 @@
-import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
-import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
-import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
-
-import { seedData1 } from "@app/db/seed-data";
-
-describe("Secret expansion", () => {
-  const projectId = seedData1.projectV3.id;
-
-  beforeAll(async () => {
-    const prodRootFolder = await createFolder({
-      authToken: jwtAuthToken,
-      environmentSlug: "prod",
-      workspaceId: projectId,
-      secretPath: "/",
-      name: "deep"
-    });
-
-    await createFolder({
-      authToken: jwtAuthToken,
-      environmentSlug: "prod",
-      workspaceId: projectId,
-      secretPath: "/deep",
-      name: "nested"
-    });
-
-    return async () => {
-      await deleteFolder({
-        authToken: jwtAuthToken,
-        secretPath: "/",
-        id: prodRootFolder.id,
-        workspaceId: projectId,
-        environmentSlug: "prod"
-      });
-    };
-  });
-
-  test("Local secret reference", async () => {
-    const secrets = [
-      {
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        secretPath: "/",
-        authToken: jwtAuthToken,
-        key: "HELLO",
-        value: "world"
-      },
-      {
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        secretPath: "/",
-        authToken: jwtAuthToken,
-        key: "TEST",
-        // eslint-disable-next-line
-        value: "hello ${HELLO}"
-      }
-    ];
-
-    for (const secret of secrets) {
-      // eslint-disable-next-line no-await-in-loop
-      await createSecretV2(secret);
-    }
-
-    const expandedSecret = await getSecretByNameV2({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      secretPath: "/",
-      authToken: jwtAuthToken,
-      key: "TEST"
-    });
-    expect(expandedSecret.secretValue).toBe("hello world");
-
-    const listSecrets = await getSecretsV2({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
-    expect(listSecrets.secrets).toEqual(
-      expect.arrayContaining([
-        expect.objectContaining({
-          secretKey: "TEST",
-          secretValue: "hello world"
-        })
-      ])
-    );
-
-    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
-  });
-
-  test("Cross environment secret reference", async () => {
-    const secrets = [
-      {
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: "/deep",
-        authToken: jwtAuthToken,
-        key: "DEEP_KEY_1",
-        value: "testing"
-      },
-      {
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: "/deep/nested",
-        authToken: jwtAuthToken,
-        key: "NESTED_KEY_1",
-        value: "reference"
-      },
-      {
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: "/deep/nested",
-        authToken: jwtAuthToken,
-        key: "NESTED_KEY_2",
-        // eslint-disable-next-line
-        value: "secret ${NESTED_KEY_1}"
-      },
-      {
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        secretPath: "/",
-        authToken: jwtAuthToken,
-        key: "KEY",
-        // eslint-disable-next-line
-        value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
-      }
-    ];
-
-    for (const secret of secrets) {
-      // eslint-disable-next-line no-await-in-loop
-      await createSecretV2(secret);
-    }
-
-    const expandedSecret = await getSecretByNameV2({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      secretPath: "/",
-      authToken: jwtAuthToken,
-      key: "KEY"
-    });
-    expect(expandedSecret.secretValue).toBe("hello testing secret reference");
-
-    const listSecrets = await getSecretsV2({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
-    expect(listSecrets.secrets).toEqual(
-      expect.arrayContaining([
-        expect.objectContaining({
-          secretKey: "KEY",
-          secretValue: "hello testing secret reference"
-        })
-      ])
-    );
-
-    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
-  });
-
-  test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
-    const secrets = [
-      {
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: "/deep",
-        authToken: jwtAuthToken,
-        key: "DEEP_KEY_1",
-        value: "testing"
-      },
-      {
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: "/deep/nested",
-        authToken: jwtAuthToken,
-        key: "NESTED_KEY_1",
-        value: "reference"
-      },
-      {
-        environmentSlug: "prod",
-        workspaceId: projectId,
-        secretPath: "/deep/nested",
-        authToken: jwtAuthToken,
-        key: "NESTED_KEY_2",
-        // eslint-disable-next-line
-        value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
-      },
-      {
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        secretPath: "/",
-        authToken: jwtAuthToken,
-        key: "KEY",
-        // eslint-disable-next-line
-        value: "hello world"
-      }
-    ];
-
-    for (const secret of secrets) {
-      // eslint-disable-next-line no-await-in-loop
-      await createSecretV2(secret);
-    }
-
-    const secretImportFromProdToDev = await createSecretImport({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      secretPath: "/",
-      authToken: jwtAuthToken,
-      importEnv: "prod",
-      importPath: "/deep/nested"
-    });
-
-    const listSecrets = await getSecretsV2({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      secretPath: "/",
-      authToken: jwtAuthToken
-    });
-    expect(listSecrets.imports).toEqual(
-      expect.arrayContaining([
-        expect.objectContaining({
-          secretPath: "/deep/nested",
-          environment: "prod",
-          secrets: expect.arrayContaining([
-            expect.objectContaining({
-              secretKey: "NESTED_KEY_1",
-              secretValue: "reference"
-            }),
-            expect.objectContaining({
-              secretKey: "NESTED_KEY_2",
-              secretValue: "secret reference testing"
-            })
-          ])
-        })
-      ])
-    );
-
-    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
-    await deleteSecretImport({
-      environmentSlug: seedData1.environment.slug,
-      workspaceId: projectId,
-      authToken: jwtAuthToken,
-      id: secretImportFromProdToDev.id,
-      secretPath: "/"
-    });
-  });
-
-  test(
-    "Replicated secret import secret expansion on local reference and nested reference",
-    async () => {
-      const secrets = [
-        {
-          environmentSlug: "prod",
-          workspaceId: projectId,
-          secretPath: "/deep",
-          authToken: jwtAuthToken,
-          key: "DEEP_KEY_1",
-          value: "testing"
-        },
-        {
-          environmentSlug: "prod",
-          workspaceId: projectId,
-          secretPath: "/deep/nested",
-          authToken: jwtAuthToken,
-          key: "NESTED_KEY_1",
-          value: "reference"
-        },
-        {
-          environmentSlug: "prod",
-          workspaceId: projectId,
-          secretPath: "/deep/nested",
-          authToken: jwtAuthToken,
-          key: "NESTED_KEY_2",
-          // eslint-disable-next-line
-          value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
-        },
-        {
-          environmentSlug: seedData1.environment.slug,
-          workspaceId: projectId,
-          secretPath: "/",
-          authToken: jwtAuthToken,
-          key: "KEY",
-          // eslint-disable-next-line
-          value: "hello world"
-        }
-      ];
-
-      for (const secret of secrets) {
-        // eslint-disable-next-line no-await-in-loop
-        await createSecretV2(secret);
-      }
-
-      const secretImportFromProdToDev = await createSecretImport({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        secretPath: "/",
-        authToken: jwtAuthToken,
-        importEnv: "prod",
-        importPath: "/deep/nested",
-        isReplication: true
-      });
-
-      // wait for 5 second for replication to finish
-      await new Promise((resolve) => {
-        setTimeout(resolve, 5000); // time to breathe for db
-      });
-
-      const listSecrets = await getSecretsV2({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        secretPath: "/",
-        authToken: jwtAuthToken
-      });
-      expect(listSecrets.imports).toEqual(
-        expect.arrayContaining([
-          expect.objectContaining({
-            secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
-            environment: seedData1.environment.slug,
-            secrets: expect.arrayContaining([
-              expect.objectContaining({
-                secretKey: "NESTED_KEY_1",
-                secretValue: "reference"
-              }),
-              expect.objectContaining({
-                secretKey: "NESTED_KEY_2",
-                secretValue: "secret reference testing"
-              })
-            ])
-          })
-        ])
-      );
-
-      await Promise.all(secrets.map((el) => deleteSecretV2(el)));
-      await deleteSecretImport({
-        environmentSlug: seedData1.environment.slug,
-        workspaceId: projectId,
-        authToken: jwtAuthToken,
-        id: secretImportFromProdToDev.id,
-        secretPath: "/"
-      });
-    },
-    { timeout: 10000 }
-  );
-});
@@ -8,7 +8,6 @@ type TRawSecret = {
   secretComment?: string;
   version: number;
 };
-
 const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
   const createSecretReqBody = {
     workspaceId: seedData1.projectV3.id,
@@ -535,107 +534,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
       );
     });
 
-    test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
-      const updateSharedSecRes = await testServer.inject({
-        method: "PATCH",
-        url: `/api/v3/secrets/batch/raw`,
-        headers: {
-          authorization: `Bearer ${authToken}`
-        },
-        body: {
-          workspaceId: seedData1.projectV3.id,
-          environment: seedData1.environment.slug,
-          secretPath: path,
-          mode: "upsert",
-          secrets: Array.from(Array(5)).map((_e, i) => ({
-            secretKey: `BULK-${secret.key}-${i + 1}`,
-            secretValue: "update-value",
-            secretComment: secret.comment
-          }))
-        }
-      });
-      expect(updateSharedSecRes.statusCode).toBe(200);
-      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
-      expect(updateSharedSecPayload).toHaveProperty("secrets");
-
-      // bulk ones should exist
-      const secrets = await getSecrets(seedData1.environment.slug, path);
-      expect(secrets).toEqual(
-        expect.arrayContaining(
-          Array.from(Array(5)).map((_e, i) =>
-            expect.objectContaining({
-              secretKey: `BULK-${secret.key}-${i + 1}`,
-              secretValue: "update-value",
-              type: SecretType.Shared
-            })
-          )
-        )
-      );
-      await Promise.all(
-        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
-      );
-    });
-
-    test("Bulk upsert secrets in path multiple paths", async () => {
-      const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
-        secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
-        secretValue: "update-value",
-        secretComment: "comment",
-        secretPath: secretTestCases[0].path
-      }));
-      const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
-        secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
-        secretValue: "update-value",
-        secretComment: "comment",
-        secretPath: secretTestCases[1].path
-      }));
-      const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
-
-      const updateSharedSecRes = await testServer.inject({
-        method: "PATCH",
-        url: `/api/v3/secrets/batch/raw`,
-        headers: {
-          authorization: `Bearer ${authToken}`
-        },
-        body: {
-          workspaceId: seedData1.projectV3.id,
-          environment: seedData1.environment.slug,
-          mode: "upsert",
-          secrets: testSecrets
-        }
-      });
-      expect(updateSharedSecRes.statusCode).toBe(200);
-      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
-      expect(updateSharedSecPayload).toHaveProperty("secrets");
-
-      // bulk ones should exist
-      const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
-      expect(firstBatchSecretsOnInfisical).toEqual(
-        expect.arrayContaining(
-          firstBatchSecrets.map((el) =>
-            expect.objectContaining({
-              secretKey: el.secretKey,
-              secretValue: "update-value",
-              type: SecretType.Shared
-            })
-          )
-        )
-      );
-      const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
-      expect(secondBatchSecretsOnInfisical).toEqual(
-        expect.arrayContaining(
-          secondBatchSecrets.map((el) =>
-            expect.objectContaining({
-              secretKey: el.secretKey,
-              secretValue: "update-value",
-              type: SecretType.Shared
-            })
-          )
-        )
-      );
-      await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
-    });
-
     test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
       await Promise.all(
         Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
|
|||||||
},
|
},
|
||||||
body: createSecretReqBody
|
body: createSecretReqBody
|
||||||
});
|
});
|
||||||
expect(createSecRes.statusCode).toBe(404);
|
expect(createSecRes.statusCode).toBe(400);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("Update secret raw", async () => {
|
test("Update secret raw", async () => {
|
||||||
@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
|
|||||||
},
|
},
|
||||||
body: updateSecretReqBody
|
body: updateSecretReqBody
|
||||||
});
|
});
|
||||||
expect(updateSecRes.statusCode).toBe(404);
|
expect(updateSecRes.statusCode).toBe(400);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("Delete secret raw", async () => {
|
test("Delete secret raw", async () => {
|
||||||
@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
|
|||||||
},
|
},
|
||||||
body: deletedSecretReqBody
|
body: deletedSecretReqBody
|
||||||
});
|
});
|
||||||
expect(deletedSecRes.statusCode).toBe(404);
|
expect(deletedSecRes.statusCode).toBe(400);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@@ -1,73 +0,0 @@
-type TFolder = {
-  id: string;
-  name: string;
-};
-
-export const createFolder = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  name: string;
-  authToken: string;
-}) => {
-  const res = await testServer.inject({
-    method: "POST",
-    url: `/api/v1/folders`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      name: dto.name,
-      path: dto.secretPath
-    }
-  });
-  expect(res.statusCode).toBe(200);
-  return res.json().folder as TFolder;
-};
-
-export const deleteFolder = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  id: string;
-  authToken: string;
-}) => {
-  const res = await testServer.inject({
-    method: "DELETE",
-    url: `/api/v1/folders/${dto.id}`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      path: dto.secretPath
-    }
-  });
-  expect(res.statusCode).toBe(200);
-  return res.json().folder as TFolder;
-};
-
-export const listFolders = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  authToken: string;
-}) => {
-  const res = await testServer.inject({
-    method: "GET",
-    url: `/api/v1/folders`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      path: dto.secretPath
-    }
-  });
-  expect(res.statusCode).toBe(200);
-  return res.json().folders as TFolder[];
-};
@@ -1,93 +0,0 @@
-type TSecretImport = {
-  id: string;
-  importEnv: {
-    name: string;
-    slug: string;
-    id: string;
-  };
-  importPath: string;
-};
-
-export const createSecretImport = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  isReplication?: boolean;
-  secretPath: string;
-  importPath: string;
-  importEnv: string;
-  authToken: string;
-}) => {
-  const res = await testServer.inject({
-    method: "POST",
-    url: `/api/v1/secret-imports`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      isReplication: dto.isReplication,
-      path: dto.secretPath,
-      import: {
-        environment: dto.importEnv,
-        path: dto.importPath
-      }
-    }
-  });
-
-  expect(res.statusCode).toBe(200);
-  const payload = JSON.parse(res.payload);
-  expect(payload).toHaveProperty("secretImport");
-  return payload.secretImport as TSecretImport;
-};
-
-export const deleteSecretImport = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  authToken: string;
-  id: string;
-}) => {
-  const res = await testServer.inject({
-    method: "DELETE",
-    url: `/api/v1/secret-imports/${dto.id}`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      path: dto.secretPath
-    }
-  });
-
-  expect(res.statusCode).toBe(200);
-  const payload = JSON.parse(res.payload);
-  expect(payload).toHaveProperty("secretImport");
-  return payload.secretImport as TSecretImport;
-};
-
-export const listSecretImport = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  authToken: string;
-}) => {
-  const res = await testServer.inject({
-    method: "GET",
-    url: `/api/v1/secret-imports`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    query: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      path: dto.secretPath
-    }
-  });
-
-  expect(res.statusCode).toBe(200);
-  const payload = JSON.parse(res.payload);
-  expect(payload).toHaveProperty("secretImports");
-  return payload.secretImports as TSecretImport[];
-};
@@ -1,130 +0,0 @@
-import { SecretType } from "@app/db/schemas";
-
-type TRawSecret = {
-  secretKey: string;
-  secretValue: string;
-  secretComment?: string;
-  version: number;
-};
-
-export const createSecretV2 = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  key: string;
-  value: string;
-  comment?: string;
-  authToken: string;
-  type?: SecretType;
-}) => {
-  const createSecretReqBody = {
-    workspaceId: dto.workspaceId,
-    environment: dto.environmentSlug,
-    type: dto.type || SecretType.Shared,
-    secretPath: dto.secretPath,
-    secretKey: dto.key,
-    secretValue: dto.value,
-    secretComment: dto.comment
-  };
-  const createSecRes = await testServer.inject({
-    method: "POST",
-    url: `/api/v3/secrets/raw/${dto.key}`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: createSecretReqBody
-  });
-  expect(createSecRes.statusCode).toBe(200);
-  const createdSecretPayload = JSON.parse(createSecRes.payload);
-  expect(createdSecretPayload).toHaveProperty("secret");
-  return createdSecretPayload.secret as TRawSecret;
-};
-
-export const deleteSecretV2 = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  key: string;
-  authToken: string;
-}) => {
-  const deleteSecRes = await testServer.inject({
-    method: "DELETE",
-    url: `/api/v3/secrets/raw/${dto.key}`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    body: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      secretPath: dto.secretPath
-    }
-  });
-  expect(deleteSecRes.statusCode).toBe(200);
-  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
-  expect(updatedSecretPayload).toHaveProperty("secret");
-  return updatedSecretPayload.secret as TRawSecret;
-};
-
-export const getSecretByNameV2 = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  key: string;
-  authToken: string;
-}) => {
-  const response = await testServer.inject({
-    method: "GET",
-    url: `/api/v3/secrets/raw/${dto.key}`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    query: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      secretPath: dto.secretPath,
-      expandSecretReferences: "true",
-      include_imports: "true"
-    }
-  });
-  expect(response.statusCode).toBe(200);
-  const payload = JSON.parse(response.payload);
-  expect(payload).toHaveProperty("secret");
-  return payload.secret as TRawSecret;
-};
-
-export const getSecretsV2 = async (dto: {
-  workspaceId: string;
-  environmentSlug: string;
-  secretPath: string;
-  authToken: string;
-  recursive?: boolean;
-}) => {
-  const getSecretsResponse = await testServer.inject({
-    method: "GET",
-    url: `/api/v3/secrets/raw`,
-    headers: {
-      authorization: `Bearer ${dto.authToken}`
-    },
-    query: {
-      workspaceId: dto.workspaceId,
-      environment: dto.environmentSlug,
-      secretPath: dto.secretPath,
-      expandSecretReferences: "true",
-      include_imports: "true",
-      recursive: String(dto.recursive || false)
-    }
-  });
-  expect(getSecretsResponse.statusCode).toBe(200);
-  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
-  expect(getSecretsPayload).toHaveProperty("secrets");
-  expect(getSecretsPayload).toHaveProperty("imports");
-  return getSecretsPayload as {
-    secrets: TRawSecret[];
-    imports: {
-      secretPath: string;
-      environment: string;
-      folderId: string;
-      secrets: TRawSecret[];
-    }[];
-  };
-};
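As with the import helpers, a typical round trip through these removed v3 raw-secret helpers would look roughly like this (a sketch, not part of the diff; fixtures assumed as above):

// Sketch: create, read back, then delete a shared secret.
const created = await createSecretV2({
  workspaceId: seedData1.project.id, // assumed seeded workspace
  environmentSlug: "dev",
  secretPath: "/",
  key: "DB_PASSWORD",
  value: "s3cr3t",
  authToken: jwtAuthToken // assumed JWT from the setup file
});
const fetched = await getSecretByNameV2({
  workspaceId: seedData1.project.id,
  environmentSlug: "dev",
  secretPath: "/",
  key: "DB_PASSWORD",
  authToken: jwtAuthToken
});
expect(fetched.secretValue).toBe(created.secretValue);
await deleteSecretV2({
  workspaceId: seedData1.project.id,
  environmentSlug: "dev",
  secretPath: "/",
  key: "DB_PASSWORD",
  authToken: jwtAuthToken
});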
@ -11,67 +11,37 @@ import { initLogger } from "@app/lib/logger";
|
|||||||
import { main } from "@app/server/app";
|
import { main } from "@app/server/app";
|
||||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||||
|
|
||||||
|
import { mockQueue } from "./mocks/queue";
|
||||||
import { mockSmtpServer } from "./mocks/smtp";
|
import { mockSmtpServer } from "./mocks/smtp";
|
||||||
|
import { mockKeyStore } from "./mocks/keystore";
|
||||||
import { initDbConnection } from "@app/db";
|
import { initDbConnection } from "@app/db";
|
||||||
import { queueServiceFactory } from "@app/queue";
|
|
||||||
import { keyStoreFactory } from "@app/keystore/keystore";
|
|
||||||
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
|
|
||||||
import { buildRedisFromConfig } from "@app/lib/config/redis";
|
|
||||||
|
|
||||||
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
|
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
|
||||||
export default {
|
export default {
|
||||||
name: "knex-env",
|
name: "knex-env",
|
||||||
transformMode: "ssr",
|
transformMode: "ssr",
|
||||||
async setup() {
|
async setup() {
|
||||||
const logger = initLogger();
|
const logger = await initLogger();
|
||||||
const envConfig = initEnvConfig(logger);
|
const cfg = initEnvConfig(logger);
|
||||||
const db = initDbConnection({
|
const db = initDbConnection({
|
||||||
dbConnectionUri: envConfig.DB_CONNECTION_URI,
|
dbConnectionUri: cfg.DB_CONNECTION_URI,
|
||||||
dbRootCert: envConfig.DB_ROOT_CERT
|
dbRootCert: cfg.DB_ROOT_CERT
|
||||||
});
|
});
|
||||||
|
|
||||||
const redis = buildRedisFromConfig(envConfig);
|
|
||||||
await redis.flushdb("SYNC");
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await db.migrate.rollback(
|
|
||||||
{
|
|
||||||
directory: path.join(__dirname, "../src/db/migrations"),
|
|
||||||
extension: "ts",
|
|
||||||
tableName: "infisical_migrations"
|
|
||||||
},
|
|
||||||
true
|
|
||||||
);
|
|
||||||
|
|
||||||
await db.migrate.latest({
|
await db.migrate.latest({
|
||||||
directory: path.join(__dirname, "../src/db/migrations"),
|
directory: path.join(__dirname, "../src/db/migrations"),
|
||||||
extension: "ts",
|
extension: "ts",
|
||||||
tableName: "infisical_migrations"
|
tableName: "infisical_migrations"
|
||||||
});
|
});
|
||||||
|
|
||||||
await db.seed.run({
|
await db.seed.run({
|
||||||
directory: path.join(__dirname, "../src/db/seeds"),
|
directory: path.join(__dirname, "../src/db/seeds"),
|
||||||
extension: "ts"
|
extension: "ts"
|
||||||
});
|
});
|
||||||
|
|
||||||
const smtp = mockSmtpServer();
|
const smtp = mockSmtpServer();
|
||||||
const queue = queueServiceFactory(envConfig, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
|
const queue = mockQueue();
|
||||||
const keyStore = keyStoreFactory(envConfig);
|
const keyStore = mockKeyStore();
|
||||||
|
const server = await main({ db, smtp, logger, queue, keyStore });
|
||||||
const hsmModule = initializeHsmModule(envConfig);
|
|
||||||
hsmModule.initialize();
|
|
||||||
|
|
||||||
const server = await main({
|
|
||||||
db,
|
|
||||||
smtp,
|
|
||||||
logger,
|
|
||||||
queue,
|
|
||||||
keyStore,
|
|
||||||
hsmModule: hsmModule.getModule(),
|
|
||||||
redis,
|
|
||||||
envConfig
|
|
||||||
});
|
|
||||||
|
|
||||||
// @ts-expect-error type
|
// @ts-expect-error type
|
||||||
globalThis.testServer = server;
|
globalThis.testServer = server;
|
||||||
// @ts-expect-error type
|
// @ts-expect-error type
|
||||||
@ -84,16 +54,14 @@ export default {
|
|||||||
organizationId: seedData1.organization.id,
|
organizationId: seedData1.organization.id,
|
||||||
accessVersion: 1
|
accessVersion: 1
|
||||||
},
|
},
|
||||||
envConfig.AUTH_SECRET,
|
cfg.AUTH_SECRET,
|
||||||
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
|
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
|
||||||
);
|
);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// eslint-disable-next-line
|
|
||||||
console.log("[TEST] Error setting up environment", error);
|
console.log("[TEST] Error setting up environment", error);
|
||||||
await db.destroy();
|
await db.destroy();
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
|
|
||||||
// custom setup
|
// custom setup
|
||||||
return {
|
return {
|
||||||
async teardown() {
|
async teardown() {
|
||||||
@ -112,9 +80,6 @@ export default {
|
|||||||
},
|
},
|
||||||
true
|
true
|
||||||
);
|
);
|
||||||
|
|
||||||
await redis.flushdb("ASYNC");
|
|
||||||
redis.disconnect();
|
|
||||||
await db.destroy();
|
await db.destroy();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
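The setup above stores the Fastify app on `globalThis.testServer`, which is what the e2e helper files earlier in this diff rely on. A minimal spec consuming it might look like this (a sketch, not part of the diff; the `/api/status` route is an assumption):

// Sketch: exercising the globally registered test server.
describe("boot", () => {
  test("server responds", async () => {
    const res = await testServer.inject({ method: "GET", url: "/api/status" });
    expect(res.statusCode).toBe(200);
  });
});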
@@ -1,16 +0,0 @@
-nodejs_conf = nodejs_init
-
-.include /usr/local/ssl/fipsmodule.cnf
-
-[nodejs_init]
-providers = provider_sect
-
-[provider_sect]
-default = default_sect
-fips = fips_sect
-
-[default_sect]
-activate = 1
-
-[algorithm_sect]
-default_properties = fips=yes
backend/package-lock.json (generated, 15360 changes): file diff suppressed because it is too large.
@@ -38,42 +38,22 @@
     "build:frontend": "npm run build --prefix ../frontend",
     "start": "node --enable-source-maps dist/main.mjs",
     "type:check": "tsc --noEmit",
-    "lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
+    "lint:fix": "eslint --fix --ext js,ts ./src",
-    "lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
+    "lint": "eslint 'src/**/*.ts'",
-    "test:unit": "vitest run -c vitest.unit.config.ts",
     "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
     "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
     "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
     "generate:component": "tsx ./scripts/create-backend-file.ts",
-    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
+    "generate:schema": "tsx ./scripts/generate-schema-types.ts",
-    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
-    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
-    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
-    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
-    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
-    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
-    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
-    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
+    "migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
-    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
+    "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
-    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
+    "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
-    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
+    "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
-    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
+    "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
-    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
-    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
-    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
-    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
-    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
-    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
-    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
-    "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
-    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
+    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
-    "seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
+    "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
-    "db:reset": "npm run migration:rollback -- --all && npm run migration:latest",
-    "email:dev": "email dev --dir src/services/smtp/emails"
   },
   "keywords": [],
   "author": "",
@@ -90,17 +70,15 @@
     "@types/jsrp": "^0.2.6",
     "@types/libsodium-wrappers": "^0.7.13",
     "@types/lodash.isequal": "^4.5.8",
-    "@types/node": "^20.17.30",
+    "@types/node": "^20.9.5",
     "@types/nodemailer": "^6.4.14",
+    "@types/passport-github": "^1.1.12",
     "@types/passport-google-oauth20": "^2.0.14",
     "@types/pg": "^8.10.9",
     "@types/picomatch": "^2.3.3",
-    "@types/pkcs11js": "^1.0.4",
     "@types/prompt-sync": "^4.2.3",
-    "@types/react": "^19.1.2",
     "@types/resolve": "^1.20.6",
     "@types/safe-regex": "^1.1.6",
-    "@types/sjcl": "^1.0.34",
     "@types/uuid": "^9.0.7",
     "@typescript-eslint/eslint-plugin": "^6.20.0",
     "@typescript-eslint/parser": "^6.20.0",
@@ -117,7 +95,6 @@
     "nodemon": "^3.0.2",
     "pino-pretty": "^10.2.3",
     "prompt-sync": "^4.2.0",
-    "react-email": "4.0.7",
     "rimraf": "^5.0.5",
     "ts-node": "^10.9.2",
     "tsc-alias": "^1.8.8",
@@ -125,126 +102,86 @@
     "tsup": "^8.0.1",
     "tsx": "^4.4.0",
     "typescript": "^5.3.2",
+    "vite-tsconfig-paths": "^4.2.2",
     "vitest": "^1.2.2"
   },
   "dependencies": {
     "@aws-sdk/client-elasticache": "^3.637.0",
     "@aws-sdk/client-iam": "^3.525.0",
     "@aws-sdk/client-kms": "^3.609.0",
-    "@aws-sdk/client-route-53": "^3.810.0",
     "@aws-sdk/client-secrets-manager": "^3.504.0",
     "@aws-sdk/client-sts": "^3.600.0",
     "@casl/ability": "^6.5.0",
-    "@elastic/elasticsearch": "^8.15.0",
     "@fastify/cookie": "^9.3.1",
     "@fastify/cors": "^8.5.0",
     "@fastify/etag": "^5.1.0",
     "@fastify/formbody": "^7.4.0",
     "@fastify/helmet": "^11.1.1",
-    "@fastify/multipart": "8.3.1",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
-    "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
-    "@fastify/static": "^7.0.4",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
-    "@gitbeaker/rest": "^42.5.0",
+    "@node-saml/passport-saml": "^4.0.4",
-    "@google-cloud/kms": "^4.5.0",
-    "@infisical/quic": "^1.0.8",
-    "@node-saml/passport-saml": "^5.0.1",
-    "@octokit/auth-app": "^7.1.1",
-    "@octokit/core": "^5.2.1",
-    "@octokit/plugin-paginate-graphql": "^4.0.1",
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
-    "@octopusdeploy/api-client": "^3.4.1",
-    "@opentelemetry/api": "^1.9.0",
-    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
-    "@opentelemetry/exporter-prometheus": "^0.55.0",
-    "@opentelemetry/instrumentation": "^0.55.0",
-    "@opentelemetry/instrumentation-http": "^0.57.2",
-    "@opentelemetry/resources": "^1.28.0",
-    "@opentelemetry/sdk-metrics": "^1.28.0",
-    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
-    "@react-email/components": "0.0.36",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
     "@sindresorhus/slugify": "1.1.0",
-    "@slack/oauth": "^3.0.2",
+    "@team-plain/typescript-sdk": "^4.6.1",
-    "@slack/web-api": "^7.8.0",
     "@ucast/mongo2js": "^1.3.4",
-    "acme-client": "^5.4.0",
     "ajv": "^8.12.0",
     "argon2": "^0.31.2",
     "aws-sdk": "^2.1553.0",
     "axios": "^1.6.7",
     "axios-retry": "^4.0.0",
     "bcrypt": "^5.1.1",
-    "botbuilder": "^4.23.2",
     "bullmq": "^5.4.2",
     "cassandra-driver": "^4.7.2",
     "connect-redis": "^7.1.1",
     "cron": "^3.1.7",
-    "dd-trace": "^5.40.0",
     "dotenv": "^16.4.1",
-    "fastify": "^4.28.1",
+    "fastify": "^4.26.0",
     "fastify-plugin": "^4.5.1",
     "google-auth-library": "^9.9.0",
     "googleapis": "^137.1.0",
     "handlebars": "^4.7.8",
-    "hdb": "^0.19.10",
     "ioredis": "^5.3.2",
-    "isomorphic-dompurify": "^2.22.0",
     "jmespath": "^0.16.0",
     "jsonwebtoken": "^9.0.2",
     "jsrp": "^0.2.4",
     "jwks-rsa": "^3.1.0",
     "knex": "^3.0.1",
     "ldapjs": "^3.0.7",
-    "ldif": "0.5.1",
     "libsodium-wrappers": "^0.7.13",
     "lodash.isequal": "^4.5.0",
-    "mongodb": "^6.8.1",
     "ms": "^2.1.3",
     "mysql2": "^3.9.8",
-    "nanoid": "^3.3.8",
+    "nanoid": "^3.3.4",
     "nodemailer": "^6.9.9",
-    "oci-sdk": "^2.108.0",
-    "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
-    "otplib": "^12.0.1",
+    "passport-github": "^1.1.0",
     "passport-gitlab2": "^5.0.0",
     "passport-google-oauth20": "^2.0.0",
     "passport-ldapauth": "^3.0.1",
-    "passport-oauth2": "^1.8.0",
     "pg": "^8.11.3",
-    "pg-boss": "^10.1.5",
     "pg-query-stream": "^4.5.3",
     "picomatch": "^3.0.1",
     "pino": "^8.16.2",
-    "pkcs11js": "^2.1.6",
     "pkijs": "^3.2.4",
     "posthog-node": "^3.6.2",
-    "probot": "^13.3.8",
+    "probot": "^13.0.0",
-    "re2": "^1.21.4",
-    "react": "19.1.0",
-    "react-dom": "19.1.0",
     "safe-regex": "^2.1.1",
-    "scim-patch": "^0.8.3",
-    "scim2-parse-filter": "^0.2.10",
-    "sjcl": "^1.0.8",
     "smee-client": "^2.0.0",
-    "snowflake-sdk": "^1.14.0",
     "tedious": "^18.2.1",
     "tweetnacl": "^1.0.3",
     "tweetnacl-util": "^0.15.1",
     "uuid": "^9.0.1",
     "zod": "^3.22.4",
-    "zod-to-json-schema": "^3.24.5"
+    "zod-to-json-schema": "^3.22.4"
   }
 }
@@ -84,23 +84,13 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
   }
 };
 
-const bigIntegerColumns: Record<string, string[]> = {
-  "folder_commits": ["commitId"]
-};
-
 const main = async () => {
   const tables = (
     await db("information_schema.tables")
       .whereRaw("table_schema = current_schema()")
      .select<{ tableName: string }[]>("table_name as tableName")
      .orderBy("table_name")
-  ).filter(
+  ).filter((el) => !el.tableName.includes("_migrations"));
-    (el) =>
-      !el.tableName.includes("_migrations") &&
-      !el.tableName.includes("audit_logs_") &&
-      el.tableName !== "intermediate_audit_logs"
-  );
 
   for (let i = 0; i < tables.length; i += 1) {
     const { tableName } = tables[i];
@@ -113,9 +103,6 @@ const main = async () => {
       const columnName = columnNames[colNum];
       const colInfo = columns[columnName];
       let ztype = getZodPrimitiveType(colInfo.type);
-      if (bigIntegerColumns[tableName]?.includes(columnName)) {
-        ztype = "z.coerce.bigint()";
-      }
       if (["zodBuffer"].includes(ztype)) {
         zodImportSet.add(ztype);
       }
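The removed `bigIntegerColumns` override forced specific Postgres bigint columns to be emitted as coercing bigint schemas instead of whatever `getZodPrimitiveType` returns for the column type. Roughly, the generated field behaved like this (a sketch, not part of the diff; shown for illustration):

// Sketch: effect of the removed override on the generated schema for folder_commits.commitId.
import { z } from "zod";

const commitId = z.coerce.bigint();
commitId.parse("42"); // => 42n (string from the driver coerced to a native bigint)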
@@ -1,103 +0,0 @@
-/* eslint-disable */
-import promptSync from "prompt-sync";
-import { execSync } from "child_process";
-import path from "path";
-import { existsSync } from "fs";
-
-const prompt = promptSync({
-  sigint: true
-});
-
-const sanitizeInputParam = (value: string) => {
-  // Escape double quotes and wrap the entire value in double quotes
-  if (value) {
-    return `"${value.replace(/"/g, '\\"')}"`;
-  }
-  return '""';
-};
-
-const exportDb = () => {
-  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
-  const exportPort = sanitizeInputParam(
-    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
-  );
-  const exportUser = sanitizeInputParam(
-    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
-  );
-  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
-  const exportDatabase = sanitizeInputParam(
-    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
-  );
-
-  // we do not include the audit_log and secret_sharing entries
-  execSync(
-    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
-      __dirname,
-      "../src/db/backup.dump"
-    )}`,
-    { stdio: "inherit" }
-  );
-};
-
-const importDbForOrg = () => {
-  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
-  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
-  const importUser = sanitizeInputParam(
-    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
-  );
-  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
-  const importDatabase = sanitizeInputParam(
-    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
-  );
-  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
-
-  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
-    console.log("File not found, please export the database first.");
-    return;
-  }
-
-  execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
-      __dirname,
-      "../src/db/backup.dump"
-    )}`,
-    { maxBuffer: 1024 * 1024 * 4096 }
-  );
-
-  execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
-  );
-
-  // delete global/instance-level resources not relevant to the organization to migrate
-  // users
-  execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
-  );
-
-  // identities
-  execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
-  );
-
-  // reset slack configuration in superAdmin
-  execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
-  );
-
-  console.log("Organization migrated successfully.");
-};
-
-const main = () => {
-  const action = prompt(
-    "Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
-  );
-  if (action === "1") {
-    exportDb();
-  } else if (action === "2") {
-    importDbForOrg();
-  } else {
-    console.log("Invalid action");
-  }
-};
-
-main();
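The `sanitizeInputParam` helper above guards the interpolated `execSync` commands. Its expected behavior, for reference (a sketch, not part of the diff; values illustrative):

// Sketch: what the removed helper returns for a few inputs.
sanitizeInputParam("localhost"); // => '"localhost"'  (wrapped in double quotes)
sanitizeInputParam('pa"ss');     // => '"pa\\"ss"'    (inner quotes escaped, then wrapped)
sanitizeInputParam("");          // => '""'           (empty input falls back to an empty quoted string)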
backend/src/@types/fastify-zod.d.ts (vendored): 4 lines changed
@@ -1,6 +1,6 @@
 import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
+import { Logger } from "pino";
 
-import { CustomLogger } from "@app/lib/logger/logger";
 import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
 
 declare global {
@@ -8,7 +8,7 @@ declare global {
     RawServerDefault,
     RawRequestDefaultExpression<RawServerDefault>,
     RawReplyDefaultExpression<RawServerDefault>,
-    Readonly<CustomLogger>,
+    Readonly<Logger>,
     ZodTypeProvider
   >;
 
backend/src/@types/fastify.d.ts (vendored): 143 lines changed
@ -1,51 +1,35 @@
|
|||||||
import "fastify";
|
import "fastify";
|
||||||
|
|
||||||
import { Redis } from "ioredis";
|
|
||||||
|
|
||||||
import { TUsers } from "@app/db/schemas";
|
import { TUsers } from "@app/db/schemas";
|
||||||
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
||||||
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
|
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
||||||
import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
|
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
|
||||||
import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
|
||||||
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
|
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
|
||||||
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
|
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
|
||||||
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
|
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
|
||||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
|
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
|
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
|
||||||
import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
|
|
||||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||||
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
|
|
||||||
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
|
|
||||||
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
|
|
||||||
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
|
|
||||||
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||||
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
|
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
|
||||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||||
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
|
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||||
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-types";
|
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
|
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
|
||||||
import { RateLimitConfiguration, TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-types";
|
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-types";
|
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-types";
|
|
||||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||||
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
||||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||||
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
|
|
||||||
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
||||||
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
|
|
||||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||||
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
|
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
|
||||||
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
|
|
||||||
import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
|
|
||||||
import { TSshHostGroupServiceFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-service";
|
|
||||||
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-types";
|
|
||||||
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
|
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
|
||||||
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
||||||
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
|
|
||||||
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
|
||||||
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||||
@ -53,40 +37,25 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
|||||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||||
import { TInternalCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/internal/internal-certificate-authority-service";
|
|
||||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||||
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
|
|
||||||
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
|
|
||||||
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
|
|
||||||
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
|
|
||||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||||
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
|
|
||||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||||
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
||||||
import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
|
|
||||||
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
||||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||||
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
|
|
||||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||||
import { TIdentityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
|
|
||||||
import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
|
|
||||||
import { TIdentityOciAuthServiceFactory } from "@app/services/identity-oci-auth/identity-oci-auth-service";
|
|
||||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||||
import { TIdentityTlsCertAuthServiceFactory } from "@app/services/identity-tls-cert-auth/identity-tls-cert-auth-types";
|
|
||||||
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
|
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
|
||||||
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
|
||||||
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
|
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
|
||||||
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
||||||
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
|
|
||||||
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||||
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
||||||
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
||||||
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
||||||
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
||||||
import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
|
|
||||||
import { TPkiTemplatesServiceFactory } from "@app/services/pki-templates/pki-templates-service";
|
|
||||||
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
||||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||||
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
||||||
@ -99,44 +68,16 @@ import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-
|
|||||||
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
|
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
|
||||||
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
|
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
|
||||||
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
|
||||||
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
|
|
||||||
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
|
||||||
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
|
||||||
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
|
|
||||||
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
|
||||||
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
|
||||||
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
|
|
||||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||||
import { TUserServiceFactory } from "@app/services/user/user-service";
|
import { TUserServiceFactory } from "@app/services/user/user-service";
|
||||||
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
|
||||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||||
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
|
|
||||||
|
|
||||||
declare module "@fastify/request-context" {
|
|
||||||
interface RequestContextData {
|
|
||||||
reqId: string;
|
|
||||||
orgId?: string;
|
|
||||||
identityAuthInfo?: {
|
|
||||||
identityId: string;
|
|
||||||
oidc?: {
|
|
||||||
claims: Record<string, string>;
|
|
||||||
};
|
|
||||||
kubernetes?: {
|
|
||||||
namespace: string;
|
|
||||||
name: string;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
|
|
||||||
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
declare module "fastify" {
|
declare module "fastify" {
|
||||||
interface Session {
|
|
||||||
callbackPort: string;
|
|
||||||
isAdminLogin: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface FastifyRequest {
|
interface FastifyRequest {
|
||||||
realIp: string;
|
realIp: string;
|
||||||
// used for mfa session authentication
|
// used for mfa session authentication
|
||||||
@ -156,31 +97,15 @@ declare module "fastify" {
|
|||||||
rateLimits: RateLimitConfiguration;
|
rateLimits: RateLimitConfiguration;
|
||||||
// passport data
|
// passport data
|
||||||
passportUser: {
|
passportUser: {
|
||||||
isUserCompleted: boolean;
|
isUserCompleted: string;
|
||||||
providerAuthToken: string;
|
providerAuthToken: string;
|
||||||
externalProviderAccessToken?: string;
|
|
||||||
};
|
|
||||||
passportMachineIdentity: {
|
|
||||||
identityId: string;
|
|
||||||
user: {
|
|
||||||
uid: string;
|
|
||||||
mail?: string;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
kmipUser: {
|
|
||||||
projectId: string;
|
|
||||||
clientId: string;
|
|
||||||
name: string;
|
|
||||||
};
|
};
|
||||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>> & {
|
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||||
allowedFields?: TAllowedFields[];
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
interface FastifyInstance {
|
interface FastifyInstance {
|
||||||
redis: Redis;
|
|
||||||
services: {
|
services: {
|
||||||
login: TAuthLoginFactory;
|
login: TAuthLoginFactory;
|
||||||
password: TAuthPasswordFactory;
|
password: TAuthPasswordFactory;
|
||||||
@ -218,14 +143,9 @@ declare module "fastify" {
|
|||||||
identityUa: TIdentityUaServiceFactory;
|
identityUa: TIdentityUaServiceFactory;
|
||||||
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
|
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
|
||||||
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
identityGcpAuth: TIdentityGcpAuthServiceFactory;
|
||||||
identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
|
|
||||||
identityTlsCertAuth: TIdentityTlsCertAuthServiceFactory;
|
|
||||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
      identityAzureAuth: TIdentityAzureAuthServiceFactory;
-     identityOciAuth: TIdentityOciAuthServiceFactory;
      identityOidcAuth: TIdentityOidcAuthServiceFactory;
-     identityJwtAuth: TIdentityJwtAuthServiceFactory;
-     identityLdapAuth: TIdentityLdapAuthServiceFactory;
      accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
      accessApprovalRequest: TAccessApprovalRequestServiceFactory;
      secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@@ -239,15 +159,10 @@ declare module "fastify" {
      auditLogStream: TAuditLogStreamServiceFactory;
      certificate: TCertificateServiceFactory;
      certificateTemplate: TCertificateTemplateServiceFactory;
-     sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
-     sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
-     sshHost: TSshHostServiceFactory;
-     sshHostGroup: TSshHostGroupServiceFactory;
      certificateAuthority: TCertificateAuthorityServiceFactory;
      certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
      certificateEst: TCertificateEstServiceFactory;
      pkiCollection: TPkiCollectionServiceFactory;
-     pkiSubscriber: TPkiSubscriberServiceFactory;
      secretScanning: TSecretScanningServiceFactory;
      license: TLicenseServiceFactory;
      trustedIp: TTrustedIpServiceFactory;
@@ -257,40 +172,16 @@ declare module "fastify" {
      dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
      projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
      identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
-     identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
      secretSharing: TSecretSharingServiceFactory;
      rateLimit: TRateLimitServiceFactory;
      userEngagement: TUserEngagementServiceFactory;
      externalKms: TExternalKmsServiceFactory;
-     hsm: THsmServiceFactory;
      orgAdmin: TOrgAdminServiceFactory;
-     slack: TSlackServiceFactory;
-     workflowIntegration: TWorkflowIntegrationServiceFactory;
-     cmek: TCmekServiceFactory;
-     migration: TExternalMigrationServiceFactory;
-     externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
-     projectTemplate: TProjectTemplateServiceFactory;
-     totp: TTotpServiceFactory;
-     appConnection: TAppConnectionServiceFactory;
-     secretSync: TSecretSyncServiceFactory;
-     kmip: TKmipServiceFactory;
-     kmipOperation: TKmipOperationServiceFactory;
-     gateway: TGatewayServiceFactory;
-     secretRotationV2: TSecretRotationV2ServiceFactory;
-     microsoftTeams: TMicrosoftTeamsServiceFactory;
-     assumePrivileges: TAssumePrivilegeServiceFactory;
-     githubOrgSync: TGithubOrgSyncServiceFactory;
-     folderCommit: TFolderCommitServiceFactory;
-     pit: TPitServiceFactory;
-     secretScanningV2: TSecretScanningV2ServiceFactory;
-     internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
-     pkiTemplate: TPkiTemplatesServiceFactory;
    };
    // this is exclusive use for middlewares in which we need to inject data
    // everywhere else access using service layer
    store: {
      user: Pick<TUserDALFactory, "findById">;
-     kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
    };
  }
}
backend/src/@types/hdb.d.ts (vendored, 4 changes)
@@ -1,4 +0,0 @@
-declare module "hdb" {
-  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
-  function createClient(options): any;
-}
backend/src/@types/knex.d.ts (vendored, 455 changes)
@@ -6,9 +6,6 @@ import {
   TAccessApprovalPoliciesApprovers,
   TAccessApprovalPoliciesApproversInsert,
   TAccessApprovalPoliciesApproversUpdate,
-  TAccessApprovalPoliciesBypassers,
-  TAccessApprovalPoliciesBypassersInsert,
-  TAccessApprovalPoliciesBypassersUpdate,
   TAccessApprovalPoliciesInsert,
   TAccessApprovalPoliciesUpdate,
   TAccessApprovalRequests,
@@ -20,9 +17,6 @@ import {
   TApiKeys,
   TApiKeysInsert,
   TApiKeysUpdate,
-  TAppConnections,
-  TAppConnectionsInsert,
-  TAppConnectionsUpdate,
   TAuditLogs,
   TAuditLogsInsert,
   TAuditLogStreams,
@@ -71,45 +65,15 @@ import {
   TDynamicSecrets,
   TDynamicSecretsInsert,
   TDynamicSecretsUpdate,
-  TExternalCertificateAuthorities,
-  TExternalCertificateAuthoritiesInsert,
-  TExternalCertificateAuthoritiesUpdate,
-  TExternalGroupOrgRoleMappings,
-  TExternalGroupOrgRoleMappingsInsert,
-  TExternalGroupOrgRoleMappingsUpdate,
   TExternalKms,
   TExternalKmsInsert,
   TExternalKmsUpdate,
-  TFolderCheckpointResources,
-  TFolderCheckpointResourcesInsert,
-  TFolderCheckpointResourcesUpdate,
-  TFolderCheckpoints,
-  TFolderCheckpointsInsert,
-  TFolderCheckpointsUpdate,
-  TFolderCommitChanges,
-  TFolderCommitChangesInsert,
-  TFolderCommitChangesUpdate,
-  TFolderCommits,
-  TFolderCommitsInsert,
-  TFolderCommitsUpdate,
-  TFolderTreeCheckpointResources,
-  TFolderTreeCheckpointResourcesInsert,
-  TFolderTreeCheckpointResourcesUpdate,
-  TFolderTreeCheckpoints,
-  TFolderTreeCheckpointsInsert,
-  TFolderTreeCheckpointsUpdate,
-  TGateways,
-  TGatewaysInsert,
-  TGatewaysUpdate,
   TGitAppInstallSessions,
   TGitAppInstallSessionsInsert,
   TGitAppInstallSessionsUpdate,
   TGitAppOrg,
   TGitAppOrgInsert,
   TGitAppOrgUpdate,
-  TGithubOrgSyncConfigs,
-  TGithubOrgSyncConfigsInsert,
-  TGithubOrgSyncConfigsUpdate,
   TGroupProjectMembershipRoles,
   TGroupProjectMembershipRolesInsert,
   TGroupProjectMembershipRolesUpdate,
@@ -125,9 +89,6 @@ import {
   TIdentityAccessTokens,
   TIdentityAccessTokensInsert,
   TIdentityAccessTokensUpdate,
-  TIdentityAlicloudAuths,
-  TIdentityAlicloudAuthsInsert,
-  TIdentityAlicloudAuthsUpdate,
   TIdentityAwsAuths,
   TIdentityAwsAuthsInsert,
   TIdentityAwsAuthsUpdate,
@@ -137,18 +98,9 @@ import {
   TIdentityGcpAuths,
   TIdentityGcpAuthsInsert,
   TIdentityGcpAuthsUpdate,
-  TIdentityJwtAuths,
-  TIdentityJwtAuthsInsert,
-  TIdentityJwtAuthsUpdate,
   TIdentityKubernetesAuths,
   TIdentityKubernetesAuthsInsert,
   TIdentityKubernetesAuthsUpdate,
-  TIdentityMetadata,
-  TIdentityMetadataInsert,
-  TIdentityMetadataUpdate,
-  TIdentityOciAuths,
-  TIdentityOciAuthsInsert,
-  TIdentityOciAuthsUpdate,
   TIdentityOidcAuths,
   TIdentityOidcAuthsInsert,
   TIdentityOidcAuthsUpdate,
@@ -164,9 +116,6 @@ import {
   TIdentityProjectMemberships,
   TIdentityProjectMembershipsInsert,
   TIdentityProjectMembershipsUpdate,
-  TIdentityTlsCertAuths,
-  TIdentityTlsCertAuthsInsert,
-  TIdentityTlsCertAuthsUpdate,
   TIdentityTokenAuths,
   TIdentityTokenAuthsInsert,
   TIdentityTokenAuthsUpdate,
@@ -185,24 +134,9 @@ import {
   TIntegrations,
   TIntegrationsInsert,
   TIntegrationsUpdate,
-  TInternalCertificateAuthorities,
-  TInternalCertificateAuthoritiesInsert,
-  TInternalCertificateAuthoritiesUpdate,
   TInternalKms,
   TInternalKmsInsert,
   TInternalKmsUpdate,
-  TKmipClientCertificates,
-  TKmipClientCertificatesInsert,
-  TKmipClientCertificatesUpdate,
-  TKmipClients,
-  TKmipClientsInsert,
-  TKmipClientsUpdate,
-  TKmipOrgConfigs,
-  TKmipOrgConfigsInsert,
-  TKmipOrgConfigsUpdate,
-  TKmipOrgServerCertificates,
-  TKmipOrgServerCertificatesInsert,
-  TKmipOrgServerCertificatesUpdate,
   TKmsKeys,
   TKmsKeysInsert,
   TKmsKeysUpdate,
@@ -227,9 +161,6 @@ import {
   TOrgBots,
   TOrgBotsInsert,
   TOrgBotsUpdate,
-  TOrgGatewayConfig,
-  TOrgGatewayConfigInsert,
-  TOrgGatewayConfigUpdate,
   TOrgMemberships,
   TOrgMembershipsInsert,
   TOrgMembershipsUpdate,
@@ -245,18 +176,12 @@ import {
   TPkiCollections,
   TPkiCollectionsInsert,
   TPkiCollectionsUpdate,
-  TPkiSubscribers,
-  TPkiSubscribersInsert,
-  TPkiSubscribersUpdate,
   TProjectBots,
   TProjectBotsInsert,
   TProjectBotsUpdate,
   TProjectEnvironments,
   TProjectEnvironmentsInsert,
   TProjectEnvironmentsUpdate,
-  TProjectGateways,
-  TProjectGatewaysInsert,
-  TProjectGatewaysUpdate,
   TProjectKeys,
   TProjectKeysInsert,
   TProjectKeysUpdate,
@@ -268,19 +193,7 @@ import {
   TProjectRolesUpdate,
   TProjects,
   TProjectsInsert,
-  TProjectSlackConfigs,
-  TProjectSlackConfigsInsert,
-  TProjectSlackConfigsUpdate,
-  TProjectSplitBackfillIds,
-  TProjectSplitBackfillIdsInsert,
-  TProjectSplitBackfillIdsUpdate,
-  TProjectSshConfigs,
-  TProjectSshConfigsInsert,
-  TProjectSshConfigsUpdate,
   TProjectsUpdate,
-  TProjectTemplates,
-  TProjectTemplatesInsert,
-  TProjectTemplatesUpdate,
   TProjectUserAdditionalPrivilege,
   TProjectUserAdditionalPrivilegeInsert,
   TProjectUserAdditionalPrivilegeUpdate,
@@ -290,9 +203,6 @@ import {
   TRateLimit,
   TRateLimitInsert,
   TRateLimitUpdate,
-  TResourceMetadata,
-  TResourceMetadataInsert,
-  TResourceMetadataUpdate,
   TSamlConfigs,
   TSamlConfigsInsert,
   TSamlConfigsUpdate,
@@ -303,9 +213,6 @@ import {
   TSecretApprovalPoliciesApprovers,
   TSecretApprovalPoliciesApproversInsert,
   TSecretApprovalPoliciesApproversUpdate,
-  TSecretApprovalPoliciesBypassers,
-  TSecretApprovalPoliciesBypassersInsert,
-  TSecretApprovalPoliciesBypassersUpdate,
   TSecretApprovalPoliciesInsert,
   TSecretApprovalPoliciesUpdate,
   TSecretApprovalRequests,
@@ -353,31 +260,10 @@ import {
   TSecretRotations,
   TSecretRotationsInsert,
   TSecretRotationsUpdate,
-  TSecretRotationsV2,
-  TSecretRotationsV2Insert,
-  TSecretRotationsV2Update,
-  TSecretRotationV2SecretMappings,
-  TSecretRotationV2SecretMappingsInsert,
-  TSecretRotationV2SecretMappingsUpdate,
   TSecrets,
-  TSecretScanningConfigs,
-  TSecretScanningConfigsInsert,
-  TSecretScanningConfigsUpdate,
-  TSecretScanningDataSources,
-  TSecretScanningDataSourcesInsert,
-  TSecretScanningDataSourcesUpdate,
-  TSecretScanningFindings,
-  TSecretScanningFindingsInsert,
-  TSecretScanningFindingsUpdate,
   TSecretScanningGitRisks,
   TSecretScanningGitRisksInsert,
   TSecretScanningGitRisksUpdate,
-  TSecretScanningResources,
-  TSecretScanningResourcesInsert,
-  TSecretScanningResourcesUpdate,
-  TSecretScanningScans,
-  TSecretScanningScansInsert,
-  TSecretScanningScansUpdate,
   TSecretSharing,
   TSecretSharingInsert,
   TSecretSharingUpdate,
@@ -395,27 +281,15 @@ import {
   TSecretSnapshotsInsert,
   TSecretSnapshotsUpdate,
   TSecretsUpdate,
-  TSecretsV2,
-  TSecretsV2Insert,
-  TSecretsV2Update,
-  TSecretSyncs,
-  TSecretSyncsInsert,
-  TSecretSyncsUpdate,
   TSecretTagJunction,
   TSecretTagJunctionInsert,
   TSecretTagJunctionUpdate,
   TSecretTags,
   TSecretTagsInsert,
   TSecretTagsUpdate,
-  TSecretV2TagJunction,
-  TSecretV2TagJunctionInsert,
-  TSecretV2TagJunctionUpdate,
   TSecretVersions,
   TSecretVersionsInsert,
   TSecretVersionsUpdate,
-  TSecretVersionsV2,
-  TSecretVersionsV2Insert,
-  TSecretVersionsV2Update,
   TSecretVersionTagJunction,
   TSecretVersionTagJunctionInsert,
   TSecretVersionTagJunctionUpdate,
@@ -425,45 +299,9 @@ import {
   TServiceTokens,
   TServiceTokensInsert,
   TServiceTokensUpdate,
-  TSlackIntegrations,
-  TSlackIntegrationsInsert,
-  TSlackIntegrationsUpdate,
-  TSshCertificateAuthorities,
-  TSshCertificateAuthoritiesInsert,
-  TSshCertificateAuthoritiesUpdate,
-  TSshCertificateAuthoritySecrets,
-  TSshCertificateAuthoritySecretsInsert,
-  TSshCertificateAuthoritySecretsUpdate,
-  TSshCertificateBodies,
-  TSshCertificateBodiesInsert,
-  TSshCertificateBodiesUpdate,
-  TSshCertificates,
-  TSshCertificatesInsert,
-  TSshCertificatesUpdate,
-  TSshCertificateTemplates,
-  TSshCertificateTemplatesInsert,
-  TSshCertificateTemplatesUpdate,
-  TSshHostGroupMemberships,
-  TSshHostGroupMembershipsInsert,
-  TSshHostGroupMembershipsUpdate,
-  TSshHostGroups,
-  TSshHostGroupsInsert,
-  TSshHostGroupsUpdate,
-  TSshHostLoginUserMappings,
-  TSshHostLoginUserMappingsInsert,
-  TSshHostLoginUserMappingsUpdate,
-  TSshHostLoginUsers,
-  TSshHostLoginUsersInsert,
-  TSshHostLoginUsersUpdate,
-  TSshHosts,
-  TSshHostsInsert,
-  TSshHostsUpdate,
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
-  TTotpConfigs,
-  TTotpConfigsInsert,
-  TTotpConfigsUpdate,
   TTrustedIps,
   TTrustedIpsInsert,
   TTrustedIpsUpdate,
@@ -484,31 +322,19 @@ import {
   TUsersUpdate,
   TWebhooks,
   TWebhooksInsert,
-  TWebhooksUpdate,
-  TWorkflowIntegrations,
-  TWorkflowIntegrationsInsert,
-  TWorkflowIntegrationsUpdate
+  TWebhooksUpdate
 } from "@app/db/schemas";
 import {
-  TIdentityLdapAuths,
-  TIdentityLdapAuthsInsert,
-  TIdentityLdapAuthsUpdate
-} from "@app/db/schemas/identity-ldap-auths";
+  TSecretV2TagJunction,
+  TSecretV2TagJunctionInsert,
+  TSecretV2TagJunctionUpdate
+} from "@app/db/schemas/secret-v2-tag-junction";
 import {
-  TMicrosoftTeamsIntegrations,
-  TMicrosoftTeamsIntegrationsInsert,
-  TMicrosoftTeamsIntegrationsUpdate
-} from "@app/db/schemas/microsoft-teams-integrations";
-import {
-  TProjectMicrosoftTeamsConfigs,
-  TProjectMicrosoftTeamsConfigsInsert,
-  TProjectMicrosoftTeamsConfigsUpdate
-} from "@app/db/schemas/project-microsoft-teams-configs";
-import {
-  TSecretReminderRecipients,
-  TSecretReminderRecipientsInsert,
-  TSecretReminderRecipientsUpdate
-} from "@app/db/schemas/secret-reminder-recipients";
+  TSecretVersionsV2,
+  TSecretVersionsV2Insert,
+  TSecretVersionsV2Update
+} from "@app/db/schemas/secret-versions-v2";
+import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
 
 declare module "knex" {
   namespace Knex {
@@ -523,52 +349,6 @@ declare module "knex/types/tables" {
   interface Tables {
     [TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
     [TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
-    [TableName.SshHostGroup]: KnexOriginal.CompositeTableType<
-      TSshHostGroups,
-      TSshHostGroupsInsert,
-      TSshHostGroupsUpdate
-    >;
-    [TableName.SshHostGroupMembership]: KnexOriginal.CompositeTableType<
-      TSshHostGroupMemberships,
-      TSshHostGroupMembershipsInsert,
-      TSshHostGroupMembershipsUpdate
-    >;
-    [TableName.SshHost]: KnexOriginal.CompositeTableType<TSshHosts, TSshHostsInsert, TSshHostsUpdate>;
-    [TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
-      TSshCertificateAuthorities,
-      TSshCertificateAuthoritiesInsert,
-      TSshCertificateAuthoritiesUpdate
-    >;
-    [TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
-      TSshCertificateAuthoritySecrets,
-      TSshCertificateAuthoritySecretsInsert,
-      TSshCertificateAuthoritySecretsUpdate
-    >;
-    [TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
-      TSshCertificateTemplates,
-      TSshCertificateTemplatesInsert,
-      TSshCertificateTemplatesUpdate
-    >;
-    [TableName.SshCertificate]: KnexOriginal.CompositeTableType<
-      TSshCertificates,
-      TSshCertificatesInsert,
-      TSshCertificatesUpdate
-    >;
-    [TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
-      TSshCertificateBodies,
-      TSshCertificateBodiesInsert,
-      TSshCertificateBodiesUpdate
-    >;
-    [TableName.SshHostLoginUser]: KnexOriginal.CompositeTableType<
-      TSshHostLoginUsers,
-      TSshHostLoginUsersInsert,
-      TSshHostLoginUsersUpdate
-    >;
-    [TableName.SshHostLoginUserMapping]: KnexOriginal.CompositeTableType<
-      TSshHostLoginUserMappings,
-      TSshHostLoginUserMappingsInsert,
-      TSshHostLoginUserMappingsUpdate
-    >;
     [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
       TCertificateAuthorities,
       TCertificateAuthoritiesInsert,
@@ -589,16 +369,6 @@ declare module "knex/types/tables" {
      TCertificateAuthorityCrlInsert,
      TCertificateAuthorityCrlUpdate
    >;
-    [TableName.InternalCertificateAuthority]: KnexOriginal.CompositeTableType<
-      TInternalCertificateAuthorities,
-      TInternalCertificateAuthoritiesInsert,
-      TInternalCertificateAuthoritiesUpdate
-    >;
-    [TableName.ExternalCertificateAuthority]: KnexOriginal.CompositeTableType<
-      TExternalCertificateAuthorities,
-      TExternalCertificateAuthoritiesInsert,
-      TExternalCertificateAuthoritiesUpdate
-    >;
    [TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
    [TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
      TCertificateTemplates,
@@ -631,11 +401,6 @@ declare module "knex/types/tables" {
      TPkiCollectionItemsInsert,
      TPkiCollectionItemsUpdate
    >;
-    [TableName.PkiSubscriber]: KnexOriginal.CompositeTableType<
-      TPkiSubscribers,
-      TPkiSubscribersInsert,
-      TPkiSubscribersUpdate
-    >;
    [TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
      TUserGroupMembership,
      TUserGroupMembershipInsert,
@@ -688,11 +453,6 @@ declare module "knex/types/tables" {
    [TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
    [TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
    [TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
-    [TableName.ProjectSshConfig]: KnexOriginal.CompositeTableType<
-      TProjectSshConfigs,
-      TProjectSshConfigsInsert,
-      TProjectSshConfigsUpdate
-    >;
    [TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
      TProjectMemberships,
      TProjectMembershipsInsert,
@@ -777,11 +537,6 @@ declare module "knex/types/tables" {
      TIdentityUniversalAuthsInsert,
      TIdentityUniversalAuthsUpdate
    >;
-    [TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
-      TIdentityMetadata,
-      TIdentityMetadataInsert,
-      TIdentityMetadataUpdate
-    >;
    [TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
      TIdentityKubernetesAuths,
      TIdentityKubernetesAuthsInsert,
@@ -792,16 +547,6 @@ declare module "knex/types/tables" {
      TIdentityGcpAuthsInsert,
      TIdentityGcpAuthsUpdate
    >;
-    [TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
-      TIdentityAlicloudAuths,
-      TIdentityAlicloudAuthsInsert,
-      TIdentityAlicloudAuthsUpdate
-    >;
-    [TableName.IdentityTlsCertAuth]: KnexOriginal.CompositeTableType<
-      TIdentityTlsCertAuths,
-      TIdentityTlsCertAuthsInsert,
-      TIdentityTlsCertAuthsUpdate
-    >;
    [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
      TIdentityAwsAuths,
      TIdentityAwsAuthsInsert,
@@ -812,26 +557,11 @@ declare module "knex/types/tables" {
      TIdentityAzureAuthsInsert,
      TIdentityAzureAuthsUpdate
    >;
-    [TableName.IdentityOciAuth]: KnexOriginal.CompositeTableType<
-      TIdentityOciAuths,
-      TIdentityOciAuthsInsert,
-      TIdentityOciAuthsUpdate
-    >;
    [TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
      TIdentityOidcAuths,
      TIdentityOidcAuthsInsert,
      TIdentityOidcAuthsUpdate
    >;
-    [TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
-      TIdentityJwtAuths,
-      TIdentityJwtAuthsInsert,
-      TIdentityJwtAuthsUpdate
-    >;
-    [TableName.IdentityLdapAuth]: KnexOriginal.CompositeTableType<
-      TIdentityLdapAuths,
-      TIdentityLdapAuthsInsert,
-      TIdentityLdapAuthsUpdate
-    >;
    [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
      TIdentityUaClientSecrets,
      TIdentityUaClientSecretsInsert,
@@ -875,12 +605,6 @@ declare module "knex/types/tables" {
      TAccessApprovalPoliciesApproversUpdate
    >;
 
-    [TableName.AccessApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
-      TAccessApprovalPoliciesBypassers,
-      TAccessApprovalPoliciesBypassersInsert,
-      TAccessApprovalPoliciesBypassersUpdate
-    >;
-
    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
      TAccessApprovalRequests,
      TAccessApprovalRequestsInsert,
@@ -904,11 +628,6 @@ declare module "knex/types/tables" {
      TSecretApprovalPoliciesApproversInsert,
      TSecretApprovalPoliciesApproversUpdate
    >;
-    [TableName.SecretApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
-      TSecretApprovalPoliciesBypassers,
-      TSecretApprovalPoliciesBypassersInsert,
-      TSecretApprovalPoliciesBypassersUpdate
-    >;
    [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
      TSecretApprovalRequests,
      TSecretApprovalRequestsInsert,
@@ -1057,159 +776,5 @@ declare module "knex/types/tables" {
      TKmsKeyVersionsInsert,
      TKmsKeyVersionsUpdate
    >;
-    [TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
-      TSlackIntegrations,
-      TSlackIntegrationsInsert,
-      TSlackIntegrationsUpdate
-    >;
-    [TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
-      TProjectSlackConfigs,
-      TProjectSlackConfigsInsert,
-      TProjectSlackConfigsUpdate
-    >;
-    [TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
-      TWorkflowIntegrations,
-      TWorkflowIntegrationsInsert,
-      TWorkflowIntegrationsUpdate
-    >;
-    [TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
-      TExternalGroupOrgRoleMappings,
-      TExternalGroupOrgRoleMappingsInsert,
-      TExternalGroupOrgRoleMappingsUpdate
-    >;
-    [TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
-      TProjectTemplates,
-      TProjectTemplatesInsert,
-      TProjectTemplatesUpdate
-    >;
-    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
-    [TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
-      TProjectSplitBackfillIds,
-      TProjectSplitBackfillIdsInsert,
-      TProjectSplitBackfillIdsUpdate
-    >;
-    [TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
-      TResourceMetadata,
-      TResourceMetadataInsert,
-      TResourceMetadataUpdate
-    >;
-    [TableName.AppConnection]: KnexOriginal.CompositeTableType<
-      TAppConnections,
-      TAppConnectionsInsert,
-      TAppConnectionsUpdate
-    >;
-    [TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
-    [TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
-    [TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
-      TKmipOrgConfigs,
-      TKmipOrgConfigsInsert,
-      TKmipOrgConfigsUpdate
-    >;
-    [TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
-      TKmipOrgServerCertificates,
-      TKmipOrgServerCertificatesInsert,
-      TKmipOrgServerCertificatesUpdate
-    >;
-    [TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
-      TKmipClientCertificates,
-      TKmipClientCertificatesInsert,
-      TKmipClientCertificatesUpdate
-    >;
-    [TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
-    [TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
-      TProjectGateways,
-      TProjectGatewaysInsert,
-      TProjectGatewaysUpdate
-    >;
-    [TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
-      TOrgGatewayConfig,
-      TOrgGatewayConfigInsert,
-      TOrgGatewayConfigUpdate
-    >;
-    [TableName.SecretRotationV2]: KnexOriginal.CompositeTableType<
-      TSecretRotationsV2,
-      TSecretRotationsV2Insert,
-      TSecretRotationsV2Update
-    >;
-    [TableName.SecretRotationV2SecretMapping]: KnexOriginal.CompositeTableType<
-      TSecretRotationV2SecretMappings,
-      TSecretRotationV2SecretMappingsInsert,
-      TSecretRotationV2SecretMappingsUpdate
-    >;
-    [TableName.MicrosoftTeamsIntegrations]: KnexOriginal.CompositeTableType<
-      TMicrosoftTeamsIntegrations,
-      TMicrosoftTeamsIntegrationsInsert,
-      TMicrosoftTeamsIntegrationsUpdate
-    >;
-    [TableName.ProjectMicrosoftTeamsConfigs]: KnexOriginal.CompositeTableType<
-      TProjectMicrosoftTeamsConfigs,
-      TProjectMicrosoftTeamsConfigsInsert,
-      TProjectMicrosoftTeamsConfigsUpdate
-    >;
-    [TableName.SecretReminderRecipients]: KnexOriginal.CompositeTableType<
-      TSecretReminderRecipients,
-      TSecretReminderRecipientsInsert,
-      TSecretReminderRecipientsUpdate
-    >;
-    [TableName.GithubOrgSyncConfig]: KnexOriginal.CompositeTableType<
-      TGithubOrgSyncConfigs,
-      TGithubOrgSyncConfigsInsert,
-      TGithubOrgSyncConfigsUpdate
-    >;
-    [TableName.FolderCommit]: KnexOriginal.CompositeTableType<
-      TFolderCommits,
-      TFolderCommitsInsert,
-      TFolderCommitsUpdate
-    >;
-    [TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
-      TFolderCommitChanges,
-      TFolderCommitChangesInsert,
-      TFolderCommitChangesUpdate
-    >;
-    [TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
-      TFolderCheckpoints,
-      TFolderCheckpointsInsert,
-      TFolderCheckpointsUpdate
-    >;
-    [TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
-      TFolderCheckpointResources,
-      TFolderCheckpointResourcesInsert,
-      TFolderCheckpointResourcesUpdate
-    >;
-    [TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
-      TFolderTreeCheckpoints,
-      TFolderTreeCheckpointsInsert,
-      TFolderTreeCheckpointsUpdate
-    >;
-    [TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
-      TFolderTreeCheckpointResources,
-      TFolderTreeCheckpointResourcesInsert,
-      TFolderTreeCheckpointResourcesUpdate
-    >;
-    [TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
-      TSecretScanningDataSources,
-      TSecretScanningDataSourcesInsert,
-      TSecretScanningDataSourcesUpdate
-    >;
-    [TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
-      TSecretScanningResources,
-      TSecretScanningResourcesInsert,
-      TSecretScanningResourcesUpdate
-    >;
-    [TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
-      TSecretScanningScans,
-      TSecretScanningScansInsert,
-      TSecretScanningScansUpdate
-    >;
-    [TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
-      TSecretScanningFindings,
-      TSecretScanningFindingsInsert,
-      TSecretScanningFindingsUpdate
-    >;
-    [TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
-      TSecretScanningConfigs,
-      TSecretScanningConfigsInsert,
-      TSecretScanningConfigsUpdate
-    >;
  }
}
backend/src/@types/ldif.d.ts (vendored, 4 changes)
@@ -1,4 +0,0 @@
-declare module "ldif" {
-  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
-  function parse(input: string, ...args: any[]): any;
-}
(deleted file)
@@ -1,105 +0,0 @@
-import path from "node:path";
-
-import dotenv from "dotenv";
-import { Knex } from "knex";
-import { Logger } from "pino";
-
-import { PgSqlLock } from "./keystore/keystore";
-
-dotenv.config();
-
-type TArgs = {
-  auditLogDb?: Knex;
-  applicationDb: Knex;
-  logger: Logger;
-};
-
-const isProduction = process.env.NODE_ENV === "production";
-const migrationConfig = {
-  directory: path.join(__dirname, "./db/migrations"),
-  loadExtensions: [".mjs", ".ts"],
-  tableName: "infisical_migrations"
-};
-
-const migrationStatusCheckErrorHandler = (err: Error) => {
-  // happens for first time in which the migration table itself is not created yet
-  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
-  if (err?.message?.includes("does not exist")) {
-    return true;
-  }
-  throw err;
-};
-
-export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
-  try {
-    // akhilmhdh(Feb 10 2025): 2 years from now remove this
-    if (isProduction) {
-      const migrationTable = migrationConfig.tableName;
-      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
-      if (hasMigrationTable) {
-        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
-        if (firstFile?.name?.includes(".ts")) {
-          await applicationDb(migrationTable).update({
-            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
-          });
-        }
-      }
-      if (auditLogDb) {
-        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
-        if (hasMigrationTableInAuditLog) {
-          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
-          if (firstFile?.name?.includes(".ts")) {
-            await auditLogDb(migrationTable).update({
-              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
-            });
-          }
-        }
-      }
-    }
-
-    const shouldRunMigration = Boolean(
-      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
-    ); // db.length - code.length
-    if (!shouldRunMigration) {
-      logger.info("No migrations pending: Skipping migration process.");
-      return;
-    }
-
-    if (auditLogDb) {
-      await auditLogDb.transaction(async (tx) => {
-        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
-        logger.info("Running audit log migrations.");
-
-        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
-          .status(migrationConfig)
-          .catch(migrationStatusCheckErrorHandler));
-        if (didPreviousInstanceRunMigration) {
-          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
-          return;
-        }
-
-        await auditLogDb.migrate.latest(migrationConfig);
-        logger.info("Finished audit log migrations.");
-      });
-    }
-
-    await applicationDb.transaction(async (tx) => {
-      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
-      logger.info("Running application migrations.");
-
-      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
-        .status(migrationConfig)
-        .catch(migrationStatusCheckErrorHandler));
-      if (didPreviousInstanceRunMigration) {
-        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
-        return;
-      }
-
-      await applicationDb.migrate.latest(migrationConfig);
-      logger.info("Finished application migrations.");
-    });
-  } catch (err) {
-    logger.error(err, "Boot up migration failed");
-    process.exit(1);
-  }
-};
(deleted file)
@@ -1,75 +0,0 @@
-// eslint-disable-next-line
-import "ts-node/register";
-
-import dotenv from "dotenv";
-import type { Knex } from "knex";
-import path from "path";
-
-// Update with your config settings. .
-dotenv.config({
-  path: path.join(__dirname, "../../../.env.migration")
-});
-dotenv.config({
-  path: path.join(__dirname, "../../../.env")
-});
-
-if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
-  console.info("Dedicated audit log database not found. No further migrations necessary");
-  process.exit(0);
-}
-
-console.info("Executing migration on audit log database...");
-
-export default {
-  development: {
-    client: "postgres",
-    connection: {
-      connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
-      host: process.env.AUDIT_LOGS_DB_HOST,
-      port: process.env.AUDIT_LOGS_DB_PORT,
-      user: process.env.AUDIT_LOGS_DB_USER,
-      database: process.env.AUDIT_LOGS_DB_NAME,
-      password: process.env.AUDIT_LOGS_DB_PASSWORD,
-      ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
-        ? {
-            rejectUnauthorized: true,
-            ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
-          }
-        : false
-    },
-    pool: {
-      min: 2,
-      max: 10
-    },
-    seeds: {
-      directory: "./seeds"
-    },
-    migrations: {
-      tableName: "infisical_migrations"
-    }
-  },
-  production: {
-    client: "postgres",
-    connection: {
-      connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
-      host: process.env.AUDIT_LOGS_DB_HOST,
-      port: process.env.AUDIT_LOGS_DB_PORT,
-      user: process.env.AUDIT_LOGS_DB_USER,
-      database: process.env.AUDIT_LOGS_DB_NAME,
-      password: process.env.AUDIT_LOGS_DB_PASSWORD,
-      ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
-        ? {
-            rejectUnauthorized: true,
-            ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
-          }
-        : false
-    },
-    pool: {
-      min: 2,
-      max: 10
-    },
-    migrations: {
-      tableName: "infisical_migrations"
-    }
-  }
-} as Knex.Config;
@@ -1,2 +1,2 @@
 export type { TDbClient } from "./instance";
-export { initAuditLogDbConnection, initDbConnection } from "./instance";
+export { initDbConnection } from "./instance";
@@ -1,6 +1,6 @@
 import knex, { Knex } from "knex";
 
-export type TDbClient = Knex;
+export type TDbClient = ReturnType<typeof initDbConnection>;
 export const initDbConnection = ({
   dbConnectionUri,
   dbRootCert,
@@ -49,11 +49,6 @@ export const initDbConnection = ({
            ca: Buffer.from(dbRootCert, "base64").toString("ascii")
          }
        : false
-    },
-    // https://knexjs.org/guide/#pool
-    pool: { min: 0, max: 10 },
-    migrations: {
-      tableName: "infisical_migrations"
    }
  });
 
@@ -69,59 +64,9 @@ export const initDbConnection = ({
              ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
            }
          : false
-      },
-      migrations: {
-        tableName: "infisical_migrations"
-      },
-      pool: { min: 0, max: 10 }
+      }
    });
  });
 
  return db;
};
-
-export const initAuditLogDbConnection = ({
-  dbConnectionUri,
-  dbRootCert
-}: {
-  dbConnectionUri: string;
-  dbRootCert?: string;
-}) => {
-  // akhilmhdh: the default Knex is knex.Knex<any, any[]>. but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
-  // this was causing issue with files like `snapshot-dal` `findRecursivelySnapshots` this i am explicitly putting the any and unknown[]
-  // eslint-disable-next-line
-  const db: Knex<any, unknown[]> = knex({
-    client: "pg",
-    connection: {
-      connectionString: dbConnectionUri,
-      host: process.env.AUDIT_LOGS_DB_HOST,
-      // @ts-expect-error I have no clue why only for the port there is a type error
-      // eslint-disable-next-line
-      port: process.env.AUDIT_LOGS_DB_PORT,
-      user: process.env.AUDIT_LOGS_DB_USER,
-      database: process.env.AUDIT_LOGS_DB_NAME,
-      password: process.env.AUDIT_LOGS_DB_PASSWORD,
-      ssl: dbRootCert
-        ? {
-            rejectUnauthorized: true,
-            ca: Buffer.from(dbRootCert, "base64").toString("ascii")
-          }
-        : false
-    },
-    migrations: {
-      tableName: "infisical_migrations"
-    },
-    pool: { min: 0, max: 10 }
-  });
-
-  // we add these overrides so that auditLogDb and the primary DB are interchangeable
-  db.primaryNode = () => {
-    return db;
-  };
-
-  db.replicaNode = () => {
-    return db;
-  };
-
-  return db;
-};
@@ -4,7 +4,6 @@ import "ts-node/register";
 import dotenv from "dotenv";
 import type { Knex } from "knex";
 import path from "path";
-import { initLogger } from "@app/lib/logger";
 
 // Update with your config settings. .
 dotenv.config({
@@ -14,8 +13,6 @@ dotenv.config({
   path: path.join(__dirname, "../../../.env")
 });
 
-initLogger();
-
 export default {
   development: {
     client: "postgres",
@@ -41,8 +38,7 @@ export default {
      directory: "./seeds"
    },
    migrations: {
-      tableName: "infisical_migrations",
-      loadExtensions: [".mjs", ".ts"]
+      tableName: "infisical_migrations"
    }
  },
  production: {
@@ -66,8 +62,7 @@ export default {
      max: 10
    },
    migrations: {
-      tableName: "infisical_migrations",
-      loadExtensions: [".mjs", ".ts"]
+      tableName: "infisical_migrations"
    }
  }
} as Knex.Config;
(deleted file)
@@ -1,161 +0,0 @@
-import kx, { Knex } from "knex";
-
-import { TableName } from "../schemas";
-
-const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs";
-
-const formatPartitionDate = (date: Date) => {
-  const year = date.getFullYear();
-  const month = String(date.getMonth() + 1).padStart(2, "0");
-  const day = String(date.getDate()).padStart(2, "0");
-
-  return `${year}-${month}-${day}`;
-};
-
-const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
-  const startDateStr = formatPartitionDate(startDate);
-  const endDateStr = formatPartitionDate(endDate);
-
-  const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
-
-  await knex.schema.raw(
-    `CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
-  );
-};
-
-const up = async (knex: Knex): Promise<void> => {
-  console.info("Dropping primary key of audit log table...");
-  await knex.schema.alterTable(TableName.AuditLog, (t) => {
-    // remove existing keys
-    t.dropPrimary();
-  });
-
-  // Get all indices of the audit log table and drop them
-  const indexNames: { rows: { indexname: string }[] } = await knex.raw(
-    `
-    SELECT indexname
-    FROM pg_indexes
-    WHERE tablename = '${TableName.AuditLog}'
-    `
-  );
-
-  console.log(
-    "Deleting existing audit log indices:",
-    indexNames.rows.map((e) => e.indexname)
-  );
-
-  for await (const row of indexNames.rows) {
-    await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`);
-  }
-
-  // renaming audit log to intermediate table
-  console.log("Renaming audit log table to the intermediate name");
-  await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE);
-
-  if (!(await knex.schema.hasTable(TableName.AuditLog))) {
-    const createTableSql = knex.schema
-      .createTable(TableName.AuditLog, (t) => {
-        t.uuid("id").defaultTo(knex.fn.uuid());
-        t.string("actor").notNullable();
-        t.jsonb("actorMetadata").notNullable();
-        t.string("ipAddress");
-        t.string("eventType").notNullable();
-        t.jsonb("eventMetadata");
-        t.string("userAgent");
-        t.string("userAgentType");
-        t.datetime("expiresAt");
-        t.timestamps(true, true, true);
-        t.uuid("orgId");
-        t.string("projectId");
-        t.string("projectName");
-        t.primary(["id", "createdAt"]);
-      })
-      .toString();
-
-    console.info("Creating partition table...");
-    await knex.schema.raw(`
-      ${createTableSql} PARTITION BY RANGE ("createdAt");
-    `);
-
-    console.log("Adding indices...");
-    await knex.schema.alterTable(TableName.AuditLog, (t) => {
-      t.index(["projectId", "createdAt"]);
-      t.index(["orgId", "createdAt"]);
-      t.index("expiresAt");
-      t.index("orgId");
-      t.index("projectId");
-    });
-
-    console.log("Adding GIN indices...");
-
-    await knex.raw(
-      `CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)`
-    );
-    console.log("GIN index for actorMetadata done");
-
-    await knex.raw(
-      `CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)`
-    );
-    console.log("GIN index for eventMetadata done");
-
-    // create default partition
-    console.log("Creating default partition...");
-    await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`);
-
-    const nextDate = new Date();
-    nextDate.setDate(nextDate.getDate() + 1);
-    const nextDateStr = formatPartitionDate(nextDate);
-
-    console.log("Attaching existing audit log table as a partition...");
-    await knex.schema.raw(`
-    ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old
-    CHECK ( "createdAt" < DATE '${nextDateStr}' );
-
-    ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE}
-    FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
-    `);
-
-    // create partition from now until end of month
-    console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
-    await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
-
-    // create partitions 4 years ahead
-    const partitionMonths = 4 * 12;
-    const partitionPromises: Promise<void>[] = [];
-    for (let x = 1; x <= partitionMonths; x += 1) {
-      partitionPromises.push(
-        createAuditLogPartition(
-          knex,
-          new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
-          new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
-        )
-      );
-    }
-
-    await Promise.all(partitionPromises);
-    console.log("Partition migration complete");
-  }
-};
-
-export const executeMigration = async (url: string) => {
-  console.log("Executing migration...");
-  const knex = kx({
-    client: "pg",
-    connection: url
-  });
-
-  await knex.transaction(async (tx) => {
-    await up(tx);
-  });
-};
-
-const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI;
-if (!dbUrl) {
-  console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env");
-  process.exit(1);
-}
-
-void executeMigration(dbUrl).then(() => {
-  console.log("Migration: partition-audit-logs DONE");
-  process.exit(0);
-});
@@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
     t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
     t.string("integration").notNullable();
     t.string("teamId"); // vercel-specific
-    t.string("url"); // for self-hosted
+    t.string("url"); // for self hosted
     t.string("namespace"); // hashicorp specific
     t.string("accountId"); // netlify
     t.text("refreshCiphertext");
@@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
   await knex.schema.createTable(TableName.Integration, (t) => {
     t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
     t.boolean("isActive").notNullable();
-    t.string("url"); // self-hosted
+    t.string("url"); // self hosted
     t.string("app"); // name of app in provider
     t.string("appId");
     t.string("targetEnvironment");
@ -115,14 +115,7 @@ export async function down(knex: Knex): Promise<void> {
|
|||||||
// eslint-disable-next-line
|
// eslint-disable-next-line
|
||||||
// @ts-ignore because generate schema happens after this
|
// @ts-ignore because generate schema happens after this
|
||||||
approverId: knex(TableName.ProjectMembership)
|
approverId: knex(TableName.ProjectMembership)
|
||||||
.join(
|
.select("id")
|
||||||
TableName.SecretApprovalPolicy,
|
|
||||||
`${TableName.SecretApprovalPolicy}.id`,
|
|
||||||
`${TableName.SecretApprovalPolicyApprover}.policyId`
|
|
||||||
)
|
|
||||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
|
|
||||||
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
|
|
||||||
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
|
|
||||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
|
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
|
||||||
});
|
});
|
||||||
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
|
||||||
@@ -154,27 +147,13 @@ export async function down(knex: Knex): Promise<void> {
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       committerId: knex(TableName.ProjectMembership)
-        .join(
-          TableName.SecretApprovalPolicy,
-          `${TableName.SecretApprovalPolicy}.id`,
-          `${TableName.SecretApprovalRequest}.policyId`
-        )
-        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
-        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
-        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
-        .select(knex.ref("id").withSchema(TableName.ProjectMembership)),
+        .select("id")
+        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       statusChangeBy: knex(TableName.ProjectMembership)
-        .join(
-          TableName.SecretApprovalPolicy,
-          `${TableName.SecretApprovalPolicy}.id`,
-          `${TableName.SecretApprovalRequest}.policyId`
-        )
-        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
-        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
+        .select("id")
         .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
-        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
     });

     await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@@ -198,20 +177,8 @@ export async function down(knex: Knex): Promise<void> {
       // eslint-disable-next-line
       // @ts-ignore because generate schema happens after this
       member: knex(TableName.ProjectMembership)
-        .join(
-          TableName.SecretApprovalRequest,
-          `${TableName.SecretApprovalRequest}.id`,
-          `${TableName.SecretApprovalRequestReviewer}.requestId`
-        )
-        .join(
-          TableName.SecretApprovalPolicy,
-          `${TableName.SecretApprovalPolicy}.id`,
-          `${TableName.SecretApprovalRequest}.policyId`
-        )
-        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
-        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
+        .select("id")
         .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
-        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
     });
     await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
       tb.uuid("member").notNullable().alter();
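The simplified subqueries in the hunks above lean on knex's `??` raw binding, which interpolates an escaped identifier (a column or table name) rather than a literal value, so the `.where(...)` correlates against the outer row being updated. A minimal sketch of the same pattern, with hypothetical posts/users tables:

import knex, { Knex } from "knex";

const db: Knex = knex({ client: "pg", connection: process.env.DATABASE_URL });

// Hypothetical backfill: `??` binds "posts"."authorEmail" as an identifier,
// making the subquery correlated with the row being updated; `?` would have
// bound it as a plain string literal instead.
async function backfillAuthorIds(): Promise<void> {
  await db("posts").update({
    authorId: db("users")
      .select("id")
      .where("email", db.raw("??", ["posts.authorEmail"]))
      .first()
  });
}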
@@ -64,25 +64,23 @@ export async function up(knex: Knex): Promise<void> {
   }

   if (await knex.schema.hasTable(TableName.Certificate)) {
-    const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
-    if (!hasCaCertIdColumn) {
     await knex.schema.alterTable(TableName.Certificate, (t) => {
       t.uuid("caCertId").nullable();
       t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
     });

     await knex.raw(`
       UPDATE "${TableName.Certificate}" cert
       SET "caCertId" = (
         SELECT caCert.id
         FROM "${TableName.CertificateAuthorityCert}" caCert
         WHERE caCert."caId" = cert."caId"
-      )`);
+      )
+    `);

     await knex.schema.alterTable(TableName.Certificate, (t) => {
       t.uuid("caCertId").notNullable().alter();
     });
-    }
   }
 }
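The caCertId hunk is an instance of the usual three-step recipe for introducing a NOT NULL column on a populated table: add it nullable, backfill it, then tighten the constraint. A condensed sketch of the same shape, assuming hypothetical child/parent tables:

import { Knex } from "knex";

// Sketch of add-nullable -> backfill -> enforce NOT NULL. The final
// alter() fails loudly if any row was left unpopulated, which is what
// makes the pattern safe to run against live data.
export async function addParentRef(knex: Knex): Promise<void> {
  await knex.schema.alterTable("child", (t) => {
    t.uuid("parentId").nullable();
    t.foreign("parentId").references("id").inTable("parent");
  });

  await knex.raw(`
    UPDATE "child" c
    SET "parentId" = (SELECT p.id FROM "parent" p WHERE p."legacyKey" = c."legacyKey")
  `);

  await knex.schema.alterTable("child", (t) => {
    t.uuid("parentId").notNullable().alter();
  });
}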
@@ -1,25 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (!doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.string("password").nullable();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.dropColumn("password");
      });
    }
  }
}
@@ -1,96 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
    await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("integration").notNullable();
      tb.string("slug").notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.string("description");
      tb.unique(["orgId", "slug"]);
      tb.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
  }

  if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
    await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
      tb.uuid("id", { primaryKey: true }).notNullable();
      tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
      tb.string("teamId").notNullable();
      tb.string("teamName").notNullable();
      tb.string("slackUserId").notNullable();
      tb.string("slackAppId").notNullable();
      tb.binary("encryptedBotAccessToken").notNullable();
      tb.string("slackBotId").notNullable();
      tb.string("slackBotUserId").notNullable();
      tb.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
  }

  if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
    await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("projectId").notNullable().unique();
      tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      tb.uuid("slackIntegrationId").notNullable();
      tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
      tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
      tb.string("accessRequestChannels").notNullable().defaultTo("");
      tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
      tb.string("secretRequestChannels").notNullable().defaultTo("");
      tb.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
  }

  const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
  const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedSlackClientSecret"
  );

  await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
    if (!doesSuperAdminHaveSlackClientId) {
      tb.binary("encryptedSlackClientId");
    }
    if (!doesSuperAdminHaveSlackClientSecret) {
      tb.binary("encryptedSlackClientSecret");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
  await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);

  await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
  await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);

  await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
  await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);

  const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
  const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedSlackClientSecret"
  );

  await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
    if (doesSuperAdminHaveSlackClientId) {
      tb.dropColumn("encryptedSlackClientId");
    }
    if (doesSuperAdminHaveSlackClientSecret) {
      tb.dropColumn("encryptedSlackClientSecret");
    }
  });
}
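SlackIntegrations above shares its primary key with WorkflowIntegrations (the id is both PK and FK), a one-to-one layout where the base table holds the generic fields and the child table the provider-specific ones. Creating a pair would look roughly like the sketch below; the table names and column values are illustrative placeholders, not the service's actual code:

import { Knex } from "knex";

// Sketch: insert the base workflow row, then a Slack row reusing its id.
// The shared primary key is what enforces the strict one-to-one relation.
async function createSlackIntegration(knex: Knex, orgId: string): Promise<void> {
  const [base] = await knex("workflow_integrations")
    .insert({ integration: "slack", slug: "team-slack", orgId })
    .returning("id");

  await knex("slack_integrations").insert({
    id: base.id, // shared primary key, FK to workflow_integrations.id
    teamId: "T0000000", // placeholder values for illustration
    teamName: "Acme",
    slackUserId: "U0000000",
    slackAppId: "A0000000",
    encryptedBotAccessToken: Buffer.alloc(0),
    slackBotId: "B0000000",
    slackBotUserId: "U0000001"
  });
}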
@@ -1,25 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
    const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
      TableName.CertificateAuthority,
      "requireTemplateForIssuance"
    );
    if (!hasRequireTemplateForIssuanceColumn) {
      await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
        t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.dropColumn("requireTemplateForIssuance");
    });
  }
}
@@ -1,85 +0,0 @@
import { Knex } from "knex";

import { CertKeyUsage } from "@app/services/certificate/certificate-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  // Certificate template
  const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
  const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");

  await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
    if (!hasKeyUsagesCol) {
      tb.specificType("keyUsages", "text[]");
    }

    if (!hasExtendedKeyUsagesCol) {
      tb.specificType("extendedKeyUsages", "text[]");
    }
  });

  if (!hasKeyUsagesCol) {
    await knex(TableName.CertificateTemplate).update({
      keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
    });
  }

  if (!hasExtendedKeyUsagesCol) {
    await knex(TableName.CertificateTemplate).update({
      extendedKeyUsages: []
    });
  }

  // Certificate
  const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
  const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
  await knex.schema.alterTable(TableName.Certificate, (tb) => {
    if (!doesCertTableHaveKeyUsages) {
      tb.specificType("keyUsages", "text[]");
    }

    if (!doesCertTableHaveExtendedKeyUsages) {
      tb.specificType("extendedKeyUsages", "text[]");
    }
  });

  if (!doesCertTableHaveKeyUsages) {
    await knex(TableName.Certificate).update({
      keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
    });
  }

  if (!doesCertTableHaveExtendedKeyUsages) {
    await knex(TableName.Certificate).update({
      extendedKeyUsages: []
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  // Certificate Template
  const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
  const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");

  await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
    if (hasKeyUsagesCol) {
      t.dropColumn("keyUsages");
    }
    if (hasExtendedKeyUsagesCol) {
      t.dropColumn("extendedKeyUsages");
    }
  });

  // Certificate
  const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
  const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
  await knex.schema.alterTable(TableName.Certificate, (t) => {
    if (doesCertTableHaveKeyUsages) {
      t.dropColumn("keyUsages");
    }
    if (doesCertTableHaveExtendedKeyUsages) {
      t.dropColumn("extendedKeyUsages");
    }
  });
}
@@ -1,76 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAccessApproverGroupId = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
  const hasSecretApproverGroupId = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
      // add column approverGroupId to AccessApprovalPolicyApprover
      if (!hasAccessApproverGroupId) {
        table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
      }

      // make approverUserId nullable
      if (hasAccessApproverUserId) {
        table.uuid("approverUserId").nullable().alter();
      }
    });
    await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
      // add column approverGroupId to SecretApprovalPolicyApprover
      if (!hasSecretApproverGroupId) {
        table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
      }

      // make approverUserId nullable
      if (hasSecretApproverUserId) {
        table.uuid("approverUserId").nullable().alter();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasAccessApproverGroupId = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
  const hasSecretApproverGroupId = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");

  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
      if (hasAccessApproverGroupId) {
        table.dropColumn("approverGroupId");
      }
      // make approverUserId not nullable
      if (hasAccessApproverUserId) {
        table.uuid("approverUserId").notNullable().alter();
      }
    });

    // remove
    await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
      if (hasSecretApproverGroupId) {
        table.dropColumn("approverGroupId");
      }
      // make approverUserId not nullable
      if (hasSecretApproverUserId) {
        table.uuid("approverUserId").notNullable().alter();
      }
    });
  }
}
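A pattern worth noting across these files: every additive or destructive step is wrapped in hasTable/hasColumn guards so the migration stays idempotent if it is interrupted and re-run. The guard-then-alter shape can be factored into a tiny helper, sketched here (the helper name is invented, not part of the codebase):

import { Knex } from "knex";

// Hypothetical helper mirroring the hasColumn checks used throughout:
// add a column only when it is missing, so re-running up() against a
// partially migrated database does not fail with "column already exists".
async function addColumnIfMissing(
  knex: Knex,
  table: string,
  column: string,
  define: (t: Knex.AlterTableBuilder) => void
): Promise<void> {
  if (!(await knex.schema.hasColumn(table, column))) {
    await knex.schema.alterTable(table, define);
  }
}

// e.g. await addColumnIfMissing(knex, "organizations", "enforceMfa", (t) =>
//   t.boolean("enforceMfa").defaultTo(false).notNullable()
// );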
@@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
    await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("key").notNullable();
      tb.string("value").notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.uuid("userId");
      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      tb.uuid("identityId");
      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      tb.timestamps(true, true, true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
}
@@ -1,43 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
    const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");

    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      t.string("iv").nullable().alter();
      t.string("tag").nullable().alter();
      t.string("encryptedValue").nullable().alter();

      if (!hasEncryptedSecret) {
        t.binary("encryptedSecret").nullable();
      }
      t.string("hashedHex").nullable().alter();

      if (!hasIdentifier) {
        t.string("identifier", 64).nullable();
        t.unique("identifier");
        t.index("identifier");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
  const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      if (hasEncryptedSecret) {
        t.dropColumn("encryptedSecret");
      }

      if (hasIdentifier) {
        t.dropColumn("identifier");
      }
    });
  }
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) {
    await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
      tb.datetime("lastUsed");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) {
    await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
      tb.dropColumn("lastUsed");
    });
  }
}
@@ -1,52 +0,0 @@
import { Knex } from "knex";

import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
    const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");

    // drop constraint if exists (won't exist if rolled back, see below)
    await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);

    // projectId for CMEK functionality
    await knex.schema.alterTable(TableName.KmsKey, (table) => {
      if (!hasProjectId) {
        table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
      }

      if (hasOrgId && hasSlug) {
        table.unique(["orgId", "projectId", "slug"]);
      }

      if (hasSlug) {
        table.renameColumn("slug", "name");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
    const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
    const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");

    // remove projectId for CMEK functionality
    await knex.schema.alterTable(TableName.KmsKey, (table) => {
      if (hasName) {
        table.renameColumn("name", "slug");
      }

      if (hasOrgId) {
        table.dropUnique(["orgId", "projectId", "slug"]);
      }
      if (hasProjectId) {
        table.dropColumn("projectId");
      }
    });
  }
}
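knex has no native dropConstraintIfExists, hence the imported util above. On Postgres it plausibly reduces to a single raw statement; a sketch of what such a helper could look like (an assumption, since the real util in @app/db/migrations/utils may differ):

import { Knex } from "knex";

// Postgres supports DROP CONSTRAINT IF EXISTS directly, so no catalog
// lookup is required; `??` interpolates escaped identifiers.
export const dropConstraintIfExists = (tableName: string, constraintName: string, knex: Knex) =>
  knex.raw("ALTER TABLE ?? DROP CONSTRAINT IF EXISTS ??", [tableName, constraintName]);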
@@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (!hasSlug) {
      // add slug back temporarily and set value equal to name
      await knex.schema
        .alterTable(TableName.KmsKey, (table) => {
          table.string("slug", 32);
        })
        .then(() => knex(TableName.KmsKey).update("slug", knex.ref("name")));
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (hasSlug) {
      await knex.schema.alterTable(TableName.KmsKey, (table) => {
        table.dropColumn("slug");
      });
    }
  }
}
@@ -1,48 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
    const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
    const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");

    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesOrgIdExist) {
        t.dropForeign("orgId");
      }

      if (doesProjectIdExist) {
        t.dropForeign("projectId");
      }

      // add normalized field
      if (!doesProjectNameExist) {
        t.string("projectName");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesOrgIdExist) {
        t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      }
      if (doesProjectIdExist) {
        t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      }

      // remove normalized field
      if (doesProjectNameExist) {
        t.dropColumn("projectName");
      }
    });
  }
}
@@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  // org default role
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");

    if (!hasDefaultRoleCol) {
      await knex.schema.alterTable(TableName.Organization, (tb) => {
        tb.string("defaultMembershipRole").notNullable().defaultTo("member");
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  // org default role
  if (await knex.schema.hasTable(TableName.Organization)) {
    const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");

    if (hasDefaultRoleCol) {
      await knex.schema.alterTable(TableName.Organization, (tb) => {
        tb.dropColumn("defaultMembershipRole");
      });
    }
  }
}
@@ -1,101 +0,0 @@
/* eslint-disable no-await-in-loop */
import { packRules, unpackRules } from "@casl/ability/extra";
import { Knex } from "knex";

import {
  backfillPermissionV1SchemaToV2Schema,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";

import { TableName } from "../schemas";

const CHUNK_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
  const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
  if (!hasVersion) {
    await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
      t.integer("version").defaultTo(1).notNullable();
    });

    const docs = await knex(TableName.ProjectRoles).select("*");
    const updatedDocs = docs
      .filter((i) => {
        const permissionString = JSON.stringify(i.permissions || []);
        return (
          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
          !permissionString.includes(ProjectPermissionSub.DynamicSecrets)
        );
      })
      .map((el) => ({
        ...el,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions), true)))
      }));
    if (updatedDocs.length) {
      for (let i = 0; i < updatedDocs.length; i += CHUNK_SIZE) {
        const chunk = updatedDocs.slice(i, i + CHUNK_SIZE);
        await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge();
      }
    }

    // the secret permission is split into multiple subjects (secrets, folders, imports and dynamic-secrets),
    // so we find all the privileges with the respective mapping and remap them as needed
    const identityPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select("*");
    const updatedIdentityPrivilegesDocs = identityPrivileges
      .filter((i) => {
        const permissionString = JSON.stringify(i.permissions || []);
        return (
          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
          !permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
          !permissionString.includes(ProjectPermissionSub.SecretFolders)
        );
      })
      .map((el) => ({
        ...el,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
      }));
    if (updatedIdentityPrivilegesDocs.length) {
      for (let i = 0; i < updatedIdentityPrivilegesDocs.length; i += CHUNK_SIZE) {
        const chunk = updatedIdentityPrivilegesDocs.slice(i, i + CHUNK_SIZE);
        await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge();
      }
    }

    const userPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select("*");
    const updatedUserPrivilegeDocs = userPrivileges
      .filter((i) => {
        const permissionString = JSON.stringify(i.permissions || []);
        return (
          !permissionString.includes(ProjectPermissionSub.SecretImports) &&
          !permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
          !permissionString.includes(ProjectPermissionSub.SecretFolders)
        );
      })
      .map((el) => ({
        ...el,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
        permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
      }));
    // note: the original guard checked `docs.length` here, which belongs to the
    // ProjectRoles query above; the loop iterates updatedUserPrivilegeDocs
    if (updatedUserPrivilegeDocs.length) {
      for (let i = 0; i < updatedUserPrivilegeDocs.length; i += CHUNK_SIZE) {
        const chunk = updatedUserPrivilegeDocs.slice(i, i + CHUNK_SIZE);
        await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge();
      }
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
  if (hasVersion) {
    await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
      t.dropColumn("version");
    });

    // permission change can be ignored
  }
}
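The backfill above rewrites existing rows by re-inserting updated copies and letting ON CONFLICT (id) DO UPDATE overwrite them, 1000 rows per statement. Extracted into a generic helper, the loop looks like this sketch:

import { Knex } from "knex";

const CHUNK_SIZE = 1000;

// Sketch of the chunked-upsert pattern: each insert().onConflict("id").merge()
// becomes one INSERT ... ON CONFLICT (id) DO UPDATE statement covering up to
// CHUNK_SIZE rows, keeping statement size and memory use bounded.
async function upsertInChunks<T extends { id: string }>(knex: Knex, table: string, rows: T[]): Promise<void> {
  for (let i = 0; i < rows.length; i += CHUNK_SIZE) {
    const chunk = rows.slice(i, i + CHUNK_SIZE);
    // eslint-disable-next-line no-await-in-loop
    await knex(table).insert(chunk).onConflict("id").merge();
  }
}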
@@ -1,78 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 10_000;

export async function up(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

  if (!hasAuthMethodColumnAccessToken) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.string("authMethod").nullable();
    });

    // First, remove unused access tokens belonging to identities with no auth method.
    // ! We delete all access tokens where the identity has no auth method set!
    // ! Which means un-configured identities that for some reason have access tokens will have their access tokens deleted.
    await knex(TableName.IdentityAccessToken)
      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
      .whereNull(`${TableName.Identity}.authMethod`)
      .delete();

    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
      .whereNull("authMethod")
      .limit(BATCH_SIZE)
      .select("id");
    let totalUpdated = 0;

    do {
      const batchIds = nullableAccessTokens.map((token) => token.id);

      // ! Update the auth method column for the current batch
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityAccessToken)
        .whereIn("id", batchIds)
        .update({
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore because generate schema happens after this
          authMethod: knex(TableName.Identity)
            .select("authMethod")
            .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
            .whereNotNull("authMethod")
            .first()
        });

      // eslint-disable-next-line no-await-in-loop
      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
        .whereNull("authMethod")
        .limit(BATCH_SIZE)
        .select("id");

      totalUpdated += batchIds.length;
      console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
    } while (nullableAccessTokens.length > 0);

    // Finally we set authMethod to notNullable after populating the column.
    // This will fail if the data is not populated correctly, so it's safe.
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.string("authMethod").notNullable().alter();
    });
  }

  // ! We aren't dropping the authMethod column from the Identity itself, because we want to be able to easily roll back for the time being.
}

// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function down(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

  if (hasAuthMethodColumnAccessToken) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.dropColumn("authMethod");
    });
  }
}

const config = { transaction: false };
export { config };
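Note the trailing `const config = { transaction: false }` export: knex honors a per-migration config object, and opting out of the wrapping transaction keeps the batched loop above from holding one giant transaction over millions of rows. The skeleton of such a non-transactional batch backfill, as a sketch with placeholder table and column names:

import { Knex } from "knex";

const BATCH_SIZE = 10_000;

// Sketch: re-query after every pass until no NULLs remain. Because the
// migration opts out of knex's per-migration transaction, each batched
// UPDATE commits on its own and locks are held only briefly.
export async function up(knex: Knex): Promise<void> {
  let pending = await knex("big_table").whereNull("newCol").limit(BATCH_SIZE).select("id");
  while (pending.length > 0) {
    // eslint-disable-next-line no-await-in-loop
    await knex("big_table")
      .whereIn(
        "id",
        pending.map((r) => r.id)
      )
      .update({ newCol: "backfilled" });
    // eslint-disable-next-line no-await-in-loop
    pending = await knex("big_table").whereNull("newCol").limit(BATCH_SIZE).select("id");
  }
}

export const config = { transaction: false };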
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 255).alter();
    });
  }
}
@@ -1,32 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  // add external group to org role mapping table
  if (!(await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping))) {
    await knex.schema.createTable(TableName.ExternalGroupOrgRoleMapping, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("groupName").notNullable();
      t.index("groupName");
      t.string("role").notNullable();
      t.uuid("roleId");
      t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
      t.unique(["orgId", "groupName"]);
    });

    await createOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping)) {
    await dropOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);

    await knex.schema.dropTable(TableName.ExternalGroupOrgRoleMapping);
  }
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.Organization, "enforceMfa"))) {
    await knex.schema.alterTable(TableName.Organization, (tb) => {
      tb.boolean("enforceMfa").defaultTo(false).notNullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Organization, "enforceMfa")) {
    await knex.schema.alterTable(TableName.Organization, (t) => {
      t.dropColumn("enforceMfa");
    });
  }
}
@@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization);
    });
  }
}
@@ -1,28 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ProjectTemplates))) {
    await knex.schema.createTable(TableName.ProjectTemplates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name", 32).notNullable();
      t.string("description").nullable();
      t.jsonb("roles").notNullable();
      t.jsonb("environments").notNullable();
      t.uuid("orgId").notNullable().references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.ProjectTemplates);
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.ProjectTemplates)) {
    await dropOnUpdateTrigger(knex, TableName.ProjectTemplates);

    await knex.schema.dropTable(TableName.ProjectTemplates);
  }
}
@@ -1,35 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
    TableName.CertificateTemplateEstConfig,
    "disableBootstrapCertValidation"
  );

  const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain");

  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
    if (!hasDisableBootstrapCertValidationCol) {
      t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable();
    }

    if (hasCaChainCol) {
      t.binary("encryptedCaChain").nullable().alter();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
    TableName.CertificateTemplateEstConfig,
    "disableBootstrapCertValidation"
  );

  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
    if (hasDisableBootstrapCertValidationCol) {
      t.dropColumn("disableBootstrapCertValidation");
    }
  });
}
@@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization);
    });
  }
}
@@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");

  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
    if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
    if (!hasTimestampsCol) t.timestamps(true, true, true);
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");

  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
    if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
    if (hasTimestampsCol) t.dropTimestamps(true);
  });
}
@@ -1,54 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
    await knex.schema.createTable(TableName.TotpConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("userId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.boolean("isVerified").defaultTo(false).notNullable();
      t.binary("encryptedRecoveryCodes").notNullable();
      t.binary("encryptedSecret").notNullable();
      t.timestamps(true, true, true);
      t.unique("userId");
    });

    await createOnUpdateTrigger(knex, TableName.TotpConfig);
  }

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!doesOrgMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!doesUserSelectedMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
  await knex.schema.dropTableIfExists(TableName.TotpConfig);

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (doesOrgMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (doesUserSelectedMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });
}
@@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("description");
    });
  }
}
@@ -1,20 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex(TableName.IdentityMetadata).whereNull("value").delete();
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}
@@ -1,59 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicy,
    "deletedAt"
  );
  const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicy,
    "deletedAt"
  );

  if (!hasAccessApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.timestamp("deletedAt");
    });
  }
  if (!hasSecretApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.timestamp("deletedAt");
    });
  }

  await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
    t.dropForeign(["privilegeId"]);

    // Add the new foreign key constraint with ON DELETE SET NULL
    t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicy,
    "deletedAt"
  );
  const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicy,
    "deletedAt"
  );

  if (hasAccessApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
      t.dropColumn("deletedAt");
    });
  }
  if (hasSecretApprovalPolicyDeletedAtColumn) {
    await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
      t.dropColumn("deletedAt");
    });
  }

  await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
    t.dropForeign(["privilegeId"]);
    t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
  });
}
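Changing a foreign key's ON DELETE rule, as the privilegeId constraint above does (SET NULL going up, CASCADE going back down), always means dropping and re-adding the constraint. One caveat worth a sketch: SET NULL only works when the referencing column is actually nullable (table names below are illustrative):

import { Knex } from "knex";

// Sketch: swap an FK's delete rule from CASCADE to SET NULL. If the
// column were NOT NULL, deleting a parent row would fail at runtime
// instead of nulling the reference.
export async function relaxDeleteRule(knex: Knex): Promise<void> {
  await knex.schema.alterTable("access_approval_requests", (t) => {
    t.dropForeign(["privilegeId"]);
    t.uuid("privilegeId").nullable().alter(); // assumption: make the column nullable first
    t.foreign("privilegeId").references("id").inTable("project_user_additional_privileges").onDelete("SET NULL");
  });
}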
@@ -1,34 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityJwtAuth))) {
    await knex.schema.createTable(TableName.IdentityJwtAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("configurationType").notNullable();
      t.string("jwksUrl").notNullable();
      t.binary("encryptedJwksCaCert").notNullable();
      t.binary("encryptedPublicKeys").notNullable();
      t.string("boundIssuer").notNullable();
      t.string("boundAudiences").notNullable();
      t.jsonb("boundClaims").notNullable();
      t.string("boundSubject").notNullable();
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityJwtAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      t.index("folderId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      t.dropIndex("folderId");
    });
  }
}
@ -1,297 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { v4 as uuidV4 } from "uuid";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { ProjectType, TableName } from "../schemas";

/* eslint-disable no-await-in-loop,@typescript-eslint/ban-ts-comment */
const newProject = async (knex: Knex, projectId: string, projectType: ProjectType) => {
  const newProjectId = uuidV4();
  const project = await knex(TableName.Project).where("id", projectId).first();
  await knex(TableName.Project).insert({
    ...project,
    type: projectType,
    // @ts-ignore id is required
    id: newProjectId,
    slug: slugify(`${project?.name}-${alphaNumericNanoId(4)}`)
  });

  const customRoleMapping: Record<string, string> = {};
  const projectCustomRoles = await knex(TableName.ProjectRoles).where("projectId", projectId);
  if (projectCustomRoles.length) {
    await knex.batchInsert(
      TableName.ProjectRoles,
      projectCustomRoles.map((el) => {
        const id = uuidV4();
        customRoleMapping[el.id] = id;
        return {
          ...el,
          id,
          projectId: newProjectId,
          permissions: el.permissions ? JSON.stringify(el.permissions) : el.permissions
        };
      })
    );
  }
  const groupMembershipMapping: Record<string, string> = {};
  const groupMemberships = await knex(TableName.GroupProjectMembership).where("projectId", projectId);
  if (groupMemberships.length) {
    await knex.batchInsert(
      TableName.GroupProjectMembership,
      groupMemberships.map((el) => {
        const id = uuidV4();
        groupMembershipMapping[el.id] = id;
        return { ...el, id, projectId: newProjectId };
      })
    );
  }

  const groupMembershipRoles = await knex(TableName.GroupProjectMembershipRole).whereIn(
    "projectMembershipId",
    groupMemberships.map((el) => el.id)
  );
  if (groupMembershipRoles.length) {
    await knex.batchInsert(
      TableName.GroupProjectMembershipRole,
      groupMembershipRoles.map((el) => {
        const id = uuidV4();
        const projectMembershipId = groupMembershipMapping[el.projectMembershipId];
        const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
        return { ...el, id, projectMembershipId, customRoleId };
      })
    );
  }

  const identityProjectMembershipMapping: Record<string, string> = {};
  const identities = await knex(TableName.IdentityProjectMembership).where("projectId", projectId);
  if (identities.length) {
    await knex.batchInsert(
      TableName.IdentityProjectMembership,
      identities.map((el) => {
        const id = uuidV4();
        identityProjectMembershipMapping[el.id] = id;
        return { ...el, id, projectId: newProjectId };
      })
    );
  }

  const identitiesRoles = await knex(TableName.IdentityProjectMembershipRole).whereIn(
    "projectMembershipId",
    identities.map((el) => el.id)
  );
  if (identitiesRoles.length) {
    await knex.batchInsert(
      TableName.IdentityProjectMembershipRole,
      identitiesRoles.map((el) => {
        const id = uuidV4();
        const projectMembershipId = identityProjectMembershipMapping[el.projectMembershipId];
        const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
        return { ...el, id, projectMembershipId, customRoleId };
      })
    );
  }

  const projectMembershipMapping: Record<string, string> = {};
  const projectUserMembers = await knex(TableName.ProjectMembership).where("projectId", projectId);
  if (projectUserMembers.length) {
    await knex.batchInsert(
      TableName.ProjectMembership,
      projectUserMembers.map((el) => {
        const id = uuidV4();
        projectMembershipMapping[el.id] = id;
        return { ...el, id, projectId: newProjectId };
      })
    );
  }
  const membershipRoles = await knex(TableName.ProjectUserMembershipRole).whereIn(
    "projectMembershipId",
    projectUserMembers.map((el) => el.id)
  );
  if (membershipRoles.length) {
    await knex.batchInsert(
      TableName.ProjectUserMembershipRole,
      membershipRoles.map((el) => {
        const id = uuidV4();
        const projectMembershipId = projectMembershipMapping[el.projectMembershipId];
        const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
        return { ...el, id, projectMembershipId, customRoleId };
      })
    );
  }

  const kmsKeys = await knex(TableName.KmsKey).where("projectId", projectId).andWhere("isReserved", true);
  if (kmsKeys.length) {
    await knex.batchInsert(
      TableName.KmsKey,
      kmsKeys.map((el) => {
        const id = uuidV4();
        const slug = slugify(alphaNumericNanoId(8).toLowerCase());
        return { ...el, id, slug, projectId: newProjectId };
      })
    );
  }

  const projectBot = await knex(TableName.ProjectBot).where("projectId", projectId).first();
  if (projectBot) {
    const newProjectBot = { ...projectBot, id: uuidV4(), projectId: newProjectId };
    await knex(TableName.ProjectBot).insert(newProjectBot);
  }

  const projectKeys = await knex(TableName.ProjectKeys).where("projectId", projectId);
  if (projectKeys.length) {
    await knex.batchInsert(
      TableName.ProjectKeys,
      projectKeys.map((el) => {
        const id = uuidV4();
        return { ...el, id, projectId: newProjectId };
      })
    );
  }

  return newProjectId;
};

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
  if (!hasSplitMappingTable) {
    await knex.schema.createTable(TableName.ProjectSplitBackfillIds, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("sourceProjectId", 36).notNullable();
      t.foreign("sourceProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("destinationProjectType").notNullable();
      t.string("destinationProjectId", 36).notNullable();
      t.foreign("destinationProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
    });
  }

  const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
  if (!hasTypeColumn) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("type");
    });

    let projectsToBeTyped;
    do {
      // eslint-disable-next-line no-await-in-loop
      projectsToBeTyped = await knex(TableName.Project).whereNull("type").limit(BATCH_SIZE).select("id");
      if (projectsToBeTyped.length) {
        // eslint-disable-next-line no-await-in-loop
        await knex(TableName.Project)
          .whereIn(
            "id",
            projectsToBeTyped.map((el) => el.id)
          )
          .update({ type: ProjectType.SecretManager });
      }
    } while (projectsToBeTyped.length > 0);

    const projectsWithCertificates = await knex(TableName.CertificateAuthority)
      .distinct("projectId")
      .select("projectId");
    /* eslint-disable no-await-in-loop,no-param-reassign */
    for (const { projectId } of projectsWithCertificates) {
      const newProjectId = await newProject(knex, projectId, ProjectType.CertificateManager);
      await knex(TableName.CertificateAuthority).where("projectId", projectId).update({ projectId: newProjectId });
      await knex(TableName.PkiAlert).where("projectId", projectId).update({ projectId: newProjectId });
      await knex(TableName.PkiCollection).where("projectId", projectId).update({ projectId: newProjectId });
      await knex(TableName.ProjectSplitBackfillIds).insert({
        sourceProjectId: projectId,
        destinationProjectType: ProjectType.CertificateManager,
        destinationProjectId: newProjectId
      });
    }

    const projectsWithCmek = await knex(TableName.KmsKey)
      .where("isReserved", false)
      .whereNotNull("projectId")
      .distinct("projectId")
      .select("projectId");
    for (const { projectId } of projectsWithCmek) {
      if (projectId) {
        const newProjectId = await newProject(knex, projectId, ProjectType.KMS);
        await knex(TableName.KmsKey)
          .where({
            isReserved: false,
            projectId
          })
          .update({ projectId: newProjectId });
        await knex(TableName.ProjectSplitBackfillIds).insert({
          sourceProjectId: projectId,
          destinationProjectType: ProjectType.KMS,
          destinationProjectId: newProjectId
        });
      }
    }

    /* eslint-enable */
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("type").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
  const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);

  if (hasTypeColumn && hasSplitMappingTable) {
    const splitProjectMappings = await knex(TableName.ProjectSplitBackfillIds).where({});
    const certMapping = splitProjectMappings.filter(
      (el) => el.destinationProjectType === ProjectType.CertificateManager
    );
    /* eslint-disable no-await-in-loop */
    for (const project of certMapping) {
      await knex(TableName.CertificateAuthority)
        .where("projectId", project.destinationProjectId)
        .update({ projectId: project.sourceProjectId });
      await knex(TableName.PkiAlert)
        .where("projectId", project.destinationProjectId)
        .update({ projectId: project.sourceProjectId });
      await knex(TableName.PkiCollection)
        .where("projectId", project.destinationProjectId)
        .update({ projectId: project.sourceProjectId });
    }

    /* eslint-enable */
    const kmsMapping = splitProjectMappings.filter((el) => el.destinationProjectType === ProjectType.KMS);
    /* eslint-disable no-await-in-loop */
    for (const project of kmsMapping) {
      await knex(TableName.KmsKey)
        .where({
          isReserved: false,
          projectId: project.destinationProjectId
        })
        .update({ projectId: project.sourceProjectId });
    }
    /* eslint-enable */
    await knex(TableName.ProjectMembership)
      .whereIn(
        "projectId",
        splitProjectMappings.map((el) => el.destinationProjectId)
      )
      .delete();
    await knex(TableName.ProjectRoles)
      .whereIn(
        "projectId",
        splitProjectMappings.map((el) => el.destinationProjectId)
      )
      .delete();
    await knex(TableName.Project)
      .whereIn(
        "id",
        splitProjectMappings.map((el) => el.destinationProjectId)
      )
      .delete();

    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("type");
    });
  }

  if (hasSplitMappingTable) {
    await knex.schema.dropTableIfExists(TableName.ProjectSplitBackfillIds);
  }
}
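The migration above splits certificate and KMS resources out of existing projects into freshly cloned projects and records every move in ProjectSplitBackfillIds, which is exactly what down replays in reverse. A hedged sketch of how that mapping table could be consulted after the split (sourceId is a placeholder, not a name from this diff):

// Resolve which new project a legacy project's certificate resources moved
// to; fall back to the source project when no split was recorded.
const mapping = await knex(TableName.ProjectSplitBackfillIds)
  .where({
    sourceProjectId: sourceId,
    destinationProjectType: ProjectType.CertificateManager
  })
  .first();
const certProjectId = mapping?.destinationProjectId ?? sourceId;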
@ -1,99 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
    await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("status").notNullable(); // active / disabled
      t.string("friendlyName").notNullable();
      t.string("keyAlgorithm").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
    await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable().unique();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.binary("encryptedPrivateKey").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
    await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.string("status").notNullable(); // active / disabled
      t.string("name").notNullable();
      t.string("ttl").notNullable();
      t.string("maxTTL").notNullable();
      t.specificType("allowedUsers", "text[]").notNullable();
      t.specificType("allowedHosts", "text[]").notNullable();
      t.boolean("allowUserCertificates").notNullable();
      t.boolean("allowHostCertificates").notNullable();
      t.boolean("allowCustomKeyIds").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
    await knex.schema.createTable(TableName.SshCertificate, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCaId").notNullable();
      t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
      t.uuid("sshCertificateTemplateId");
      t.foreign("sshCertificateTemplateId")
        .references("id")
        .inTable(TableName.SshCertificateTemplate)
        .onDelete("SET NULL");
      t.string("serialNumber").notNullable().unique();
      t.string("certType").notNullable(); // user or host
      t.specificType("principals", "text[]").notNullable();
      t.string("keyId").notNullable();
      t.datetime("notBefore").notNullable();
      t.datetime("notAfter").notNullable();
    });
    await createOnUpdateTrigger(knex, TableName.SshCertificate);
  }

  if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
    await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshCertId").notNullable().unique();
      t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
      t.binary("encryptedCertificate").notNullable();
    });

    await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);

  await knex.schema.dropTableIfExists(TableName.SshCertificate);
  await dropOnUpdateTrigger(knex, TableName.SshCertificate);

  await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);

  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);

  await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
  await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}
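The five tables above form a chain: CA to CA secret (1:1), CA to templates, CA/template to certificates, certificate to encrypted body (1:1). A hedged sketch of a read path across them (the join shape is assumed for illustration; serialNumber is a placeholder variable):

// Fetch a certificate together with its issuing CA; the template join is
// LEFT because sshCertificateTemplateId is nullable.
const cert = await knex(TableName.SshCertificate)
  .join(
    TableName.SshCertificateAuthority,
    `${TableName.SshCertificate}.sshCaId`,
    `${TableName.SshCertificateAuthority}.id`
  )
  .leftJoin(
    TableName.SshCertificateTemplate,
    `${TableName.SshCertificate}.sshCertificateTemplateId`,
    `${TableName.SshCertificateTemplate}.id`
  )
  .where(`${TableName.SshCertificate}.serialNumber`, serialNumber)
  .first();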
@ -1,40 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
    await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("key").notNullable();
      tb.string("value", 1020).notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.uuid("userId");
      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      tb.uuid("identityId");
      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      tb.uuid("secretId");
      tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
      tb.timestamps(true, true, true);
    });
  }

  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
  if (!hasSecretMetadataField) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
      t.jsonb("secretMetadata");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ResourceMetadata);

  const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
  if (hasSecretMetadataField) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
      t.dropColumn("secretMetadata");
    });
  }
}
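ResourceMetadata is a generic key/value table whose nullable userId / identityId / secretId foreign keys determine which resource a row annotates, always scoped to an org. A hedged sketch of tagging a secret (the key/value strings and the orgId / secretId variables are placeholders):

// Attach a metadata pair to a secret; only the relevant resource FK is set,
// the other resource columns stay NULL.
await knex(TableName.ResourceMetadata).insert({
  key: "owner",
  value: "platform-team",
  orgId,
  secretId
});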
Some files were not shown because too many files have changed in this diff.