Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-20 14:21:28 +00:00)

Compare commits: 1 commit
Commit: f8ad8de4c5
.env.example (65 changed lines)
@@ -3,6 +3,9 @@
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218

# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}

# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
@@ -13,9 +16,6 @@ POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical

# Required
DB_CONNECTION_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}

# Redis
REDIS_URL=redis://redis:6379

@@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=

@@ -37,22 +36,16 @@ CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=

# Sentry (optional) for monitoring errors
SENTRY_DSN=

@@ -70,53 +63,3 @@ CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=

OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

# App Connections

# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

# github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

# gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=

@@ -1,2 +1 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=
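For local runs, docker compose resolves the `${POSTGRES_USER}`-style references in DB_CONNECTION_URI from this .env file before the backend ever sees the variable. A quick way to confirm the expansion (a sketch, not part of the diff; the compose file name is taken from the Makefile targets later in this page):

```sh
# Render the effective config and check the resolved connection string;
# with the sample values above this should print the infisical:infisical@db URI.
docker compose -f docker-compose.dev.yml config | grep DB_CONNECTION_URI
```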
.github/pull_request_template.md (1 changed line, vendored)
@@ -6,7 +6,6 @@

- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation
.github/resources/rename_migration_files.py (26 changed lines, vendored, file removed)
@@ -1,26 +0,0 @@
import os
from datetime import datetime, timedelta

def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Find the latest file among the changed files
    latest_timestamp = datetime.now()  # utc time
    for file_path in changed_files:
        file_path = file_path.strip()
        # each new file bump by 1s
        latest_timestamp = latest_timestamp + timedelta(seconds=1)

        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)
        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")

if __name__ == "__main__":
    rename_migrations()
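The removed renamer reads added_files.txt and re-stamps each new migration with a fresh, strictly increasing timestamp. Note that `file_path.split('_')[1]` keeps only the first underscore-delimited segment after the timestamp, so a migration whose descriptive name itself contains underscores would be silently truncated. A hypothetical dry run:

```sh
# Hypothetical input: one newly added migration filename per line.
echo "20240101120000_add-users-table.ts" > added_files.txt
python .github/resources/rename_migration_files.py
# -> Renamed ./backend/src/db/migrations/20240101120000_add-users-table.ts
#    to ./backend/src/db/migrations/<now+1s>_add-users-table.ts
```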
.github/workflows/build-binaries.yml (189 changed lines, vendored, file removed)
@@ -1,189 +0,0 @@
name: Build Binaries and Deploy

on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version number"
        required: true
        type: string
defaults:
  run:
    working-directory: ./backend

jobs:
  build-and-deploy:
    strategy:
      matrix:
        arch: [x64, arm64]
        os: [linux, win]
        include:
          - os: linux
            target: node20-linux
          - os: win
            target: node20-win
    runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20

      - name: Install pkg
        run: npm install -g @yao-pkg/pkg

      - name: Install dependencies (backend)
        run: npm install

      - name: Install dependencies (frontend)
        run: npm install --prefix ../frontend

      - name: Prerequisites for pkg
        run: npm run binary:build

      - name: Package into node binary
        run: |
          if [ "${{ matrix.os }}" != "linux" ]; then
            pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
          else
            pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
          fi

      # Set up .deb package structure (Debian/Ubuntu only)
      - name: Set up .deb package structure
        if: matrix.os == 'linux'
        run: |
          mkdir -p infisical-core/DEBIAN
          mkdir -p infisical-core/usr/local/bin
          cp ./binary/infisical-core infisical-core/usr/local/bin/
          chmod +x infisical-core/usr/local/bin/infisical-core

      - name: Create control file
        if: matrix.os == 'linux'
        run: |
          cat <<EOF > infisical-core/DEBIAN/control
          Package: infisical-core
          Version: ${{ github.event.inputs.version }}
          Section: base
          Priority: optional
          Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
          Maintainer: Infisical <daniel@infisical.com>
          Description: Infisical Core standalone executable (app.infisical.com)
          EOF

      # Build .deb file (Debian/Ubuntu only)
      - name: Build .deb package
        if: matrix.os == 'linux'
        run: |
          dpkg-deb --build infisical-core
          mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb

      ### RPM

      # Set up .rpm package structure
      - name: Set up .rpm package structure
        if: matrix.os == 'linux'
        run: |
          mkdir -p infisical-core-rpm/usr/local/bin
          cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
          chmod +x infisical-core-rpm/usr/local/bin/infisical-core

      # Install RPM build tools
      - name: Install RPM build tools
        if: matrix.os == 'linux'
        run: sudo apt-get update && sudo apt-get install -y rpm

      # Create .spec file for RPM
      - name: Create .spec file for RPM
        if: matrix.os == 'linux'
        run: |
          cat <<EOF > infisical-core.spec

          %global _enable_debug_package 0
          %global debug_package %{nil}
          %global __os_install_post /usr/lib/rpm/brp-compress %{nil}

          Name: infisical-core
          Version: ${{ github.event.inputs.version }}
          Release: 1%{?dist}
          Summary: Infisical Core standalone executable
          License: Proprietary
          URL: https://app.infisical.com

          %description
          Infisical Core standalone executable (app.infisical.com)

          %install
          mkdir -p %{buildroot}/usr/local/bin
          cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/

          %files
          /usr/local/bin/infisical-core

          %pre

          %post

          %preun

          %postun
          EOF

      # Build .rpm file
      - name: Build .rpm package
        if: matrix.os == 'linux'
        run: |
          # Create necessary directories
          mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}

          # Copy the binary directly to SOURCES
          cp ./binary/infisical-core rpmbuild/SOURCES/

          # Run rpmbuild with verbose output
          rpmbuild -vv -bb \
            --define "_topdir $(pwd)/rpmbuild" \
            --define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
            --define "_rpmdir $(pwd)/rpmbuild/RPMS" \
            --target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
            infisical-core.spec

          # Try to find the RPM file
          find rpmbuild -name "*.rpm"

          # Move the RPM file if found
          if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
            mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
          else
            echo "RPM file not found!"
            exit 1
          fi

      - uses: actions/setup-python@v4
        with:
          python-version: "3.x" # Specify the Python version you need
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade cloudsmith-cli

      # Publish .deb file to Cloudsmith (Debian/Ubuntu only)
      - name: Publish to Cloudsmith (Debian/Ubuntu)
        if: matrix.os == 'linux'
        working-directory: ./backend
        run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb

      # Publish .rpm file to Cloudsmith (Red Hat-based systems only)
      - name: Publish .rpm to Cloudsmith
        if: matrix.os == 'linux'
        working-directory: ./backend
        run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm

      # Publish .exe file to Cloudsmith (Windows only)
      - name: Publish to Cloudsmith (Windows)
        if: matrix.os == 'win'
        working-directory: ./backend
        run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
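For a concrete matrix cell, {os: linux, arch: arm64} with target node20-linux, the packaging step above expands to the following (a sketch; pkg is the @yao-pkg/pkg binary installed earlier in the job):

```sh
pkg --no-bytecode --public-packages "*" --public --compress GZip \
    --target node20-linux-arm64 --output ./binary/infisical-core .
```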
@@ -41,7 +41,6 @@ jobs:
          load: true
          context: backend
          tags: infisical/infisical:test
          platforms: linux/amd64,linux/arm64
      - name: ⏻ Spawn backend container and dependencies
        run: |
          docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
@@ -93,7 +92,6 @@ jobs:
          project: 64mmf0n610
          context: frontend
          tags: infisical/frontend:test
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            NEXT_INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
.github/workflows/build-patroni-docker-img.yml (38 changed lines, vendored, file removed)
@@ -1,38 +0,0 @@
name: Build patroni
on: [workflow_dispatch]

jobs:
  patroni-image:
    name: Build patroni
    runs-on: ubuntu-latest
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          repository: 'zalando/patroni'
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile
          tags: |
            infisical/patroni:${{ steps.commit.outputs.short }}
            infisical/patroni:latest
          platforms: linux/amd64,linux/arm64
.github/workflows/build-staging-and-deploy.yml (120 changed lines, vendored, new file)
@@ -0,0 +1,120 @@
name: Build, Publish and Deploy to Gamma
on: [workflow_dispatch]

jobs:
  infisical-image:
    name: Build backend image
    runs-on: ubuntu-latest
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      # - name: 🧪 Run tests
      #   run: npm run test:ci
      #   working-directory: backend
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          load: true
          context: .
          file: Dockerfile.standalone-infisical
          tags: infisical/infisical:test
      # - name: ⏻ Spawn backend container and dependencies
      #   run: |
      #     docker compose -f .github/resources/docker-compose.be-test.yml up --wait --quiet-pull
      # - name: 🧪 Test backend image
      #   run: |
      #     ./.github/resources/healthcheck.sh infisical-backend-test
      # - name: ⏻ Shut down backend container and dependencies
      #   run: |
      #     docker compose -f .github/resources/docker-compose.be-test.yml down
      - name: 🏗️ Build backend and push
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile.standalone-infisical
          tags: |
            infisical/staging_infisical:${{ steps.commit.outputs.short }}
            infisical/staging_infisical:latest
          platforms: linux/amd64,linux/arm64
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
  postgres-migration:
    name: Run latest migration files
    runs-on: ubuntu-latest
    needs: [infisical-image]
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
          node-version: "20"
      - name: Change directory to backend and install dependencies
        env:
          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
        run: |
          cd backend
          npm install
          npm run migration:latest
      # - name: Run postgres DB migration files
      #   env:
      #     DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
      #   run: npm run migration:latest
  gamma-deployment:
    name: Deploy to gamma
    runs-on: ubuntu-latest
    needs: [postgres-migration]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install infisical helm chart
        run: |
          helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
          helm repo update
      - name: Install kubectl
        uses: azure/setup-kubectl@v3
      - name: Install doctl
        uses: digitalocean/action-doctl@v2
        with:
          token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
      - name: Save DigitalOcean kubeconfig with short-lived credentials
        run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 infisical-gamma-postgres
      - name: switch to gamma namespace
        run: kubectl config set-context --current --namespace=gamma
      - name: test kubectl
        run: kubectl get ingress
      - name: Download helm values to file and upgrade gamma deploy
        run: |
          wget https://raw.githubusercontent.com/Infisical/infisical/main/.github/values.yaml
          helm upgrade infisical infisical-helm-charts/infisical-standalone --values values.yaml --wait --install
          if [[ $(helm status infisical) == *"FAILED"* ]]; then
            echo "Helm upgrade failed"
            exit 1
          else
            echo "Helm upgrade was successful"
          fi
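The deploy step above infers failure by grepping `helm status` output for the literal string FAILED. A tighter variant reads Helm's structured output instead (a sketch, not part of the diff; assumes jq is available on the runner):

```sh
helm upgrade infisical infisical-helm-charts/infisical-standalone --values values.yaml --wait --install
status=$(helm status infisical -o json | jq -r '.info.status')
# Helm reports "deployed" for a healthy release; anything else is a failure.
[ "$status" = "deployed" ] || { echo "Helm upgrade failed: $status"; exit 1; }
```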
@@ -5,7 +5,6 @@ on:
    types: [opened, synchronize]
    paths:
      - "backend/src/server/routes/**"
      - "backend/src/ee/routes/**"

jobs:
  check-be-api-changes:
@@ -22,82 +21,55 @@ jobs:
      # uncomment this when testing locally using nektos/act
      - uses: KengoTODA/actions-setup-docker-compose@v1
        if: ${{ env.ACT }}
        name: Install `docker compose` for local simulations
        name: Install `docker-compose` for local simulations
        with:
          version: "2.14.2"
      - name: 📦 Build the latest image
        run: docker build --tag infisical-api .
        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
        run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
      - name: Start the server
        run: |
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

          echo "Examining built image:"
          docker image inspect infisical-api | grep -A 5 "Entrypoint"

          docker run --name infisical-api -d -p 4000:4000 \
            -e DB_CONNECTION_URI=$DB_CONNECTION_URI \
            -e REDIS_URL=$REDIS_URL \
            -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
            -e ENCRYPTION_KEY=$ENCRYPTION_KEY \
            --env-file .env \
            infisical-api

          echo "Container status right after creation:"
          docker ps -a | grep infisical-api
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
          JWT_AUTH_SECRET: something-random
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - uses: actions/setup-go@v5
        with:
          go-version: "1.21.5"
          go-version: '1.21.5'
      - name: Wait for container to be stable and check logs
        run: |
          SECONDS=0
          HEALTHY=0
          while [ $SECONDS -lt 60 ]; do
            # Check if container is running
            if docker ps | grep infisical-api; then
              # Try to access the API endpoint
              if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
                echo "API endpoint is responding. Container seems healthy."
                HEALTHY=1
                break
              fi
            else
              echo "Container is not running!"
              docker ps -a | grep infisical-api
            if docker ps | grep infisical-api | grep -q healthy; then
              echo "Container is healthy."
              HEALTHY=1
              break
            fi

            echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
            sleep 5
            SECONDS=$((SECONDS+5))

            docker logs infisical-api

            sleep 2
            SECONDS=$((SECONDS+2))
          done

          if [ $HEALTHY -ne 1 ]; then
            echo "Container did not become healthy in time"
            echo "Container status:"
            docker ps -a | grep infisical-api
            echo "Container logs (if any):"
            docker logs infisical-api || echo "No logs available"
            echo "Container inspection:"
            docker inspect infisical-api | grep -A 5 "State"
            exit 1
          fi
      - name: Install openapi-diff
        run: go install github.com/oasdiff/oasdiff@latest
        run: go install github.com/tufin/oasdiff@latest
      - name: Running OpenAPI Spec diff action
        run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
      - name: cleanup
        if: always()
        run: |
          docker compose -f "docker-compose.dev.yml" down
          docker stop infisical-api || true
          docker rm infisical-api || true
          docker-compose -f "docker-compose.dev.yml" down
          docker stop infisical-api
          docker remove infisical-api
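Both module paths for oasdiff appear above because the project moved from github.com/tufin/oasdiff to github.com/oasdiff/oasdiff. A hedged local replay of the breaking-change gate, assuming a server is already listening on port 4000:

```sh
# Install the diff tool, then compare the live production spec against the local one;
# oasdiff exits non-zero when it finds a breaking change at the ERR level.
go install github.com/oasdiff/oasdiff@latest
oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR \
  && echo "no breaking API changes at ERR level"
```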
.github/workflows/check-fe-ts-and-lint.yml (8 changed lines, vendored)
@@ -18,18 +18,18 @@ jobs:
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
      - name: 🔧 Setup Node 20
      - name: 🔧 Setup Node 16
        uses: actions/setup-node@v3
        with:
          node-version: "20"
          node-version: "16"
          cache: "npm"
          cache-dependency-path: frontend/package-lock.json
      - name: 📦 Install dependencies
        run: npm install
        working-directory: frontend
      - name: 🏗️ Run Type check
        run: npm run type:check
        run: npm run type:check
        working-directory: frontend
      - name: 🏗️ Run Lint check
        run: npm run lint:fix
        run: npm run lint:fix
        working-directory: frontend
@@ -1,25 +0,0 @@
name: Check migration file edited

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - 'backend/src/db/migrations/**'

jobs:
  rename:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check any migration files are modified, renamed or duplicated.
        run: |
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
          if [ -s edited_files.txt ]; then
            echo "Existing migration files cannot be modified."
            cat edited_files.txt
            exit 1
          fi
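One subtlety in the removed check: in POSIX shell, `||` binds more loosely than `|`, so `... | grep ... || true | cut ...` parses as (git diff | grep) OR (true | cut ...), and the cut/xargs stage only runs when grep matches nothing. A sketch that keeps the whole pipeline intact while still tolerating an empty match:

```sh
# Group the grep so "|| true" applies only to its exit status,
# then feed the (possibly empty) result into the rest of the pipeline.
{ git diff --name-status HEAD^ HEAD backend/src/db/migrations \
    | grep '^M\|^R\|^C' || true; } \
  | cut -f2 | xargs -r -n1 basename > edited_files.txt
```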
@@ -1,22 +0,0 @@
name: Release Infisical Core Helm chart

on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/helm_chart_release.yml (22 changed lines, vendored, new file)
@@ -0,0 +1,22 @@
name: Release Helm Charts

on: [workflow_dispatch]

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-to-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
@@ -1,115 +1,62 @@
name: Release standalone docker image
on:
  push:
    tags:
      - "infisical/v*.*.*-postgres"
  push:
    tags:
      - "infisical/v*.*.*-postgres"

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

  infisical-fips-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical-fips:latest-postgres
            infisical/infisical-fips:${{ steps.commit.outputs.short }}
            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.fips.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml
  infisical-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
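The version step relies on shell prefix stripping; on a hypothetical tag it behaves like this (note that GitHub has since deprecated ::set-output in favor of writing to $GITHUB_OUTPUT):

```sh
GITHUB_REF_NAME="infisical/v0.46.3-postgres"   # hypothetical tag name
echo "${GITHUB_REF_NAME#infisical/}"           # -> v0.46.3-postgres
```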
.github/workflows/release_build_infisical_cli.yml (176 changed lines, vendored)
@@ -1,132 +1,60 @@
name: Build and release CLI

on:
  workflow_dispatch:

  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"
  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"

permissions:
  contents: write
  contents: write
  # packages: write
  # issues: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
      CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-latest
  goreleaser:
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: libssl1.1 => libssl1.0-dev for OSXCross
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt update && apt-cache policy libssl1.0-dev
          sudo apt-get install libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: latest
          args: release --clean
        env:
    working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Print version
        run: echo ${{ env.CLI_VERSION }}

      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-latest
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: Setup for libssl1.0-dev
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
          sudo apt update
          sudo apt-get install -y libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: v1.26.2-pro
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
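The npm-release job's Extract version step is a plain sed substitution; replayed on a hypothetical tag:

```sh
echo "infisical-cli/v0.19.0" | sed 's/infisical-cli\/v//'   # -> 0.19.0
```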
@@ -1,4 +1,4 @@
name: Release image + Helm chart K8s Operator
name: Release Docker image for K8 operator
on:
  push:
    tags:
@@ -35,18 +35,3 @@ jobs:
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}

      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/run-backend-tests.yml (12 changed lines, vendored)
@@ -20,7 +20,7 @@ jobs:
        uses: actions/checkout@v3
      - uses: KengoTODA/actions-setup-docker-compose@v1
        if: ${{ env.ACT }}
        name: Install `docker compose` for local simulations
        name: Install `docker-compose` for local simulations
        with:
          version: "2.14.2"
      - name: 🔧 Setup Node 20
@@ -33,11 +33,8 @@ jobs:
        run: npm install
        working-directory: backend
      - name: Start postgres and redis
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Run unit test
        run: npm run test:unit
        working-directory: backend
      - name: Run integration test
        run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
      - name: Start integration test
        run: npm run test:e2e
        working-directory: backend
        env:
@@ -47,5 +44,4 @@ jobs:
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - name: cleanup
        run: |
          docker compose -f "docker-compose.dev.yml" down
          docker-compose -f "docker-compose.dev.yml" down
.github/workflows/run-cli-tests.yml (55 changed lines, vendored, file removed)
@@ -1,55 +0,0 @@
name: Go CLI Tests

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "cli/**"

  workflow_dispatch:

  workflow_call:
    secrets:
      CLI_TESTS_UA_CLIENT_ID:
        required: true
      CLI_TESTS_UA_CLIENT_SECRET:
        required: true
      CLI_TESTS_SERVICE_TOKEN:
        required: true
      CLI_TESTS_PROJECT_ID:
        required: true
      CLI_TESTS_ENV_SLUG:
        required: true
      CLI_TESTS_USER_EMAIL:
        required: true
      CLI_TESTS_USER_PASSWORD:
        required: true
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
        required: true
jobs:
  test:
    defaults:
      run:
        working-directory: ./cli
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.x"
      - name: Install dependencies
        run: go get .
      - name: Test with the Go CLI
        env:
          CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
          CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
          CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
          CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
          CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
          CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
          CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
          # INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

        run: go test -v -count=1 ./test
.gitignore (8 changed lines, vendored)
@@ -59,17 +59,9 @@ yarn-error.log*
# Infisical init
.infisical.json

.infisicalignore

# Editor specific
.vscode/*
.idea/*

frontend-build

*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

/npm/bin
@@ -1,12 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
    echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
    exit 1
fi

npx lint-staged

infisical scan git-changes --staged -v
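The hook can be exercised outside of git by running its three stages by hand (same commands as above, assuming the repo's dev dependencies are installed):

```sh
command -v infisical >/dev/null 2>&1 || { echo "Infisical CLI not installed"; exit 1; }
npx lint-staged                          # lint only the staged files
infisical scan git-changes --staged -v   # scan staged changes for leaked secrets
```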
@@ -1,10 +1 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
@@ -1,184 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base

FROM base AS frontend-dependencies
WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

USER non-root-user

##
## BACKEND
##
FROM base AS backend-build

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production

# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
@ -1,15 +1,17 @@
|
||||
ARG POSTHOG_HOST=https://app.posthog.com
|
||||
ARG POSTHOG_API_KEY=posthog-api-key
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ARG CAPTCHA_SITE_KEY=captcha-site-key
|
||||
|
||||
FROM node:20-slim AS base
|
||||
FROM node:20-alpine AS base
|
||||
|
||||
FROM base AS frontend-dependencies
|
||||
|
||||
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
|
||||
RUN apk add --no-cache libc6-compat
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY frontend/package.json frontend/package-lock.json ./
|
||||
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
|
||||
|
||||
# Install dependencies
|
||||
RUN npm ci --only-production --ignore-scripts
|
||||
@ -24,16 +26,15 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
||||
COPY /frontend .
|
||||
|
||||
ENV NODE_ENV production
|
||||
ENV NEXT_PUBLIC_ENV production
|
||||
ARG POSTHOG_HOST
|
||||
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
|
||||
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
|
||||
ARG POSTHOG_API_KEY
|
||||
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
|
||||
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
|
||||
ARG INTERCOM_ID
|
||||
ENV VITE_INTERCOM_ID $INTERCOM_ID
|
||||
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
|
||||
ARG INFISICAL_PLATFORM_VERSION
|
||||
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
|
||||
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||
|
||||
# Build
|
||||
RUN npm run build
|
||||
@ -42,35 +43,31 @@ RUN npm run build
|
||||
FROM base AS frontend-runner
|
||||
WORKDIR /app
|
||||
|
||||
RUN groupadd --system --gid 1001 nodejs
|
||||
RUN useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 non-root-user
|
||||
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
|
||||
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
|
||||
VOLUME /app/.next/cache/images
|
||||
|
||||
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
|
||||
COPY --from=frontend-builder /app/public ./public
|
||||
RUN chown non-root-user:nodejs ./public/data
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
|
||||
|
||||
USER non-root-user
|
||||
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
|
||||
##
|
||||
## BACKEND
|
||||
##
|
||||
FROM base AS backend-build
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install all required dependencies for build
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN groupadd --system --gid 1001 nodejs
|
||||
RUN useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
|
||||
COPY backend/package*.json ./
|
||||
RUN npm ci --only-production
|
||||
|
||||
@ -84,21 +81,6 @@ FROM base AS backend-runner
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install all required dependencies for runtime
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Configure ODBC
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production

@@ -108,59 +90,23 @@ RUN mkdir frontend-build

# Production stage
FROM base AS production

RUN apt-get update && apt-get install -y \
    ca-certificates \
    bash \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    wget \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
    && useradd --system --uid 1001 --gid nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
@@ -168,11 +114,13 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true

WORKDIR /backend

ENV TELEMETRY_ENABLED true

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
    CMD node healthcheck.js
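The health probe above shells out to healthcheck.js, which is not part of this diff; the following is only a minimal sketch of what such a probe could look like, assuming the API listens on the PORT set above and exposes a status route (the /api/status path is an assumption for illustration, not taken from this repo):

// healthcheck.js (sketch): exit 0 when the local API answers, 1 otherwise.
const http = require("http");

const req = http.request(
  { host: "127.0.0.1", port: process.env.PORT || 8080, path: "/api/status", timeout: 3000 }, // path assumed
  (res) => process.exit(res.statusCode === 200 ? 0 : 1)
);
req.on("error", () => process.exit(1));
req.on("timeout", () => req.destroy(new Error("timeout")));
req.end();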

EXPOSE 8080
EXPOSE 443
18
Makefile

@@ -10,26 +10,8 @@ up-dev:

up-dev-ldap:
	docker compose -f docker-compose.dev.yml --profile ldap up --build

up-dev-metrics:
	docker compose -f docker-compose.dev.yml --profile metrics up --build

up-prod:
	docker-compose -f docker-compose.prod.yml up --build

down:
	docker compose -f docker-compose.dev.yml down

reviewable-ui:
	cd frontend && \
	npm run lint:fix && \
	npm run type:check

reviewable-api:
	cd backend && \
	npm run lint:fix && \
	npm run type:check

reviewable: reviewable-ui reviewable-api

up-dev-sso:
	docker compose -f docker-compose.dev.yml --profile sso up --build
@@ -23,17 +23,16 @@ module.exports = {
  root: true,
  overrides: [
    {
      files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
      files: ["./e2e-test/**/*"],
      rules: {
        "@typescript-eslint/no-unsafe-member-access": "off",
        "@typescript-eslint/no-unsafe-assignment": "off",
        "@typescript-eslint/no-unsafe-argument": "off",
        "@typescript-eslint/no-unsafe-return": "off",
        "@typescript-eslint/no-unsafe-call": "off"
        "@typescript-eslint/no-unsafe-call": "off",
      }
    }
  ],

  rules: {
    "@typescript-eslint/no-empty-function": "off",
    "@typescript-eslint/no-unsafe-enum-comparison": "off",
@@ -1,23 +1,8 @@
# Build stage
FROM node:20-slim AS build
FROM node:20-alpine AS build

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    openssh-client

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    freetds-bin \
    freetds-dev \
    unixodbc-dev \
    libc-dev

COPY package*.json ./
RUN npm ci --only-production

@@ -25,36 +10,20 @@ COPY . .
RUN npm run build

# Production stage
FROM node:20-slim
FROM node:20-alpine

WORKDIR /app

ENV npm_config_cache /home/node/.npm

COPY package*.json ./

RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    freetds-bin \
    freetds-dev \
    unixodbc-dev \
    libc-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .

# Install Infisical CLI
RUN apt-get install -y curl bash && \
    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && apt-get install -y infisical=0.8.1 git
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
    CMD node healthcheck.js
@@ -1,62 +1,8 @@
FROM node:20-slim
FROM node:20-alpine

# ? Setup a test SoftHSM module. In production a real HSM is used.

ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
    build-essential \
    autoconf \
    automake \
    git \
    libtool \
    libssl-dev \
    python3 \
    make \
    g++ \
    openssh-client \
    curl \
    pkg-config

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
    && sh autogen.sh \
    && ./configure --prefix=/usr/local --disable-gost \
    && make \
    && make install

WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# Install pkcs11-tool
RUN apt-get install -y opensc

RUN mkdir -p /etc/softhsm2/tokens && \
    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
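The comment at the top of this Dockerfile notes that pkcs11js is what talks to this SoftHSM token. A minimal sketch of opening a session against the token initialised above might look as follows; the module path follows the --prefix=/usr/local build and the PIN comes from the init-token flags, but everything else (including whether the app connects this way at all) is an assumption for illustration:

import pkcs11js from "pkcs11js";

const pkcs11 = new pkcs11js.PKCS11();
pkcs11.load("/usr/local/lib/softhsm/libsofthsm2.so"); // path assumed from the build prefix above
pkcs11.C_Initialize();
try {
  const [slot] = pkcs11.C_GetSlotList(true); // first slot with a token present
  const session = pkcs11.C_OpenSession(slot, pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION);
  pkcs11.C_Login(session, pkcs11js.CKU_USER, "1234"); // PIN from softhsm2-util --init-token above
  // ...key operations would go here...
  pkcs11.C_Logout(session);
  pkcs11.C_CloseSession(session);
} finally {
  pkcs11.C_Finalize();
}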

# ? App setup

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && \
    apt-get install -y infisical=0.8.1
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git

WORKDIR /app
@@ -1,4 +0,0 @@
{
  "presets": ["@babel/preset-env", "@babel/preset-react"],
  "plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
}
@@ -1,5 +1,4 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";

export const mockKeyStore = (): TKeyStoreFactory => {
  const store: Record<string, string | number | Buffer> = {};
@@ -26,12 +25,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
    },
    incrementBy: async () => {
      return 1;
    },
    acquireLock: () => {
      return Promise.resolve({
        release: () => {}
      }) as Promise<Lock>;
    },
    waitTillReady: async () => {}
  }
};
};
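As a usage note, the acquireLock stub above lets tests exercise lock-guarded code paths without a real Redis-backed red-lock. A caller might drive it like this sketch; the argument list of acquireLock on the real TKeyStoreFactory is assumed here, not taken from this diff:

const keyStore = mockKeyStore();
const lock = await keyStore.acquireLock(["test-resource"], 5000); // args assumed for illustration
try {
  // ...code that must hold the lock...
} finally {
  await lock.release(); // the mock's release is a no-op
}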
@@ -10,22 +10,17 @@ export const mockQueue = (): TQueueServiceFactory => {
    queue: async (name, jobData) => {
      job[name] = jobData;
    },
    queuePg: async () => {},
    initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
    start: (name, jobFn) => {
      queues[name] = jobFn;
      workers[name] = jobFn;
    },
    startPg: async () => {},
    listen: (name, event) => {
      events[name] = event;
    },
    getRepeatableJobs: async () => [],
    clearQueue: async () => {},
    stopJobById: async () => {},
    stopRepeatableJobByJobId: async () => true,
    stopRepeatableJobByKey: async () => true
    stopRepeatableJobByJobId: async () => true
  };
};
@@ -5,9 +5,6 @@ export const mockSmtpServer = (): TSmtpService => {
  return {
    sendMail: async (data) => {
      storage.push(data);
    },
    verify: async () => {
      return true;
    }
  };
};
@@ -34,7 +34,7 @@ describe("Identity v1", async () => {
  test("Create identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
    expect(newIdentity.name).toBe("mac1");
    expect(newIdentity.authMethods).toEqual([]);
    expect(newIdentity.authMethod).toBeNull();

    await deleteIdentity(newIdentity.id);
  });
@@ -42,7 +42,7 @@ describe("Identity v1", async () => {
  test("Update identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
    expect(newIdentity.name).toBe("mac1");
    expect(newIdentity.authMethods).toEqual([]);
    expect(newIdentity.authMethod).toBeNull();

    const updatedIdentity = await testServer.inject({
      method: "PATCH",
@@ -39,6 +39,8 @@ describe("Login V1 Router", async () => {
    });
    expect(res.statusCode).toBe(200);
    const payload = JSON.parse(res.payload);
    expect(payload).toHaveProperty("mfaEnabled");
    expect(payload).toHaveProperty("token");
    expect(payload.mfaEnabled).toBeFalsy();
  });
});
@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
        id: deletedProjectEnvironment.id,
        name: mockProjectEnv.name,
        slug: mockProjectEnv.slug,
        position: 5,
        position: 4,
        createdAt: expect.any(String),
        updatedAt: expect.any(String)
      })
@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [{id:seedData1.id, type: ApproverType.User}],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
@@ -1,61 +1,73 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

const createSecretImport = async (importPath: string, importEnv: string) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: "/",
      import: {
        environment: importEnv,
        path: importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport;
};

const deleteSecretImport = async (id: string) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: "/"
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport;
};

describe("Secret Import Router", async () => {
  test.each([
    { importEnv: "prod", importPath: "/" }, // one in root
    { importEnv: "dev", importPath: "/" }, // one in root
    { importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
  ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
    // check for default environments
    const payload = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath,
      importEnv
    });
    const payload = await createSecretImport(importPath, importEnv);
    expect(payload).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath,
        importPath: expect.any(String),
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: importEnv,
          slug: expect.any(String),
          id: expect.any(String)
        })
      })
    );

    await deleteSecretImport({
      id: payload.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(payload.id);
  });

  test("Get secret imports", async () => {
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const createdImport1 = await createSecretImport("/", "dev");
    const createdImport2 = await createSecretImport("/", "staging");
    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
      expect.arrayContaining([
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importPath: expect.any(String),
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "prod",
            id: expect.any(String)
          })
        }),
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "staging",
            slug: expect.any(String),
            id: expect.any(String)
          })
        })
      ])
    );
    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(createdImport1.id);
    await deleteSecretImport(createdImport2.id);
  });

  test("Update secret import position", async () => {
    const prodImportDetails = { path: "/", envSlug: "prod" };
    const devImportDetails = { path: "/", envSlug: "dev" };
    const stagingImportDetails = { path: "/", envSlug: "staging" };

    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: prodImportDetails.path,
      importEnv: prodImportDetails.envSlug
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: stagingImportDetails.path,
      importEnv: stagingImportDetails.envSlug
    });
    const createdImport1 = await createSecretImport(devImportDetails.path, devImportDetails.envSlug);
    const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);

    const updateImportRes = await testServer.inject({
      method: "PATCH",
@@ -159,7 +136,7 @@ describe("Secret Import Router", async () => {
        position: 2,
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: expect.stringMatching(prodImportDetails.envSlug),
          slug: expect.stringMatching(devImportDetails.envSlug),
          id: expect.any(String)
        })
      })
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
    expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);

    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(createdImport1.id);
    await deleteSecretImport(createdImport2.id);
  });

  test("Delete secret import position", async () => {
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const deletedImport = await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });

    const createdImport1 = await createSecretImport("/", "dev");
    const createdImport2 = await createSecretImport("/", "staging");
    const deletedImport = await deleteSecretImport(createdImport1.id);
    // check for default environments
    expect(deletedImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: "/",
        importPath: expect.any(String),
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: "prod",
          slug: expect.any(String),
          id: expect.any(String)
        })
      })
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports.length).toEqual(1);
    expect(secretImportList.secretImports[0].position).toEqual(1);

    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(createdImport2.id);
  });
});

// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import waterfall pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import multiple destination to one source pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);

// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import one source to multiple destination pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const stageImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      const prodImportFromDev = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: seedData1.environment.slug
      });

      return async () => {
        await deleteSecretImport({
          id: prodImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "prod",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: stageImportFromDev.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      const stagingSecret = await getSecretByNameV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(stagingSecret.secretKey).toBe("STAGING_KEY");
      expect(stagingSecret.secretValue).toBe("stage-value");

      const prodSecret = await getSecretByNameV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(prodSecret.secretKey).toBe("PROD_KEY");
      expect(prodSecret.secretValue).toBe("prod-value");

      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  }
);
@@ -1,406 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
  "Secret replication waterfall pattern testing - %secretPath",
  ({ secretPath: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: stageImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: "staging",
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      // wait for 10 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 10000); // time to breathe for db
      });
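
      // The fixed 10 s sleep above keeps the test simple but slow and potentially
      // flaky. A polling helper along these lines (a sketch; the name, interval and
      // timeout are illustrative, not from the original file) would return as soon
      // as the replication worker has caught up:
      const waitUntil = async (check: () => Promise<boolean>, timeoutMs = 15000) => {
        const deadline = Date.now() + timeoutMs;
        while (Date.now() < deadline) {
          // eslint-disable-next-line no-await-in-loop
          if (await check()) return;
          // eslint-disable-next-line no-await-in-loop
          await new Promise((resolve) => setTimeout(resolve, 500));
        }
        throw new Error("condition not met before timeout");
      };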

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });

      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait for 10 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });

      expect(secret.secretKey).toBe("PROD_KEY");
      expect(secret.secretValue).toBe("prod-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret replication 1-N pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        stagingFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "staging",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });

        devFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: seedData1.environment.slug,
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const devImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        await deleteSecretImport({
          id: devImportFromProd.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        await deleteSecretImport({
          id: devImportFromStage.id,
          workspaceId: seedData1.projectV3.id,
          environmentSlug: seedData1.environment.slug,
          secretPath: testSuitePath,
          authToken: jwtAuthToken
        });

        if (prodFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: prodFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "prod"
          });
        }

        if (stagingFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: stagingFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: "staging"
          });
        }

        if (devFolder) {
          await deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id: devFolder.id,
            workspaceId: seedData1.projectV3.id,
            environmentSlug: seedData1.environment.slug
          });
        }
      };
    });

    test("Check imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      await createSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY",
        value: "prod-value"
      });

      // wait for 10 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "STAGING_KEY",
                secretValue: "stage-value"
              })
            ])
          }),
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "PROD_KEY",
                secretValue: "prod-value"
              })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
      await deleteSecretV2({
        environmentSlug: "prod",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "PROD_KEY"
      });
    });
  },
  { timeout: 30000 }
);
@@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
        authorization: `Bearer ${serviceToken}`
      }
    });
    expect(fetchSecrets.statusCode).toBe(403);
    expect(fetchSecrets.statusCode).toBe(401);
    expect(fetchSecrets.json().error).toBe("PermissionDenied");
    await deleteServiceToken();
  });
@@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
        authorization: `Bearer ${serviceToken}`
      }
    });
    expect(fetchSecrets.statusCode).toBe(403);
    expect(fetchSecrets.statusCode).toBe(401);
    expect(fetchSecrets.json().error).toBe("PermissionDenied");
    await deleteServiceToken();
  });
@@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
        authorization: `Bearer ${serviceToken}`
      }
    });
    expect(writeSecrets.statusCode).toBe(403);
    expect(writeSecrets.statusCode).toBe(401);
    expect(writeSecrets.json().error).toBe("PermissionDenied");

    // but read access should still work fine
@@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret Recursive Testing", async () => {
  const projectId = seedData1.projectV3.id;
  const folderAndSecretNames = [
    { name: "deep1", path: "/", expectedSecretCount: 4 },
    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
  ];

  beforeAll(async () => {
    const rootFolderIds: string[] = [];
    for (const folder of folderAndSecretNames) {
      // eslint-disable-next-line no-await-in-loop
      const createdFolder = await createFolder({
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: folder.path,
        name: folder.name
      });

      if (folder.path === "/") {
        rootFolderIds.push(createdFolder.id);
      }
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2({
        secretPath: folder.path,
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        key: folder.name,
        value: folder.name
      });
    }

    return async () => {
      await Promise.all(
        rootFolderIds.map((id) =>
          deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id,
            workspaceId: projectId,
            environmentSlug: "prod"
          })
        )
      );

      await deleteSecretV2({
        authToken: jwtAuthToken,
        secretPath: "/",
        workspaceId: projectId,
        environmentSlug: "prod",
        key: folderAndSecretNames[0].name
      });
    };
  });

  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
    const secrets = await getSecretsV2({
      authToken: jwtAuthToken,
      secretPath: path,
      workspaceId: projectId,
      environmentSlug: "prod",
      recursive: true
    });

    expect(secrets.secrets.length).toEqual(expectedSecretCount);
    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
      folderAndSecretNames
        .filter((el) => el.path.startsWith(path))
        .sort((a, b) => a.name.localeCompare(b.name))
        .map((el) =>
          expect.objectContaining({
            secretKey: el.name,
            secretValue: el.name
          })
        )
    );
  });
});
@@ -1,344 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret expansion", () => {
  const projectId = seedData1.projectV3.id;

  beforeAll(async () => {
    const prodRootFolder = await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/",
      name: "deep"
    });

    await createFolder({
      authToken: jwtAuthToken,
      environmentSlug: "prod",
      workspaceId: projectId,
      secretPath: "/deep",
      name: "nested"
    });

    return async () => {
      await deleteFolder({
        authToken: jwtAuthToken,
        secretPath: "/",
        id: prodRootFolder.id,
        workspaceId: projectId,
        environmentSlug: "prod"
      });
    };
  });

  test("Local secret reference", async () => {
    const secrets = [
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "HELLO",
        value: "world"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "TEST",
        // eslint-disable-next-line
        value: "hello ${HELLO}"
      }
    ];

    for (const secret of secrets) {
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2(secret);
    }

    const expandedSecret = await getSecretByNameV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      key: "TEST"
    });
    expect(expandedSecret.secretValue).toBe("hello world");

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.secrets).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretKey: "TEST",
          secretValue: "hello world"
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
  });
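
  // The ${HELLO} reference exercised above is resolved by Infisical's expansion
  // logic. Conceptually, local references behave like this naive single-pass
  // interpolation sketch (illustrative only; the real resolver also handles the
  // cross-environment ${env.path.KEY} form tested below, plus recursion):
  const interpolateLocal = (value: string, secretMap: Record<string, string>) =>
    value.replace(/\$\{([A-Z0-9_]+)\}/g, (match, key: string) => secretMap[key] ?? match);
  // interpolateLocal("hello ${HELLO}", { HELLO: "world" }) === "hello world"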
|
||||
|
||||
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
for (const secret of secrets) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await createSecretV2(secret);
|
||||
}
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});

  test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
    const secrets = [
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep",
        authToken: jwtAuthToken,
        key: "DEEP_KEY_1",
        value: "testing"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_1",
        value: "reference"
      },
      {
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: "/deep/nested",
        authToken: jwtAuthToken,
        key: "NESTED_KEY_2",
        // eslint-disable-next-line
        value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
      },
      {
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        key: "KEY",
        // eslint-disable-next-line
        value: "hello world"
      }
    ];

    for (const secret of secrets) {
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2(secret);
    }

    const secretImportFromProdToDev = await createSecretImport({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken,
      importEnv: "prod",
      importPath: "/deep/nested"
    });

    const listSecrets = await getSecretsV2({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    expect(listSecrets.imports).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          secretPath: "/deep/nested",
          environment: "prod",
          secrets: expect.arrayContaining([
            expect.objectContaining({
              secretKey: "NESTED_KEY_1",
              secretValue: "reference"
            }),
            expect.objectContaining({
              secretKey: "NESTED_KEY_2",
              secretValue: "secret reference testing"
            })
          ])
        })
      ])
    );

    await Promise.all(secrets.map((el) => deleteSecretV2(el)));
    await deleteSecretImport({
      environmentSlug: seedData1.environment.slug,
      workspaceId: projectId,
      authToken: jwtAuthToken,
      id: secretImportFromProdToDev.id,
      secretPath: "/"
    });
  });

  test(
    "Replicated secret import secret expansion on local reference and nested reference",
    async () => {
      const secrets = [
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep",
          authToken: jwtAuthToken,
          key: "DEEP_KEY_1",
          value: "testing"
        },
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep/nested",
          authToken: jwtAuthToken,
          key: "NESTED_KEY_1",
          value: "reference"
        },
        {
          environmentSlug: "prod",
          workspaceId: projectId,
          secretPath: "/deep/nested",
          authToken: jwtAuthToken,
          key: "NESTED_KEY_2",
          // eslint-disable-next-line
          value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
        },
        {
          environmentSlug: seedData1.environment.slug,
          workspaceId: projectId,
          secretPath: "/",
          authToken: jwtAuthToken,
          key: "KEY",
          // eslint-disable-next-line
          value: "hello world"
        }
      ];

      for (const secret of secrets) {
        // eslint-disable-next-line no-await-in-loop
        await createSecretV2(secret);
      }

      const secretImportFromProdToDev = await createSecretImport({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken,
        importEnv: "prod",
        importPath: "/deep/nested",
        isReplication: true
      });

      // wait 5 seconds for replication to finish (a polling sketch follows this describe block)
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // give the db time to breathe
      });

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        secretPath: "/",
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
            environment: seedData1.environment.slug,
            secrets: expect.arrayContaining([
              expect.objectContaining({
                secretKey: "NESTED_KEY_1",
                secretValue: "reference"
              }),
              expect.objectContaining({
                secretKey: "NESTED_KEY_2",
                secretValue: "secret reference testing"
              })
            ])
          })
        ])
      );

      await Promise.all(secrets.map((el) => deleteSecretV2(el)));
      await deleteSecretImport({
        environmentSlug: seedData1.environment.slug,
        workspaceId: projectId,
        authToken: jwtAuthToken,
        id: secretImportFromProdToDev.id,
        secretPath: "/"
      });
    },
    { timeout: 10000 }
  );
});
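
// The replicated test above sleeps a fixed 5 seconds before asserting, which
// can flake when the replication queue is slow. A hedged alternative is to
// poll until the replicated import shows up — a minimal sketch; the generic
// shape and the 250 ms step are assumptions, not a project convention:
const waitFor = async <T>(probe: () => Promise<T>, done: (value: T) => boolean, timeoutMs = 10000): Promise<T> => {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    // eslint-disable-next-line no-await-in-loop
    const value = await probe();
    if (done(value)) return value;
    if (Date.now() > deadline) throw new Error("waitFor: condition not met before timeout");
    // eslint-disable-next-line no-await-in-loop
    await new Promise((resolve) => {
      setTimeout(resolve, 250);
    });
  }
};
// Usage sketch: await waitFor(() => getSecretsV2(dto), (res) => res.imports.length > 0);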
@@ -1,678 +0,0 @@
import { SecretType } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
import { AuthMode } from "@app/services/auth/auth-type";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,
    environment: seedData1.environment.slug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.path,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

const deleteSecret = async (dto: { path: string; key: string }) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.projectV3.id,
      environment: seedData1.environment.slug,
      secretPath: dto.path
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])(
  "Secret V2 Architecture - $auth mode",
  async ({ auth }) => {
    let folderId = "";
    let authToken = "";
    const secretTestCases = [
      {
        path: "/",
        secret: {
          key: "SEC1",
          value: "something-secret",
          comment: "some comment"
        }
      },
      {
        path: "/nested1/nested2/folder",
        secret: {
          key: "NESTED-SEC1",
          value: "something-secret",
          comment: "some comment"
        }
      },
      {
        path: "/",
        secret: {
          key: "secret-key-2",
          value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
          comment:
            "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        }
      },
      {
        path: "/nested1/nested2/folder",
        secret: {
          key: "secret-key-3",
          value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
          comment:
            "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        }
      },
      {
        path: "/nested1/nested2/folder",
        secret: {
          key: "secret-key-3",
          value:
            "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGRvIGVpdXNtb2QgdGVtcG9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxdWlzIG5vc3RydWQgZXhlcmNpdGF0aW9uCg==",
          comment: ""
        }
      }
    ];

    beforeAll(async () => {
      if (auth === AuthMode.JWT) {
        authToken = jwtAuthToken;
      } else if (auth === AuthMode.IDENTITY_ACCESS_TOKEN) {
        const identityLogin = await testServer.inject({
          method: "POST",
          url: "/api/v1/auth/universal-auth/login",
          body: {
            clientSecret: seedData1.machineIdentity.clientCredentials.secret,
            clientId: seedData1.machineIdentity.clientCredentials.id
          }
        });
        expect(identityLogin.statusCode).toBe(200);
        authToken = identityLogin.json().accessToken;
      }
      // create a deep folder
      const folderCreate = await testServer.inject({
        method: "POST",
        url: `/api/v1/folders`,
        headers: {
          authorization: `Bearer ${jwtAuthToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          name: "folder",
          path: "/nested1/nested2"
        }
      });
      expect(folderCreate.statusCode).toBe(200);
      folderId = folderCreate.json().folder.id;
    });

    afterAll(async () => {
      const deleteFolder = await testServer.inject({
        method: "DELETE",
        url: `/api/v1/folders/${folderId}`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          path: "/nested1/nested2"
        }
      });
      expect(deleteFolder.statusCode).toBe(200);
    });

    const getSecrets = async (environment: string, secretPath = "/") => {
      const res = await testServer.inject({
        method: "GET",
        url: `/api/v3/secrets/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        query: {
          secretPath,
          environment,
          workspaceId: seedData1.projectV3.id
        }
      });
      const secrets: TRawSecret[] = JSON.parse(res.payload).secrets || [];
      return secrets;
    };

    test.each(secretTestCases)("Create secret in path $path", async ({ secret, path }) => {
      const createdSecret = await createSecret({ path, ...secret });
      expect(createdSecret.secretKey).toEqual(secret.key);
      expect(createdSecret.secretValue).toEqual(secret.value);
      expect(createdSecret.secretComment || "").toEqual(secret.comment);
      expect(createdSecret.version).toEqual(1);

      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secretKey: secret.key,
            secretValue: secret.value,
            type: SecretType.Shared
          })
        ])
      );
      await deleteSecret({ path, key: secret.key });
    });

    test.each(secretTestCases)("Get secret by name in path $path", async ({ secret, path }) => {
      await createSecret({ path, ...secret });

      const getSecByNameRes = await testServer.inject({
        method: "GET",
        url: `/api/v3/secrets/raw/${secret.key}`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        query: {
          secretPath: path,
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug
        }
      });
      expect(getSecByNameRes.statusCode).toBe(200);
      const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
      expect(getSecretByNamePayload).toHaveProperty("secret");
      const decryptedSecret = getSecretByNamePayload.secret as TRawSecret;
      expect(decryptedSecret.secretKey).toEqual(secret.key);
      expect(decryptedSecret.secretValue).toEqual(secret.value);
      expect(decryptedSecret.secretComment || "").toEqual(secret.comment);

      await deleteSecret({ path, key: secret.key });
    });

    if (auth === AuthMode.JWT) {
      test.each(secretTestCases)(
        "Creating personal secret without shared throw error in path $path",
        async ({ secret }) => {
          const createSecretReqBody = {
            workspaceId: seedData1.projectV3.id,
            environment: seedData1.environment.slug,
            type: SecretType.Personal,
            secretKey: secret.key,
            secretValue: secret.value,
            secretComment: secret.comment
          };
          const createSecRes = await testServer.inject({
            method: "POST",
            url: `/api/v3/secrets/raw/SEC2`,
            headers: {
              authorization: `Bearer ${authToken}`
            },
            body: createSecretReqBody
          });
          const payload = JSON.parse(createSecRes.payload);
          expect(createSecRes.statusCode).toBe(400);
          expect(payload.error).toEqual("BadRequest");
        }
      );

      test.each(secretTestCases)("Creating personal secret in path $path", async ({ secret, path }) => {
        await createSecret({ path, ...secret });

        const createSecretReqBody = {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          type: SecretType.Personal,
          secretPath: path,
          secretKey: secret.key,
          secretValue: "personal-value",
          secretComment: secret.comment
        };
        const createSecRes = await testServer.inject({
          method: "POST",
          url: `/api/v3/secrets/raw/${secret.key}`,
          headers: {
            authorization: `Bearer ${authToken}`
          },
          body: createSecretReqBody
        });
        expect(createSecRes.statusCode).toBe(200);

        // list secrets should contain the personal one and the shared one
        const secrets = await getSecrets(seedData1.environment.slug, path);
        expect(secrets).toEqual(
          expect.arrayContaining([
            expect.objectContaining({
              secretKey: secret.key,
              secretValue: secret.value,
              type: SecretType.Shared
            }),
            expect.objectContaining({
              secretKey: secret.key,
              secretValue: "personal-value",
              type: SecretType.Personal
            })
          ])
        );

        await deleteSecret({ path, key: secret.key });
      });

      test.each(secretTestCases)(
        "Deleting personal one should not delete shared secret in path $path",
        async ({ secret, path }) => {
          await createSecret({ path, ...secret }); // shared one
          await createSecret({ path, ...secret, type: SecretType.Personal });

          // shared secret deletion should delete personal ones also
          const secrets = await getSecrets(seedData1.environment.slug, path);
          expect(secrets).toEqual(
            expect.arrayContaining([
              expect.objectContaining({
                secretKey: secret.key,
                type: SecretType.Shared
              }),
              expect.not.objectContaining({
                secretKey: secret.key,
                type: SecretType.Personal
              })
            ])
          );
          await deleteSecret({ path, key: secret.key });
        }
      );
    }

    test.each(secretTestCases)("Update secret in path $path", async ({ path, secret }) => {
      await createSecret({ path, ...secret });
      const updateSecretReqBody = {
        workspaceId: seedData1.projectV3.id,
        environment: seedData1.environment.slug,
        type: SecretType.Shared,
        secretPath: path,
        secretKey: secret.key,
        secretValue: "new-value",
        secretComment: secret.comment
      };
      const updateSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/raw/${secret.key}`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: updateSecretReqBody
      });
      expect(updateSecRes.statusCode).toBe(200);
      const updatedSecretPayload = JSON.parse(updateSecRes.payload);
      expect(updatedSecretPayload).toHaveProperty("secret");
      const decryptedSecret = updatedSecretPayload.secret;
      expect(decryptedSecret.secretKey).toEqual(secret.key);
      expect(decryptedSecret.secretValue).toEqual("new-value");
      expect(decryptedSecret.secretComment || "").toEqual(secret.comment);

      // the secret list should have the updated value
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secretKey: secret.key,
            secretValue: "new-value",
            type: SecretType.Shared
          })
        ])
      );

      await deleteSecret({ path, key: secret.key });
    });

    test.each(secretTestCases)("Delete secret in path $path", async ({ secret, path }) => {
      await createSecret({ path, ...secret });
      const deletedSecret = await deleteSecret({ path, key: secret.key });
      expect(deletedSecret.secretKey).toEqual(secret.key);

      // shared secret deletion should delete personal ones also
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.not.arrayContaining([
          expect.objectContaining({
            secretKey: secret.key,
            type: SecretType.Shared
          }),
          expect.objectContaining({
            secretKey: secret.key,
            type: SecretType.Personal
          })
        ])
      );
    });

    test.each(secretTestCases)("Bulk create secrets in path $path", async ({ secret, path }) => {
      const createSharedSecRes = await testServer.inject({
        method: "POST",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: secret.value,
            secretComment: secret.comment
          }))
        }
      });
      expect(createSharedSecRes.statusCode).toBe(200);
      const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
      expect(createSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: secret.value,
              type: SecretType.Shared
            })
          )
        )
      );

      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test.each(secretTestCases)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
      await createSecret({ ...secret, key: `BULK-${secret.key}-1`, path });

      const createSharedSecRes = await testServer.inject({
        method: "POST",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: secret.value,
            secretComment: secret.comment
          }))
        }
      });
      expect(createSharedSecRes.statusCode).toBe(400);

      await deleteSecret({ path, key: `BULK-${secret.key}-1` });
    });

    test.each(secretTestCases)("Bulk update secrets in path $path", async ({ secret, path }) => {
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
      );

      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            secretComment: secret.comment
          }))
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist with the updated value
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          mode: "upsert",
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`,
            secretValue: "update-value",
            secretComment: secret.comment
          }))
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
      );
    });

    test("Bulk upsert secrets in multiple paths", async () => {
      const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
        secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
        secretValue: "update-value",
        secretComment: "comment",
        secretPath: secretTestCases[0].path
      }));
      const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
        secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
        secretValue: "update-value",
        secretComment: "comment",
        secretPath: secretTestCases[1].path
      }));
      const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];

      const updateSharedSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          mode: "upsert",
          secrets: testSecrets
        }
      });
      expect(updateSharedSecRes.statusCode).toBe(200);
      const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
      expect(updateSharedSecPayload).toHaveProperty("secrets");

      // bulk ones should exist in both paths
      const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
      expect(firstBatchSecretsOnInfisical).toEqual(
        expect.arrayContaining(
          firstBatchSecrets.map((el) =>
            expect.objectContaining({
              secretKey: el.secretKey,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
      expect(secondBatchSecretsOnInfisical).toEqual(
        expect.arrayContaining(
          secondBatchSecrets.map((el) =>
            expect.objectContaining({
              secretKey: el.secretKey,
              secretValue: "update-value",
              type: SecretType.Shared
            })
          )
        )
      );
      await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
    });

    test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
      await Promise.all(
        Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
      );

      const deletedSharedSecRes = await testServer.inject({
        method: "DELETE",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: {
          workspaceId: seedData1.projectV3.id,
          environment: seedData1.environment.slug,
          secretPath: path,
          secrets: Array.from(Array(5)).map((_e, i) => ({
            secretKey: `BULK-${secret.key}-${i + 1}`
          }))
        }
      });

      expect(deletedSharedSecRes.statusCode).toBe(200);
      const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
      expect(deletedSecretPayload).toHaveProperty("secrets");

      // bulk ones should no longer exist
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.not.arrayContaining(
          Array.from(Array(5)).map((_e, i) =>
            expect.objectContaining({
              secretKey: `BULK-${secret.key}-${i + 1}`,
              type: SecretType.Shared
            })
          )
        )
      );
    });
  }
);
@@ -942,113 +942,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual([]);
    });

    test.each(testRawSecrets)("Bulk create secret raw in path $path", async ({ path, secret }) => {
      const createSecretReqBody = {
        projectSlug: seedData1.project.slug,
        environment: seedData1.environment.slug,
        secretPath: path,
        secrets: [
          {
            secretKey: secret.key,
            secretValue: secret.value,
            secretComment: secret.comment
          }
        ]
      };
      const createSecRes = await testServer.inject({
        method: "POST",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: createSecretReqBody
      });
      expect(createSecRes.statusCode).toBe(200);
      const createdSecretPayload = JSON.parse(createSecRes.payload);
      expect(createdSecretPayload).toHaveProperty("secrets");

      // fetch secrets
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            key: secret.key,
            value: secret.value,
            type: SecretType.Shared
          })
        ])
      );

      await deleteRawSecret({ path, key: secret.key });
    });

    test.each(testRawSecrets)("Bulk update secret raw in path $path", async ({ secret, path }) => {
      await createRawSecret({ path, ...secret });
      const updateSecretReqBody = {
        projectSlug: seedData1.project.slug,
        environment: seedData1.environment.slug,
        secretPath: path,
        secrets: [
          {
            secretValue: "new-value",
            secretKey: secret.key
          }
        ]
      };
      const updateSecRes = await testServer.inject({
        method: "PATCH",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: updateSecretReqBody
      });
      expect(updateSecRes.statusCode).toBe(200);
      const updatedSecretPayload = JSON.parse(updateSecRes.payload);
      expect(updatedSecretPayload).toHaveProperty("secrets");

      // fetch secrets
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            key: secret.key,
            value: "new-value",
            version: 2,
            type: SecretType.Shared
          })
        ])
      );

      await deleteRawSecret({ path, key: secret.key });
    });

    test.each(testRawSecrets)("Bulk delete secret raw in path $path", async ({ path, secret }) => {
      await createRawSecret({ path, ...secret });

      const deletedSecretReqBody = {
        projectSlug: seedData1.project.slug,
        environment: seedData1.environment.slug,
        secretPath: path,
        secrets: [{ secretKey: secret.key }]
      };
      const deletedSecRes = await testServer.inject({
        method: "DELETE",
        url: `/api/v3/secrets/batch/raw`,
        headers: {
          authorization: `Bearer ${authToken}`
        },
        body: deletedSecretReqBody
      });
      expect(deletedSecRes.statusCode).toBe(200);
      const deletedSecretPayload = JSON.parse(deletedSecRes.payload);
      expect(deletedSecretPayload).toHaveProperty("secrets");

      // fetch secrets
      const secrets = await getSecrets(seedData1.environment.slug, path);
      expect(secrets).toEqual([]);
    });
  }
);

@@ -1075,7 +968,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: createSecretReqBody
    });
    expect(createSecRes.statusCode).toBe(404);
    expect(createSecRes.statusCode).toBe(400);
  });

  test("Update secret raw", async () => {
@@ -1093,7 +986,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: updateSecretReqBody
    });
    expect(updateSecRes.statusCode).toBe(404);
    expect(updateSecRes.statusCode).toBe(400);
  });

  test("Delete secret raw", async () => {
@@ -1110,6 +1003,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: deletedSecretReqBody
    });
    expect(deletedSecRes.statusCode).toBe(404);
    expect(deletedSecRes.statusCode).toBe(400);
  });
});

@@ -1,73 +0,0 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
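
// A brief usage sketch for the folder helpers above (a hedged illustration,
// not part of the deleted file; the "dev" slug and the bare workspaceId /
// authToken parameters are assumptions standing in for the test seed data):
const folderHelpersExample = async (workspaceId: string, authToken: string) => {
  const folder = await createFolder({ workspaceId, environmentSlug: "dev", secretPath: "/", name: "app", authToken });
  const folders = await listFolders({ workspaceId, environmentSlug: "dev", secretPath: "/", authToken });
  await deleteFolder({ workspaceId, environmentSlug: "dev", secretPath: "/", id: folder.id, authToken });
  return folders;
};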

@@ -1,93 +0,0 @@
type TSecretImport = {
  id: string;
  importEnv: {
    name: string;
    slug: string;
    id: string;
  };
  importPath: string;
};

export const createSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  isReplication?: boolean;
  secretPath: string;
  importPath: string;
  importEnv: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      isReplication: dto.isReplication,
      path: dto.secretPath,
      import: {
        environment: dto.importEnv,
        path: dto.importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const deleteSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  id: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const listSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImports");
  return payload.secretImports as TSecretImport[];
};

@@ -1,130 +0,0 @@
import { SecretType } from "@app/db/schemas";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

export const createSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  value: string;
  comment?: string;
  authToken: string;
  type?: SecretType;
}) => {
  const createSecretReqBody = {
    workspaceId: dto.workspaceId,
    environment: dto.environmentSlug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.secretPath,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

export const deleteSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

export const getSecretByNameV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const response = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(response.statusCode).toBe(200);
  const payload = JSON.parse(response.payload);
  expect(payload).toHaveProperty("secret");
  return payload.secret as TRawSecret;
};

export const getSecretsV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  recursive?: boolean;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true",
      recursive: String(dto.recursive || false)
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
  expect(getSecretsPayload).toHaveProperty("secrets");
  expect(getSecretsPayload).toHaveProperty("imports");
  return getSecretsPayload as {
    secrets: TRawSecret[];
    imports: {
      secretPath: string;
      environment: string;
      folderId: string;
      secrets: TRawSecret[];
    }[];
  };
};
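
// A usage sketch chaining the helpers above (hedged; the "dev" slug and "/"
// path are assumptions, and this is not a test that ships with the suite).
// Because getSecretByNameV2 sends expandSecretReferences=true, any `${...}`
// references in the stored value come back resolved:
const secretHelpersExample = async (workspaceId: string, authToken: string) => {
  await createSecretV2({ workspaceId, environmentSlug: "dev", secretPath: "/", key: "HELLO", value: "world", authToken });
  const secret = await getSecretByNameV2({ workspaceId, environmentSlug: "dev", secretPath: "/", key: "HELLO", authToken });
  await deleteSecretV2({ workspaceId, environmentSlug: "dev", secretPath: "/", key: "HELLO", authToken });
  return secret.secretValue; // "world"
};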

@@ -3,75 +3,47 @@ import "ts-node/register";

import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";

import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { mockKeyStore } from "./mocks/keystore";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
  name: "knex-env",
  transformMode: "ssr",
  async setup() {
    const logger = initLogger();
    const envConfig = initEnvConfig(logger);
    const db = initDbConnection({
      dbConnectionUri: envConfig.DB_CONNECTION_URI,
      dbRootCert: envConfig.DB_ROOT_CERT
    });

    const redis = new Redis(envConfig.REDIS_URL);
    await redis.flushdb("SYNC");

    try {
      await db.migrate.rollback(
        {
          directory: path.join(__dirname, "../src/db/migrations"),
          extension: "ts",
          tableName: "infisical_migrations"
        },
        true
      );

      await db.migrate.latest({
    const logger = await initLogger();
    const cfg = initEnvConfig(logger);
    const db = knex({
      client: "pg",
      connection: cfg.DB_CONNECTION_URI,
      migrations: {
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      });

      await db.seed.run({
      },
      seeds: {
        directory: path.join(__dirname, "../src/db/seeds"),
        extension: "ts"
      });
      }
    });

    try {
      await db.migrate.latest();
      await db.seed.run();
      const smtp = mockSmtpServer();
      const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
      const keyStore = keyStoreFactory(envConfig.REDIS_URL);

      const hsmModule = initializeHsmModule(envConfig);
      hsmModule.initialize();

      const server = await main({
        db,
        smtp,
        logger,
        queue,
        keyStore,
        hsmModule: hsmModule.getModule(),
        redis,
        envConfig
      });

      const queue = mockQueue();
      const keyStore = mockKeyStore();
      const server = await main({ db, smtp, logger, queue, keyStore });
      // @ts-expect-error type
      globalThis.testServer = server;
      // @ts-expect-error type
@@ -80,20 +52,16 @@ export default {
        authTokenType: AuthTokenType.ACCESS_TOKEN,
        userId: seedData1.id,
        tokenVersionId: seedData1.token.id,
        authMethod: AuthMethod.EMAIL,
        organizationId: seedData1.organization.id,
        accessVersion: 1
      },
      envConfig.AUTH_SECRET,
      { expiresIn: envConfig.JWT_AUTH_LIFETIME }
      cfg.AUTH_SECRET,
      { expiresIn: cfg.JWT_AUTH_LIFETIME }
    );
    } catch (error) {
      // eslint-disable-next-line
      console.log("[TEST] Error setting up environment", error);
      await db.destroy();
      throw error;
    }

    // custom setup
    return {
      async teardown() {
@@ -104,17 +72,7 @@ export default {
        // @ts-expect-error type
        delete globalThis.jwtToken;
        // called after all tests with this env have been run
        await db.migrate.rollback(
          {
            directory: path.join(__dirname, "../src/db/migrations"),
            extension: "ts",
            tableName: "infisical_migrations"
          },
          true
        );

        await redis.flushdb("ASYNC");
        redis.disconnect();
        await db.migrate.rollback({}, true);
        await db.destroy();
      }
    };

backend/package-lock.json (generated): 14234 lines changed; file diff suppressed because it is too large.
@@ -3,86 +3,34 @@
  "version": "1.0.0",
  "description": "",
  "main": "./dist/main.mjs",
  "bin": "dist/main.js",
  "pkg": {
    "scripts": [
      "dist/**/*.js",
      "../frontend/node_modules/next/**/*.js",
      "../frontend/.next/*/**/*.js",
      "../frontend/node_modules/next/dist/server/**/*.js",
      "../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
    ],
    "assets": [
      "dist/**",
      "!dist/**/*.js",
      "node_modules/**",
      "../frontend/node_modules/**",
      "../frontend/.next/**",
      "!../frontend/node_modules/next/dist/server/**/*.js",
      "../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
      "../frontend/public/**"
    ],
    "outputPath": "binary"
  },
  "scripts": {
    "binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
    "binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
    "binary:babel-backend": " babel ./dist -d ./dist",
    "binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
    "binary:clean": "rm -rf ./dist && rm -rf ./binary",
    "binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
    "test": "echo \"Error: no test specified\" && exit 1",
    "dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
    "dev:docker": "nodemon",
    "build": "tsup --sourcemap",
    "build:frontend": "npm run build --prefix ../frontend",
    "start": "node --enable-source-maps dist/main.mjs",
    "build": "tsup",
    "start": "node dist/main.mjs",
    "type:check": "tsc --noEmit",
    "lint:fix": "eslint --fix --ext js,ts ./src",
    "lint": "eslint 'src/**/*.ts'",
    "test:unit": "vitest run -c vitest.unit.config.ts",
    "test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
    "test:e2e": "vitest run -c vitest.e2e.config.ts",
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts",
    "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
    "generate:component": "tsx ./scripts/create-backend-file.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts",
    "migration:new": "tsx ./scripts/create-migration.ts",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migrate:org": "tsx ./scripts/migrate-organization.ts",
    "migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
    "seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
    "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@babel/cli": "^7.18.10",
    "@babel/core": "^7.18.10",
    "@babel/plugin-syntax-import-attributes": "^7.24.7",
    "@babel/preset-env": "^7.18.10",
    "@babel/preset-react": "^7.24.7",
    "@types/bcrypt": "^5.0.2",
    "@types/jmespath": "^0.15.2",
    "@types/jsonwebtoken": "^9.0.5",
@@ -95,16 +43,11 @@
    "@types/passport-google-oauth20": "^2.0.14",
    "@types/pg": "^8.10.9",
    "@types/picomatch": "^2.3.3",
    "@types/pkcs11js": "^1.0.4",
    "@types/prompt-sync": "^4.2.3",
    "@types/resolve": "^1.20.6",
    "@types/safe-regex": "^1.1.6",
    "@types/sjcl": "^1.0.34",
    "@types/uuid": "^9.0.7",
    "@typescript-eslint/eslint-plugin": "^6.20.0",
    "@typescript-eslint/parser": "^6.20.0",
    "@yao-pkg/pkg": "^5.12.0",
    "babel-plugin-transform-import-meta": "^2.2.1",
    "eslint": "^8.56.0",
    "eslint-config-airbnb-base": "^15.0.0",
    "eslint-config-airbnb-typescript": "^17.1.0",
@@ -117,57 +60,34 @@
    "pino-pretty": "^10.2.3",
    "prompt-sync": "^4.2.0",
    "rimraf": "^5.0.5",
    "ts-node": "^10.9.2",
    "ts-node": "^10.9.1",
    "tsc-alias": "^1.8.8",
    "tsconfig-paths": "^4.2.0",
    "tsup": "^8.0.1",
    "tsx": "^4.4.0",
    "typescript": "^5.3.2",
    "vite-tsconfig-paths": "^4.2.2",
    "vitest": "^1.2.2"
  },
  "dependencies": {
    "@aws-sdk/client-elasticache": "^3.637.0",
    "@aws-sdk/client-iam": "^3.525.0",
    "@aws-sdk/client-kms": "^3.609.0",
    "@aws-sdk/client-secrets-manager": "^3.504.0",
    "@aws-sdk/client-sts": "^3.600.0",
    "@casl/ability": "^6.5.0",
    "@elastic/elasticsearch": "^8.15.0",
    "@fastify/cookie": "^9.3.1",
    "@fastify/cors": "^8.5.0",
    "@fastify/etag": "^5.1.0",
    "@fastify/formbody": "^7.4.0",
    "@fastify/helmet": "^11.1.1",
    "@fastify/multipart": "8.3.1",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
    "@fastify/request-context": "^5.1.0",
    "@fastify/session": "^10.7.0",
    "@fastify/static": "^7.0.4",
    "@fastify/swagger": "^8.14.0",
    "@fastify/swagger-ui": "^2.1.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
    "@node-saml/passport-saml": "^4.0.4",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/plugin-retry": "^5.0.5",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
    "@opentelemetry/exporter-prometheus": "^0.55.0",
    "@opentelemetry/instrumentation": "^0.55.0",
    "@opentelemetry/instrumentation-http": "^0.57.2",
    "@opentelemetry/resources": "^1.28.0",
    "@opentelemetry/sdk-metrics": "^1.28.0",
    "@opentelemetry/semantic-conventions": "^1.27.0",
    "@peculiar/asn1-schema": "^2.3.8",
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
    "@sindresorhus/slugify": "1.1.0",
    "@slack/oauth": "^3.0.2",
    "@slack/web-api": "^7.8.0",
    "@sindresorhus/slugify": "^2.2.1",
    "@ucast/mongo2js": "^1.3.4",
    "ajv": "^8.12.0",
    "argon2": "^0.31.2",
@@ -175,59 +95,34 @@
    "axios": "^1.6.7",
    "axios-retry": "^4.0.0",
    "bcrypt": "^5.1.1",
    "bullmq": "^5.4.2",
    "cassandra-driver": "^4.7.2",
    "connect-redis": "^7.1.1",
    "cron": "^3.1.7",
    "dd-trace": "^5.40.0",
    "bullmq": "^5.3.3",
    "dotenv": "^16.4.1",
    "fastify": "^4.28.1",
    "fastify": "^4.26.0",
    "fastify-plugin": "^4.5.1",
    "google-auth-library": "^9.9.0",
    "googleapis": "^137.1.0",
    "handlebars": "^4.7.8",
    "hdb": "^0.19.10",
    "ioredis": "^5.3.2",
    "isomorphic-dompurify": "^2.22.0",
    "jmespath": "^0.16.0",
    "jsonwebtoken": "^9.0.2",
    "jsrp": "^0.2.4",
    "jwks-rsa": "^3.1.0",
    "knex": "^3.0.1",
    "ldapjs": "^3.0.7",
    "ldif": "0.5.1",
    "libsodium-wrappers": "^0.7.13",
    "lodash.isequal": "^4.5.0",
    "mongodb": "^6.8.1",
    "ms": "^2.1.3",
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.8",
    "mysql2": "^3.9.1",
    "nanoid": "^5.0.4",
    "nodemailer": "^6.9.9",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
    "ora": "^7.0.1",
    "oracledb": "^6.4.0",
    "otplib": "^12.0.1",
    "passport-github": "^1.1.0",
    "passport-gitlab2": "^5.0.0",
    "passport-google-oauth20": "^2.0.0",
    "passport-ldapauth": "^3.0.1",
    "pg": "^8.11.3",
    "pg-boss": "^10.1.5",
    "pg-query-stream": "^4.5.3",
    "picomatch": "^3.0.1",
    "pino": "^8.16.2",
    "pkcs11js": "^2.1.6",
    "pkijs": "^3.2.4",
    "posthog-node": "^3.6.2",
    "probot": "^13.3.8",
    "safe-regex": "^2.1.1",
    "scim-patch": "^0.8.3",
    "scim2-parse-filter": "^0.2.10",
    "sjcl": "^1.0.8",
    "probot": "^13.0.0",
    "smee-client": "^2.0.0",
    "snowflake-sdk": "^1.14.0",
    "tedious": "^18.2.1",
    "tweetnacl": "^1.0.3",
    "tweetnacl-util": "^0.15.1",
    "uuid": "^9.0.1",
@@ -7,33 +7,14 @@ const prompt = promptSync({
  sigint: true
});

type ComponentType = 1 | 2 | 3;

console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);

function getComponentType(): ComponentType {
  while (true) {
    const input = prompt("Select a component (0-3): ");
    const componentType = parseInt(input, 10);

    if (componentType === 0) {
      console.log("Exiting the program. Goodbye!");
      process.exit(0);
    } else if (componentType === 1 || componentType === 2 || componentType === 3) {
      return componentType;
    } else {
      console.log("Invalid input. Please enter 0, 1, 2, or 3.");
    }
  }
}
const componentType = getComponentType();
const componentType = parseInt(prompt("Select a component: "), 10);

if (componentType === 1) {
  const componentName = prompt("Enter service name: ");
@@ -122,15 +103,11 @@ export const ${dalName} = (db: TDbClient) => {
`import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { readLimit } from "@app/server/config/rateLimiter";

export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    method: "GET",
    schema: {
      params: z.object({}),
      response: {
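A note on the first hunk above: the 33-line side validates the menu selection in a loop (`getComponentType` re-prompts until it gets 0-3 and exits cleanly on 0), while the 14-line side falls back to a one-shot `parseInt`, which silently continues with `NaN` on bad input. A router generated from the template in the second hunk is then wired up like any other Infisical router; a minimal sketch (the router name and prefix here are hypothetical, not taken from this diff):

// Hypothetical registration of a freshly generated router:
// await server.register(registerUserProfileRouter, { prefix: "/user-profile" });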
@@ -2,16 +2,15 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for migration: ");

// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = slugify(migrationName);

execSync(
  `npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
  `npx knex migrate:make --knexfile ${path.join(
    __dirname,
    "../src/db/knexfile.ts"
  )} -x ts ${migrationName}`,
  { stdio: "inherit" }
);
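For reference on the hunk above: the 16-line side slugifies the prompted name before handing it to knex, while the 15-line side passes `migrationName` through as typed. A quick sketch of what the slugify call produces (input string invented for illustration):

import slugify from "@sindresorhus/slugify";

// "Add Users Table" -> "add-users-table"
const formattedMigrationName = slugify("Add Users Table");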
@@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
      return "z.coerce.number()";
    case "text":
      return "z.string()";
    case "bytea":
      return "zodBuffer";
    default:
      throw new Error(`Invalid type: ${type}`);
  }
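The 8-line side of this hunk carries a `bytea` branch, so binary Postgres columns map to `zodBuffer`, the Buffer-aware zod type imported from `@app/lib/zod`; on the 6-line side, a `bytea` column falls through to `default` and the generator throws. Roughly, for a hypothetical column (name invented):

// an "encryptedKey" bytea column yields a schema entry like:
//   encryptedKey: zodBuffer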
@@ -90,12 +88,7 @@ const main = async () => {
      .whereRaw("table_schema = current_schema()")
      .select<{ tableName: string }[]>("table_name as tableName")
      .orderBy("table_name")
  ).filter(
    (el) =>
      !el.tableName.includes("_migrations") &&
      !el.tableName.includes("audit_logs_") &&
      el.tableName !== "intermediate_audit_logs"
  );
  ).filter((el) => !el.tableName.includes("_migrations"));

  for (let i = 0; i < tables.length; i += 1) {
    const { tableName } = tables[i];
@@ -103,15 +96,10 @@ const main = async () => {
    const columnNames = Object.keys(columns);

    let schema = "";
    const zodImportSet = new Set<string>();
    for (let colNum = 0; colNum < columnNames.length; colNum++) {
      const columnName = columnNames[colNum];
      const colInfo = columns[columnName];
      let ztype = getZodPrimitiveType(colInfo.type);
      if (["zodBuffer"].includes(ztype)) {
        zodImportSet.add(ztype);
      }

      // don't put optional on id
      if (colInfo.defaultValue && columnName !== "id") {
        const { defaultValue } = colInfo;
@@ -133,8 +121,6 @@ const main = async () => {
      .split("_")
      .reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");

    const zodImports = Array.from(zodImportSet);

    // the insert and update are changed to zod input type to use default cases
    writeFileSync(
      path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@@ -145,8 +131,6 @@ const main = async () => {

import { z } from "zod";

${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}

import { TImmutableDBKeys } from "./models";

export const ${pascalCase}Schema = z.object({${schema}});
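Putting the generator hunks together: on the side that tracks `zodImportSet` and filters out the audit-log tables, a generated schema module comes out shaped roughly like this. A sketch only, with table and column names invented; the Insert variant follows the "zod input type" comment in the hunk above:

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsKeysSchema = z.object({
  id: z.string().uuid(),
  encryptedKey: zodBuffer,
  createdAt: z.date()
});

export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;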
@@ -1,103 +0,0 @@
/* eslint-disable */
import promptSync from "prompt-sync";
import { execSync } from "child_process";
import path from "path";
import { existsSync } from "fs";

const prompt = promptSync({
  sigint: true
});

const sanitizeInputParam = (value: string) => {
  // Escape double quotes and wrap the entire value in double quotes
  if (value) {
    return `"${value.replace(/"/g, '\\"')}"`;
  }
  return '""';
};

const exportDb = () => {
  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
  const exportPort = sanitizeInputParam(
    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
  );
  const exportUser = sanitizeInputParam(
    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
  );
  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
  const exportDatabase = sanitizeInputParam(
    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
  );

  // we do not include the audit_log and secret_sharing entries
  execSync(
    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
      __dirname,
      "../src/db/backup.dump"
    )}`,
    { stdio: "inherit" }
  );
};

const importDbForOrg = () => {
  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
  const importUser = sanitizeInputParam(
    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
  );
  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
  const importDatabase = sanitizeInputParam(
    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
  );
  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));

  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
    console.log("File not found, please export the database first.");
    return;
  }

  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
      __dirname,
      "../src/db/backup.dump"
    )}`,
    { maxBuffer: 1024 * 1024 * 4096 }
  );

  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
  );

  // delete global/instance-level resources not relevant to the organization to migrate
  // users
  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
  );

  // identities
  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
  );

  // reset slack configuration in superAdmin
  execSync(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
  );

  console.log("Organization migrated successfully.");
};

const main = () => {
  const action = prompt(
    "Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
  );
  if (action === "1") {
    exportDb();
  } else if (action === "2") {
    importDbForOrg();
  } else {
    console.log("Invalid action");
  }
};

main();
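Worth noting before this helper disappears: `sanitizeInputParam` was the only guard between the prompt answers and the shell, wrapping every value in double quotes and escaping any embedded ones. Its behavior in one line (example input invented):

// sanitizeInputParam('my "prod" db') returns '"my \"prod\" db"'; empty input returns '""'.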
@@ -1,27 +0,0 @@
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";

function replaceMjsOccurrences(directory: string) {
  fs.readdir(directory, (err, files) => {
    if (err) throw err;
    files.forEach((file) => {
      const filePath = path.join(directory, file);
      if (fs.statSync(filePath).isDirectory()) {
        replaceMjsOccurrences(filePath);
      } else {
        fs.readFile(filePath, "utf8", (err, data) => {
          if (err) throw err;
          const result = data.replace(/\.mjs/g, ".js");
          fs.writeFile(filePath, result, "utf8", (err) => {
            if (err) throw err;
            // eslint-disable-next-line no-console
            console.log(`Updated: ${filePath}`);
          });
        });
      }
    });
  });
}

replaceMjsOccurrences("dist");
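This deleted post-build step walked `dist` recursively and rewrote ESM suffixes in place; the entire effect reduces to one substitution per file:

// e.g. `import "./config.mjs"` becomes `import "./config.js"` (example line invented)
const result = data.replace(/\.mjs/g, ".js");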
@@ -1,7 +0,0 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
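This standalone augmentation file is dropped on one side of the compare; the same `RequestContextData` interface also appears inside fastify.d.ts further down. Wherever the augmentation lives, consumers read the request id the same way; a minimal sketch:

import { requestContext } from "@fastify/request-context";

// typed as string (or undefined until a hook sets it) via the RequestContextData augmentation
const reqId = requestContext.get("reqId");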
backend/src/@types/fastify-zod.d.ts (4 changes, vendored)

@@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";

import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";

declare global {
@@ -8,7 +8,7 @@ declare global {
      RawServerDefault,
      RawRequestDefaultExpression<RawServerDefault>,
      RawReplyDefaultExpression<RawServerDefault>,
      Readonly<CustomLogger>,
      Readonly<Logger>,
      ZodTypeProvider
    >;
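The only change in this file swaps the logger generic on the global Fastify server type between the wrapped `CustomLogger` and the raw pino `Logger`. The call surface is unchanged either way; assuming CustomLogger mirrors pino's methods, route code like the following keeps type-checking:

// server.log.info("request handled");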
backend/src/@types/fastify.d.ts (119 changes, vendored)

@@ -1,33 +1,11 @@
import "fastify";

import { Redis } from "ioredis";

import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -35,43 +13,22 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -82,32 +39,15 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}

declare module "fastify" {
  interface Session {
    callbackPort: string;
  }

  interface FastifyRequest {
    realIp: string;
    // used for mfa session authentication
@@ -119,29 +59,21 @@ declare module "fastify" {
    // identity injection. depending on which kinda of token the information is filled in auth
    auth: TAuthMode;
    permission: {
      authMethod: ActorAuthMethod;
      type: ActorType;
      id: string;
      orgId: string;
      orgId?: string;
    };
    rateLimits: RateLimitConfiguration;
    // passport data
    passportUser: {
      isUserCompleted: string;
      providerAuthToken: string;
    };
    kmipUser: {
      projectId: string;
      clientId: string;
      name: string;
    };
    auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
    ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
    ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
  }

  interface FastifyInstance {
    redis: Redis;
    services: {
      login: TAuthLoginFactory;
      password: TAuthPasswordFactory;
@@ -150,20 +82,15 @@ declare module "fastify" {
      permission: TPermissionServiceFactory;
      org: TOrgServiceFactory;
      orgRole: TOrgRoleServiceFactory;
      oidc: TOidcConfigServiceFactory;
      superAdmin: TSuperAdminServiceFactory;
      user: TUserServiceFactory;
      group: TGroupServiceFactory;
      groupProject: TGroupProjectServiceFactory;
      apiKey: TApiKeyServiceFactory;
      pkiAlert: TPkiAlertServiceFactory;
      project: TProjectServiceFactory;
      projectMembership: TProjectMembershipServiceFactory;
      projectEnv: TProjectEnvServiceFactory;
      projectKey: TProjectKeyServiceFactory;
      projectRole: TProjectRoleServiceFactory;
      secret: TSecretServiceFactory;
      secretReplication: TSecretReplicationServiceFactory;
      secretTag: TSecretTagServiceFactory;
      secretImport: TSecretImportServiceFactory;
      projectBot: TProjectBotServiceFactory;
@@ -175,16 +102,7 @@ declare module "fastify" {
      identity: TIdentityServiceFactory;
      identityAccessToken: TIdentityAccessTokenServiceFactory;
      identityProject: TIdentityProjectServiceFactory;
      identityTokenAuth: TIdentityTokenAuthServiceFactory;
      identityUa: TIdentityUaServiceFactory;
      identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
      identityGcpAuth: TIdentityGcpAuthServiceFactory;
      identityAwsAuth: TIdentityAwsAuthServiceFactory;
      identityAzureAuth: TIdentityAzureAuthServiceFactory;
      identityOidcAuth: TIdentityOidcAuthServiceFactory;
      identityJwtAuth: TIdentityJwtAuthServiceFactory;
      accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
      accessApprovalRequest: TAccessApprovalRequestServiceFactory;
      secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
      secretApprovalRequest: TSecretApprovalRequestServiceFactory;
      secretRotation: TSecretRotationServiceFactory;
@@ -193,49 +111,16 @@ declare module "fastify" {
      scim: TScimServiceFactory;
      ldap: TLdapConfigServiceFactory;
      auditLog: TAuditLogServiceFactory;
      auditLogStream: TAuditLogStreamServiceFactory;
      certificate: TCertificateServiceFactory;
      certificateTemplate: TCertificateTemplateServiceFactory;
      sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
      sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
      certificateAuthority: TCertificateAuthorityServiceFactory;
      certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
      certificateEst: TCertificateEstServiceFactory;
      pkiCollection: TPkiCollectionServiceFactory;
      secretScanning: TSecretScanningServiceFactory;
      license: TLicenseServiceFactory;
      trustedIp: TTrustedIpServiceFactory;
      secretBlindIndex: TSecretBlindIndexServiceFactory;
      telemetry: TTelemetryServiceFactory;
      dynamicSecret: TDynamicSecretServiceFactory;
      dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
      projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
      identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
      identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
      secretSharing: TSecretSharingServiceFactory;
      rateLimit: TRateLimitServiceFactory;
      userEngagement: TUserEngagementServiceFactory;
      externalKms: TExternalKmsServiceFactory;
      hsm: THsmServiceFactory;
      orgAdmin: TOrgAdminServiceFactory;
      slack: TSlackServiceFactory;
      workflowIntegration: TWorkflowIntegrationServiceFactory;
      cmek: TCmekServiceFactory;
      migration: TExternalMigrationServiceFactory;
      externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
      projectTemplate: TProjectTemplateServiceFactory;
      totp: TTotpServiceFactory;
      appConnection: TAppConnectionServiceFactory;
      secretSync: TSecretSyncServiceFactory;
      kmip: TKmipServiceFactory;
      kmipOperation: TKmipOperationServiceFactory;
      gateway: TGatewayServiceFactory;
    };
    // this is exclusive use for middlewares in which we need to inject data
    // everywhere else access using service layer
    store: {
      user: Pick<TUserDALFactory, "findById">;
      kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
    };
  }
}
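Everything in this file works through TypeScript declaration merging: fastify's own `FastifyRequest` and `FastifyInstance` interfaces are re-opened, so whatever the server decorates at boot is typed at every call site. A minimal sketch of what that buys inside a handler (the service method name is hypothetical, for illustration only):

// Both lines type-check against the augmentation above:
// const orgId = req.permission.orgId;               // string on one side of this diff, string | undefined on the other
// const org = await server.services.org.findOrganizationById(orgId); // hypothetical method name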
backend/src/@types/hdb.d.ts (4 changes, vendored)

@@ -1,4 +0,0 @@
declare module "hdb" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function createClient(options): any;
}
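This shim exists only because the SAP HANA client ships untyped, so the declaration hands back `any`; dropping it leaves callers to supply their own typing. For reference, usage of the shimmed module looks like this (connection values invented):

import { createClient } from "hdb";

// createClient comes back untyped (`any`), per the declaration above
const client = createClient({ host: "hana.example.internal", port: 30015, user: "SYSTEM", password: "..." });
client.connect((err: Error | null) => {
  if (err) throw err;
});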
backend/src/@types/knex.d.ts (707 changes, vendored)

@@ -1,27 +1,12 @@
import { Knex as KnexOriginal } from "knex";
import { Knex } from "knex";

import {
  TableName,
  TAccessApprovalPolicies,
  TAccessApprovalPoliciesApprovers,
  TAccessApprovalPoliciesApproversInsert,
  TAccessApprovalPoliciesApproversUpdate,
  TAccessApprovalPoliciesInsert,
  TAccessApprovalPoliciesUpdate,
  TAccessApprovalRequests,
  TAccessApprovalRequestsInsert,
  TAccessApprovalRequestsReviewers,
  TAccessApprovalRequestsReviewersInsert,
  TAccessApprovalRequestsReviewersUpdate,
  TAccessApprovalRequestsUpdate,
  TApiKeys,
  TApiKeysInsert,
  TApiKeysUpdate,
  TAuditLogs,
  TAuditLogsInsert,
  TAuditLogStreams,
  TAuditLogStreamsInsert,
  TAuditLogStreamsUpdate,
  TAuditLogsUpdate,
  TAuthTokens,
  TAuthTokenSessions,
@@ -32,102 +17,27 @@ import {
  TBackupPrivateKey,
  TBackupPrivateKeyInsert,
  TBackupPrivateKeyUpdate,
  TCertificateAuthorities,
  TCertificateAuthoritiesInsert,
  TCertificateAuthoritiesUpdate,
  TCertificateAuthorityCerts,
  TCertificateAuthorityCertsInsert,
  TCertificateAuthorityCertsUpdate,
  TCertificateAuthorityCrl,
  TCertificateAuthorityCrlInsert,
  TCertificateAuthorityCrlUpdate,
  TCertificateAuthoritySecret,
  TCertificateAuthoritySecretInsert,
  TCertificateAuthoritySecretUpdate,
  TCertificateBodies,
  TCertificateBodiesInsert,
  TCertificateBodiesUpdate,
  TCertificates,
  TCertificateSecrets,
  TCertificateSecretsInsert,
  TCertificateSecretsUpdate,
  TCertificatesInsert,
  TCertificatesUpdate,
  TCertificateTemplateEstConfigs,
  TCertificateTemplateEstConfigsInsert,
  TCertificateTemplateEstConfigsUpdate,
  TCertificateTemplates,
  TCertificateTemplatesInsert,
  TCertificateTemplatesUpdate,
  TDynamicSecretLeases,
  TDynamicSecretLeasesInsert,
  TDynamicSecretLeasesUpdate,
  TDynamicSecrets,
  TDynamicSecretsInsert,
  TDynamicSecretsUpdate,
  TExternalKms,
  TExternalKmsInsert,
  TExternalKmsUpdate,
  TGateways,
  TGatewaysInsert,
  TGatewaysUpdate,
  TGitAppInstallSessions,
  TGitAppInstallSessionsInsert,
  TGitAppInstallSessionsUpdate,
  TGitAppOrg,
  TGitAppOrgInsert,
  TGitAppOrgUpdate,
  TGroupProjectMembershipRoles,
  TGroupProjectMembershipRolesInsert,
  TGroupProjectMembershipRolesUpdate,
  TGroupProjectMemberships,
  TGroupProjectMembershipsInsert,
  TGroupProjectMembershipsUpdate,
  TGroups,
  TGroupsInsert,
  TGroupsUpdate,
  TIdentities,
  TIdentitiesInsert,
  TIdentitiesUpdate,
  TIdentityAccessTokens,
  TIdentityAccessTokensInsert,
  TIdentityAccessTokensUpdate,
  TIdentityAwsAuths,
  TIdentityAwsAuthsInsert,
  TIdentityAwsAuthsUpdate,
  TIdentityAzureAuths,
  TIdentityAzureAuthsInsert,
  TIdentityAzureAuthsUpdate,
  TIdentityGcpAuths,
  TIdentityGcpAuthsInsert,
  TIdentityGcpAuthsUpdate,
  TIdentityJwtAuths,
  TIdentityJwtAuthsInsert,
  TIdentityJwtAuthsUpdate,
  TIdentityKubernetesAuths,
  TIdentityKubernetesAuthsInsert,
  TIdentityKubernetesAuthsUpdate,
  TIdentityMetadata,
  TIdentityMetadataInsert,
  TIdentityMetadataUpdate,
  TIdentityOidcAuths,
  TIdentityOidcAuthsInsert,
  TIdentityOidcAuthsUpdate,
  TIdentityOrgMemberships,
  TIdentityOrgMembershipsInsert,
  TIdentityOrgMembershipsUpdate,
  TIdentityProjectAdditionalPrivilege,
  TIdentityProjectAdditionalPrivilegeInsert,
  TIdentityProjectAdditionalPrivilegeUpdate,
  TIdentityProjectMembershipRole,
  TIdentityProjectMembershipRoleInsert,
  TIdentityProjectMembershipRoleUpdate,
  TIdentityProjectMemberships,
  TIdentityProjectMembershipsInsert,
  TIdentityProjectMembershipsUpdate,
  TIdentityTokenAuths,
  TIdentityTokenAuthsInsert,
  TIdentityTokenAuthsUpdate,
  TIdentityUaClientSecrets,
  TIdentityUaClientSecretsInsert,
  TIdentityUaClientSecretsUpdate,
@@ -143,72 +53,27 @@ import {
  TIntegrations,
  TIntegrationsInsert,
  TIntegrationsUpdate,
  TInternalKms,
  TInternalKmsInsert,
  TInternalKmsUpdate,
  TKmipClientCertificates,
  TKmipClientCertificatesInsert,
  TKmipClientCertificatesUpdate,
  TKmipClients,
  TKmipClientsInsert,
  TKmipClientsUpdate,
  TKmipOrgConfigs,
  TKmipOrgConfigsInsert,
  TKmipOrgConfigsUpdate,
  TKmipOrgServerCertificates,
  TKmipOrgServerCertificatesInsert,
  TKmipOrgServerCertificatesUpdate,
  TKmsKeys,
  TKmsKeysInsert,
  TKmsKeysUpdate,
  TKmsKeyVersions,
  TKmsKeyVersionsInsert,
  TKmsKeyVersionsUpdate,
  TKmsRootConfig,
  TKmsRootConfigInsert,
  TKmsRootConfigUpdate,
  TLdapConfigs,
  TLdapConfigsInsert,
  TLdapConfigsUpdate,
  TLdapGroupMaps,
  TLdapGroupMapsInsert,
  TLdapGroupMapsUpdate,
  TOidcConfigs,
  TOidcConfigsInsert,
  TOidcConfigsUpdate,
  TOrganizations,
  TOrganizationsInsert,
  TOrganizationsUpdate,
  TOrgBots,
  TOrgBotsInsert,
  TOrgBotsUpdate,
  TOrgGatewayConfig,
  TOrgGatewayConfigInsert,
  TOrgGatewayConfigUpdate,
  TOrgMemberships,
  TOrgMembershipsInsert,
  TOrgMembershipsUpdate,
  TOrgRoles,
  TOrgRolesInsert,
  TOrgRolesUpdate,
  TPkiAlerts,
  TPkiAlertsInsert,
  TPkiAlertsUpdate,
  TPkiCollectionItems,
  TPkiCollectionItemsInsert,
  TPkiCollectionItemsUpdate,
  TPkiCollections,
  TPkiCollectionsInsert,
  TPkiCollectionsUpdate,
  TProjectBots,
  TProjectBotsInsert,
  TProjectBotsUpdate,
  TProjectEnvironments,
  TProjectEnvironmentsInsert,
  TProjectEnvironmentsUpdate,
  TProjectGateways,
  TProjectGatewaysInsert,
  TProjectGatewaysUpdate,
  TProjectKeys,
  TProjectKeysInsert,
  TProjectKeysUpdate,
@@ -220,28 +85,10 @@ import {
  TProjectRolesUpdate,
  TProjects,
  TProjectsInsert,
  TProjectSlackConfigs,
  TProjectSlackConfigsInsert,
  TProjectSlackConfigsUpdate,
  TProjectSplitBackfillIds,
  TProjectSplitBackfillIdsInsert,
  TProjectSplitBackfillIdsUpdate,
  TProjectsUpdate,
  TProjectTemplates,
  TProjectTemplatesInsert,
  TProjectTemplatesUpdate,
  TProjectUserAdditionalPrivilege,
  TProjectUserAdditionalPrivilegeInsert,
  TProjectUserAdditionalPrivilegeUpdate,
  TProjectUserMembershipRoles,
  TProjectUserMembershipRolesInsert,
  TProjectUserMembershipRolesUpdate,
  TRateLimit,
  TRateLimitInsert,
  TRateLimitUpdate,
  TResourceMetadata,
  TResourceMetadataInsert,
  TResourceMetadataUpdate,
  TSamlConfigs,
  TSamlConfigsInsert,
  TSamlConfigsUpdate,
@@ -258,9 +105,6 @@ import {
  TSecretApprovalRequestSecretTags,
  TSecretApprovalRequestSecretTagsInsert,
  TSecretApprovalRequestSecretTagsUpdate,
  TSecretApprovalRequestSecretTagsV2,
  TSecretApprovalRequestSecretTagsV2Insert,
  TSecretApprovalRequestSecretTagsV2Update,
  TSecretApprovalRequestsInsert,
  TSecretApprovalRequestsReviewers,
  TSecretApprovalRequestsReviewersInsert,
@@ -268,9 +112,6 @@ import {
  TSecretApprovalRequestsSecrets,
  TSecretApprovalRequestsSecretsInsert,
  TSecretApprovalRequestsSecretsUpdate,
  TSecretApprovalRequestsSecretsV2,
  TSecretApprovalRequestsSecretsV2Insert,
  TSecretApprovalRequestsSecretsV2Update,
  TSecretApprovalRequestsUpdate,
  TSecretBlindIndexes,
  TSecretBlindIndexesInsert,
@@ -284,18 +125,9 @@ import {
  TSecretImports,
  TSecretImportsInsert,
  TSecretImportsUpdate,
  TSecretReferences,
  TSecretReferencesInsert,
  TSecretReferencesUpdate,
  TSecretReferencesV2,
  TSecretReferencesV2Insert,
  TSecretReferencesV2Update,
  TSecretRotationOutputs,
  TSecretRotationOutputsInsert,
  TSecretRotationOutputsUpdate,
  TSecretRotationOutputV2,
  TSecretRotationOutputV2Insert,
  TSecretRotationOutputV2Update,
  TSecretRotations,
  TSecretRotationsInsert,
  TSecretRotationsUpdate,
@@ -303,9 +135,6 @@ import {
  TSecretScanningGitRisks,
  TSecretScanningGitRisksInsert,
  TSecretScanningGitRisksUpdate,
  TSecretSharing,
  TSecretSharingInsert,
  TSecretSharingUpdate,
  TSecretsInsert,
  TSecretSnapshotFolders,
  TSecretSnapshotFoldersInsert,
@@ -314,9 +143,6 @@ import {
  TSecretSnapshotSecrets,
  TSecretSnapshotSecretsInsert,
  TSecretSnapshotSecretsUpdate,
  TSecretSnapshotSecretsV2,
  TSecretSnapshotSecretsV2Insert,
  TSecretSnapshotSecretsV2Update,
  TSecretSnapshotsInsert,
  TSecretSnapshotsUpdate,
  TSecretsUpdate,
@@ -332,36 +158,12 @@ import {
  TSecretVersionTagJunction,
  TSecretVersionTagJunctionInsert,
  TSecretVersionTagJunctionUpdate,
  TSecretVersionV2TagJunction,
  TSecretVersionV2TagJunctionInsert,
  TSecretVersionV2TagJunctionUpdate,
  TServiceTokens,
  TServiceTokensInsert,
  TServiceTokensUpdate,
  TSlackIntegrations,
  TSlackIntegrationsInsert,
  TSlackIntegrationsUpdate,
  TSshCertificateAuthorities,
  TSshCertificateAuthoritiesInsert,
  TSshCertificateAuthoritiesUpdate,
  TSshCertificateAuthoritySecrets,
  TSshCertificateAuthoritySecretsInsert,
  TSshCertificateAuthoritySecretsUpdate,
  TSshCertificateBodies,
  TSshCertificateBodiesInsert,
  TSshCertificateBodiesUpdate,
  TSshCertificates,
  TSshCertificatesInsert,
  TSshCertificatesUpdate,
  TSshCertificateTemplates,
  TSshCertificateTemplatesInsert,
  TSshCertificateTemplatesUpdate,
  TSuperAdmin,
  TSuperAdminInsert,
  TSuperAdminUpdate,
  TTotpConfigs,
  TTotpConfigsInsert,
  TTotpConfigsUpdate,
  TTrustedIps,
  TTrustedIpsInsert,
  TTrustedIpsUpdate,
@@ -374,581 +176,196 @@ import {
  TUserEncryptionKeys,
  TUserEncryptionKeysInsert,
  TUserEncryptionKeysUpdate,
  TUserGroupMembership,
  TUserGroupMembershipInsert,
  TUserGroupMembershipUpdate,
  TUsers,
  TUsersInsert,
  TUsersUpdate,
  TWebhooks,
  TWebhooksInsert,
  TWebhooksUpdate,
  TWorkflowIntegrations,
  TWorkflowIntegrationsInsert,
  TWorkflowIntegrationsUpdate
  TWebhooksUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
  TExternalGroupOrgRoleMappings,
  TExternalGroupOrgRoleMappingsInsert,
  TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
  TSecretV2TagJunction,
  TSecretV2TagJunctionInsert,
  TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import {
  TSecretVersionsV2,
  TSecretVersionsV2Insert,
  TSecretVersionsV2Update
} from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";

declare module "knex" {
  namespace Knex {
    interface QueryInterface {
      primaryNode(): KnexOriginal;
      replicaNode(): KnexOriginal;
    }
  }
}

declare module "knex/types/tables" {
  interface Tables {
    [TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
    [TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
    [TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
      TSshCertificateAuthorities,
      TSshCertificateAuthoritiesInsert,
      TSshCertificateAuthoritiesUpdate
    >;
    [TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
      TSshCertificateAuthoritySecrets,
      TSshCertificateAuthoritySecretsInsert,
      TSshCertificateAuthoritySecretsUpdate
    >;
    [TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
      TSshCertificateTemplates,
      TSshCertificateTemplatesInsert,
      TSshCertificateTemplatesUpdate
    >;
    [TableName.SshCertificate]: KnexOriginal.CompositeTableType<
      TSshCertificates,
      TSshCertificatesInsert,
      TSshCertificatesUpdate
    >;
    [TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
      TSshCertificateBodies,
      TSshCertificateBodiesInsert,
      TSshCertificateBodiesUpdate
    >;
    [TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
      TCertificateAuthorities,
      TCertificateAuthoritiesInsert,
      TCertificateAuthoritiesUpdate
    >;
    [TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
      TCertificateAuthorityCerts,
      TCertificateAuthorityCertsInsert,
      TCertificateAuthorityCertsUpdate
    >;
    [TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
      TCertificateAuthoritySecret,
      TCertificateAuthoritySecretInsert,
      TCertificateAuthoritySecretUpdate
    >;
    [TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
      TCertificateAuthorityCrl,
      TCertificateAuthorityCrlInsert,
      TCertificateAuthorityCrlUpdate
    >;
    [TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
    [TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
      TCertificateTemplates,
      TCertificateTemplatesInsert,
      TCertificateTemplatesUpdate
    >;
    [TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
      TCertificateTemplateEstConfigs,
      TCertificateTemplateEstConfigsInsert,
      TCertificateTemplateEstConfigsUpdate
    >;
    [TableName.CertificateBody]: KnexOriginal.CompositeTableType<
      TCertificateBodies,
      TCertificateBodiesInsert,
      TCertificateBodiesUpdate
    >;
    [TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
      TCertificateSecrets,
      TCertificateSecretsInsert,
      TCertificateSecretsUpdate
    >;
    [TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
    [TableName.PkiCollection]: KnexOriginal.CompositeTableType<
      TPkiCollections,
      TPkiCollectionsInsert,
      TPkiCollectionsUpdate
    >;
    [TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
      TPkiCollectionItems,
      TPkiCollectionItemsInsert,
      TPkiCollectionItemsUpdate
    >;
    [TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
      TUserGroupMembership,
      TUserGroupMembershipInsert,
      TUserGroupMembershipUpdate
    >;
    [TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
      TGroupProjectMemberships,
      TGroupProjectMembershipsInsert,
      TGroupProjectMembershipsUpdate
    >;
    [TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
      TGroupProjectMembershipRoles,
      TGroupProjectMembershipRolesInsert,
      TGroupProjectMembershipRolesUpdate
    >;
    [TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
    [TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
    [TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
    [TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
    [TableName.UserEncryptionKey]: Knex.CompositeTableType<
      TUserEncryptionKeys,
      TUserEncryptionKeysInsert,
      TUserEncryptionKeysUpdate
    >;
    [TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
    [TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
    [TableName.AuthTokens]: Knex.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
    [TableName.AuthTokenSession]: Knex.CompositeTableType<
      TAuthTokenSessions,
      TAuthTokenSessionsInsert,
      TAuthTokenSessionsUpdate
    >;
    [TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
    [TableName.BackupPrivateKey]: Knex.CompositeTableType<
      TBackupPrivateKey,
      TBackupPrivateKeyInsert,
      TBackupPrivateKeyUpdate
    >;
    [TableName.Organization]: KnexOriginal.CompositeTableType<
      TOrganizations,
      TOrganizationsInsert,
      TOrganizationsUpdate
    >;
    [TableName.OrgMembership]: KnexOriginal.CompositeTableType<
      TOrgMemberships,
      TOrgMembershipsInsert,
      TOrgMembershipsUpdate
    >;
    [TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
    [TableName.IncidentContact]: KnexOriginal.CompositeTableType<
    [TableName.Organization]: Knex.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
    [TableName.OrgMembership]: Knex.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
    [TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
    [TableName.IncidentContact]: Knex.CompositeTableType<
      TIncidentContacts,
      TIncidentContactsInsert,
      TIncidentContactsUpdate
    >;
    [TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
    [TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
    [TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
    [TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
    [TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
    [TableName.UserAction]: Knex.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
    [TableName.SuperAdmin]: Knex.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
    [TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
    [TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
    [TableName.ProjectMembership]: Knex.CompositeTableType<
      TProjectMemberships,
      TProjectMembershipsInsert,
      TProjectMembershipsUpdate
    >;
    [TableName.Environment]: KnexOriginal.CompositeTableType<
    [TableName.Environment]: Knex.CompositeTableType<
      TProjectEnvironments,
      TProjectEnvironmentsInsert,
      TProjectEnvironmentsUpdate
    >;
    [TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
    [TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
    [TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
    [TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
      TProjectUserMembershipRoles,
      TProjectUserMembershipRolesInsert,
      TProjectUserMembershipRolesUpdate
    >;
    [TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
    [TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
      TProjectUserAdditionalPrivilege,
      TProjectUserAdditionalPrivilegeInsert,
      TProjectUserAdditionalPrivilegeUpdate
    >;
    [TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
    [TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
    [TableName.SecretReference]: KnexOriginal.CompositeTableType<
      TSecretReferences,
      TSecretReferencesInsert,
      TSecretReferencesUpdate
    >;
    [TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
    [TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
    [TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
    [TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
    [TableName.SecretBlindIndex]: Knex.CompositeTableType<
      TSecretBlindIndexes,
      TSecretBlindIndexesInsert,
      TSecretBlindIndexesUpdate
    >;
    [TableName.SecretVersion]: KnexOriginal.CompositeTableType<
      TSecretVersions,
      TSecretVersionsInsert,
      TSecretVersionsUpdate
    >;
    [TableName.SecretFolder]: KnexOriginal.CompositeTableType<
      TSecretFolders,
      TSecretFoldersInsert,
      TSecretFoldersUpdate
    >;
    [TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
    [TableName.SecretVersion]: Knex.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
    [TableName.SecretFolder]: Knex.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
    [TableName.SecretFolderVersion]: Knex.CompositeTableType<
      TSecretFolderVersions,
      TSecretFolderVersionsInsert,
      TSecretFolderVersionsUpdate
    >;
    [TableName.SecretSharing]: KnexOriginal.CompositeTableType<
      TSecretSharing,
      TSecretSharingInsert,
      TSecretSharingUpdate
    >;
    [TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
    [TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
    [TableName.SecretImport]: KnexOriginal.CompositeTableType<
      TSecretImports,
      TSecretImportsInsert,
      TSecretImportsUpdate
    >;
    [TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
    [TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
    [TableName.ServiceToken]: KnexOriginal.CompositeTableType<
      TServiceTokens,
      TServiceTokensInsert,
      TServiceTokensUpdate
    >;
    [TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
    [TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
    [TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
    [TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
    [TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
    [TableName.ServiceToken]: Knex.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
    [TableName.IntegrationAuth]: Knex.CompositeTableType<
      TIntegrationAuths,
      TIntegrationAuthsInsert,
      TIntegrationAuthsUpdate
    >;
    [TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
    [TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<
      TIdentityTokenAuths,
      TIdentityTokenAuthsInsert,
      TIdentityTokenAuthsUpdate
    >;
    [TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
    [TableName.Identity]: Knex.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
    [TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
      TIdentityUniversalAuths,
      TIdentityUniversalAuthsInsert,
      TIdentityUniversalAuthsUpdate
    >;
    [TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
      TIdentityMetadata,
      TIdentityMetadataInsert,
      TIdentityMetadataUpdate
    >;
    [TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
      TIdentityKubernetesAuths,
      TIdentityKubernetesAuthsInsert,
      TIdentityKubernetesAuthsUpdate
    >;
    [TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
      TIdentityGcpAuths,
      TIdentityGcpAuthsInsert,
      TIdentityGcpAuthsUpdate
    >;
    [TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
      TIdentityAwsAuths,
      TIdentityAwsAuthsInsert,
      TIdentityAwsAuthsUpdate
    >;
    [TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
      TIdentityAzureAuths,
      TIdentityAzureAuthsInsert,
      TIdentityAzureAuthsUpdate
    >;
    [TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
      TIdentityOidcAuths,
      TIdentityOidcAuthsInsert,
      TIdentityOidcAuthsUpdate
    >;
    [TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
      TIdentityJwtAuths,
      TIdentityJwtAuthsInsert,
      TIdentityJwtAuthsUpdate
    >;
    [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
    [TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
      TIdentityUaClientSecrets,
      TIdentityUaClientSecretsInsert,
      TIdentityUaClientSecretsUpdate
    >;
    [TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
    [TableName.IdentityAccessToken]: Knex.CompositeTableType<
      TIdentityAccessTokens,
      TIdentityAccessTokensInsert,
      TIdentityAccessTokensUpdate
    >;
    [TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
    [TableName.IdentityOrgMembership]: Knex.CompositeTableType<
      TIdentityOrgMemberships,
      TIdentityOrgMembershipsInsert,
      TIdentityOrgMembershipsUpdate
    >;
    [TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
    [TableName.IdentityProjectMembership]: Knex.CompositeTableType<
      TIdentityProjectMemberships,
      TIdentityProjectMembershipsInsert,
      TIdentityProjectMembershipsUpdate
    >;
    [TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
    [TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
      TIdentityProjectMembershipRole,
      TIdentityProjectMembershipRoleInsert,
      TIdentityProjectMembershipRoleUpdate
    >;
    [TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
      TIdentityProjectAdditionalPrivilege,
      TIdentityProjectAdditionalPrivilegeInsert,
      TIdentityProjectAdditionalPrivilegeUpdate
    >;

    [TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
      TAccessApprovalPolicies,
      TAccessApprovalPoliciesInsert,
      TAccessApprovalPoliciesUpdate
    >;

    [TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
      TAccessApprovalPoliciesApprovers,
      TAccessApprovalPoliciesApproversInsert,
      TAccessApprovalPoliciesApproversUpdate
    >;

    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
      TAccessApprovalRequests,
      TAccessApprovalRequestsInsert,
      TAccessApprovalRequestsUpdate
    >;

    [TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
      TAccessApprovalRequestsReviewers,
      TAccessApprovalRequestsReviewersInsert,
      TAccessApprovalRequestsReviewersUpdate
    >;

    [TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
    [TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
    [TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
    [TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
      TSecretApprovalPolicies,
      TSecretApprovalPoliciesInsert,
      TSecretApprovalPoliciesUpdate
    >;
    [TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
    [TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
      TSecretApprovalPoliciesApprovers,
      TSecretApprovalPoliciesApproversInsert,
      TSecretApprovalPoliciesApproversUpdate
    >;
    [TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
    [TableName.SecretApprovalRequest]: Knex.CompositeTableType<
      TSecretApprovalRequests,
      TSecretApprovalRequestsInsert,
      TSecretApprovalRequestsUpdate
    >;
    [TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
    [TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
      TSecretApprovalRequestsReviewers,
      TSecretApprovalRequestsReviewersInsert,
      TSecretApprovalRequestsReviewersUpdate
    >;
    [TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
    [TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
      TSecretApprovalRequestsSecrets,
      TSecretApprovalRequestsSecretsInsert,
      TSecretApprovalRequestsSecretsUpdate
    >;
    [TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
    [TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
      TSecretApprovalRequestSecretTags,
      TSecretApprovalRequestSecretTagsInsert,
      TSecretApprovalRequestSecretTagsUpdate
    >;
    [TableName.SecretRotation]: KnexOriginal.CompositeTableType<
    [TableName.SecretRotation]: Knex.CompositeTableType<
      TSecretRotations,
      TSecretRotationsInsert,
      TSecretRotationsUpdate
    >;
    [TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
    [TableName.SecretRotationOutput]: Knex.CompositeTableType<
      TSecretRotationOutputs,
      TSecretRotationOutputsInsert,
      TSecretRotationOutputsUpdate
    >;
    [TableName.Snapshot]: KnexOriginal.CompositeTableType<
      TSecretSnapshots,
      TSecretSnapshotsInsert,
      TSecretSnapshotsUpdate
    >;
    [TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
    [TableName.Snapshot]: Knex.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
[TableName.SnapshotSecret]: Knex.CompositeTableType<
|
||||
TSecretSnapshotSecrets,
|
||||
TSecretSnapshotSecretsInsert,
|
||||
TSecretSnapshotSecretsUpdate
|
||||
>;
|
||||
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
|
||||
[TableName.SnapshotFolder]: Knex.CompositeTableType<
|
||||
TSecretSnapshotFolders,
|
||||
TSecretSnapshotFoldersInsert,
|
||||
TSecretSnapshotFoldersUpdate
|
||||
>;
|
||||
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate
|
||||
>;
|
||||
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate
|
||||
>;
|
||||
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
|
||||
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
|
||||
TLdapGroupMaps,
|
||||
TLdapGroupMapsInsert,
|
||||
TLdapGroupMapsUpdate
|
||||
>;
|
||||
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
|
||||
TAuditLogStreams,
|
||||
TAuditLogStreamsInsert,
|
||||
TAuditLogStreamsUpdate
|
||||
>;
|
||||
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
|
||||
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
|
||||
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
|
||||
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
|
||||
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
|
||||
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate
|
||||
>;
|
||||
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
|
||||
[TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate
|
||||
>;
|
||||
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
[TableName.SecretV2]: KnexOriginal.CompositeTableType<TSecretsV2, TSecretsV2Insert, TSecretsV2Update>;
|
||||
[TableName.SecretVersionV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update
|
||||
>;
|
||||
[TableName.SecretReferenceV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretReferencesV2,
|
||||
TSecretReferencesV2Insert,
|
||||
TSecretReferencesV2Update
|
||||
>;
|
||||
[TableName.TrustedIps]: Knex.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
// Junction tables
|
||||
[TableName.SecretV2JnTag]: KnexOriginal.CompositeTableType<
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate
|
||||
>;
|
||||
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
|
||||
[TableName.JnSecretTag]: Knex.CompositeTableType<
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
|
||||
[TableName.SecretVersionTag]: Knex.CompositeTableType<
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionV2Tag]: KnexOriginal.CompositeTableType<
|
||||
TSecretVersionV2TagJunction,
|
||||
TSecretVersionV2TagJunctionInsert,
|
||||
TSecretVersionV2TagJunctionUpdate
|
||||
>;
|
||||
[TableName.SnapshotSecretV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretSnapshotSecretsV2,
|
||||
TSecretSnapshotSecretsV2Insert,
|
||||
TSecretSnapshotSecretsV2Update
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestsSecretsV2,
|
||||
TSecretApprovalRequestsSecretsV2Insert,
|
||||
TSecretApprovalRequestsSecretsV2Update
|
||||
>;
|
||||
[TableName.SecretApprovalRequestSecretTagV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretApprovalRequestSecretTagsV2,
|
||||
TSecretApprovalRequestSecretTagsV2Insert,
|
||||
TSecretApprovalRequestSecretTagsV2Update
|
||||
>;
|
||||
[TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationOutputV2,
|
||||
TSecretRotationOutputV2Insert,
|
||||
TSecretRotationOutputV2Update
|
||||
>;
|
||||
// KMS service
|
||||
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate
|
||||
>;
|
||||
[TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>;
|
||||
[TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>;
|
||||
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TSlackIntegrations,
|
||||
TSlackIntegrationsInsert,
|
||||
TSlackIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate
|
||||
>;
|
||||
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
|
||||
TWorkflowIntegrations,
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
>;
|
||||
[TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
>;
|
||||
[TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
|
||||
TProjectTemplates,
|
||||
TProjectTemplatesInsert,
|
||||
TProjectTemplatesUpdate
|
||||
>;
|
||||
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
|
||||
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
|
||||
TProjectSplitBackfillIds,
|
||||
TProjectSplitBackfillIdsInsert,
|
||||
TProjectSplitBackfillIdsUpdate
|
||||
>;
|
||||
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
|
||||
TResourceMetadata,
|
||||
TResourceMetadataInsert,
|
||||
TResourceMetadataUpdate
|
||||
>;
|
||||
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
|
||||
TAppConnections,
|
||||
TAppConnectionsInsert,
|
||||
TAppConnectionsUpdate
|
||||
>;
|
||||
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
|
||||
[TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
|
||||
[TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmipOrgConfigs,
|
||||
TKmipOrgConfigsInsert,
|
||||
TKmipOrgConfigsUpdate
|
||||
>;
|
||||
[TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
|
||||
TKmipOrgServerCertificates,
|
||||
TKmipOrgServerCertificatesInsert,
|
||||
TKmipOrgServerCertificatesUpdate
|
||||
>;
|
||||
[TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
|
||||
TKmipClientCertificates,
|
||||
TKmipClientCertificatesInsert,
|
||||
TKmipClientCertificatesUpdate
|
||||
>;
|
||||
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
|
||||
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate
|
||||
>;
|
||||
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
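A quick note on what these `CompositeTableType` registrations buy: once a table is declared in the `Tables` interface, Knex statically checks the row type, insert payload, and update payload for that table. A minimal usage sketch, assuming the `TableName.Identity` entry above (the `name` field is illustrative):

import { TableName } from "./db/schemas";
import { TDbClient } from "./db";

// Rows come back typed as TIdentities; a misspelled column or a bad
// insert payload is a compile-time error rather than a runtime surprise.
const getIdentityName = async (db: TDbClient, id: string) => {
  const identity = await db(TableName.Identity).where({ id }).first();
  return identity?.name;
};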
4
backend/src/@types/ldif.d.ts
vendored
@ -1,4 +0,0 @@
declare module "ldif" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function parse(input: string, ...args: any[]): any;
}
@ -1,105 +0,0 @@
import path from "node:path";

import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";

import { PgSqlLock } from "./keystore/keystore";

dotenv.config();

type TArgs = {
  auditLogDb?: Knex;
  applicationDb: Knex;
  logger: Logger;
};

const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
  directory: path.join(__dirname, "./db/migrations"),
  loadExtensions: [".mjs", ".ts"],
  tableName: "infisical_migrations"
};

const migrationStatusCheckErrorHandler = (err: Error) => {
  // happens the first time, when the migration table itself has not been created yet
  // error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
  if (err?.message?.includes("does not exist")) {
    return true;
  }
  throw err;
};

export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
  try {
    // akhilmhdh(Feb 10 2025): 2 years from now remove this
    if (isProduction) {
      const migrationTable = migrationConfig.tableName;
      const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
      if (hasMigrationTable) {
        const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
        if (firstFile?.name?.includes(".ts")) {
          await applicationDb(migrationTable).update({
            name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
          });
        }
      }
      if (auditLogDb) {
        const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
        if (hasMigrationTableInAuditLog) {
          const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
          if (firstFile?.name?.includes(".ts")) {
            await auditLogDb(migrationTable).update({
              name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
            });
          }
        }
      }
    }

    const shouldRunMigration = Boolean(
      await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
    ); // db.length - code.length
    if (!shouldRunMigration) {
      logger.info("No migrations pending: Skipping migration process.");
      return;
    }

    if (auditLogDb) {
      await auditLogDb.transaction(async (tx) => {
        await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
        logger.info("Running audit log migrations.");

        const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
          .status(migrationConfig)
          .catch(migrationStatusCheckErrorHandler));
        if (didPreviousInstanceRunMigration) {
          logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
          return;
        }

        await auditLogDb.migrate.latest(migrationConfig);
        logger.info("Finished audit log migrations.");
      });
    }

    await applicationDb.transaction(async (tx) => {
      await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
      logger.info("Running application migrations.");

      const didPreviousInstanceRunMigration = !(await applicationDb.migrate
        .status(migrationConfig)
        .catch(migrationStatusCheckErrorHandler));
      if (didPreviousInstanceRunMigration) {
        logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
        return;
      }

      await applicationDb.migrate.latest(migrationConfig);
      logger.info("Finished application migrations.");
    });
  } catch (err) {
    logger.error(err, "Boot up migration failed");
    process.exit(1);
  }
};
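The deleted runner above serializes boot-up migrations across replicas with `pg_advisory_xact_lock`: whichever instance acquires the lock runs the migrations, the rest wait and then find nothing pending. A minimal sketch of that guard in isolation (the lock id and callback are illustrative):

import { Knex } from "knex";

// Only one instance at a time can hold the advisory lock; Postgres
// releases it automatically when the wrapping transaction ends.
const withBootLock = async (db: Knex, lockId: number, fn: () => Promise<void>) =>
  db.transaction(async (tx) => {
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [lockId]);
    await fn();
  });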
@ -1,75 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";

import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";

// Update with your config settings.
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
  console.info("Dedicated audit log database not found. No further migrations necessary");
  process.exit(0);
}

console.info("Executing migration on audit log database...");

export default {
  development: {
    client: "postgres",
    connection: {
      connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
      host: process.env.AUDIT_LOGS_DB_HOST,
      port: process.env.AUDIT_LOGS_DB_PORT,
      user: process.env.AUDIT_LOGS_DB_USER,
      database: process.env.AUDIT_LOGS_DB_NAME,
      password: process.env.AUDIT_LOGS_DB_PASSWORD,
      ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
    pool: {
      min: 2,
      max: 10
    },
    seeds: {
      directory: "./seeds"
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  },
  production: {
    client: "postgres",
    connection: {
      connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
      host: process.env.AUDIT_LOGS_DB_HOST,
      port: process.env.AUDIT_LOGS_DB_PORT,
      user: process.env.AUDIT_LOGS_DB_USER,
      database: process.env.AUDIT_LOGS_DB_NAME,
      password: process.env.AUDIT_LOGS_DB_PASSWORD,
      ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  }
} as Knex.Config;
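For context, a knexfile like the deleted one is consumed by the Knex migration API (or the CLI via `--knexfile`). A minimal programmatic sketch; the import path and the choice of environment block are assumptions for illustration:

import knex from "knex";

import auditLogKnexfile from "./auditlog-knexfile"; // hypothetical path

// The default export bundles per-environment blocks; pick one and run
// any pending migrations against the dedicated audit log database.
const { production } = auditLogKnexfile as Record<string, object>;
const auditDb = knex(production);
void auditDb.migrate.latest().then(() => auditDb.destroy());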
@ -1,2 +1,2 @@
export type { TDbClient } from "./instance";
export { initAuditLogDbConnection, initDbConnection } from "./instance";
export { initDbConnection } from "./instance";
@ -1,38 +1,8 @@
import knex, { Knex } from "knex";
import knex from "knex";

export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({
  dbConnectionUri,
  dbRootCert,
  readReplicas = []
}: {
  dbConnectionUri: string;
  dbRootCert?: string;
  readReplicas?: {
    dbConnectionUri: string;
    dbRootCert?: string;
  }[];
}) => {
  // akhilmhdh: the default Knex is knex.Knex<any, any[]>, but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
  // this was causing issues with files like `snapshot-dal` `findRecursivelySnapshots`, so I am explicitly putting the any and unknown[]
  // eslint-disable-next-line
  let db: Knex<any, unknown[]>;
  // eslint-disable-next-line
  let readReplicaDbs: Knex<any, unknown[]>[];
  // @ts-expect-error the querybuilder type is expected but our intention is to return a knex instance
  knex.QueryBuilder.extend("primaryNode", () => {
    return db;
  });

  // @ts-expect-error the querybuilder type is expected but our intention is to return a knex instance
  knex.QueryBuilder.extend("replicaNode", () => {
    if (!readReplicaDbs.length) return db;

    const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)];
    return selectedReplica;
  });

  db = knex({
export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
  const db = knex({
    client: "pg",
    connection: {
      connectionString: dbConnectionUri,
@ -49,75 +19,8 @@ export const initDbConnection = ({
          ca: Buffer.from(dbRootCert, "base64").toString("ascii")
        }
      : false
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  });

  readReplicaDbs = readReplicas.map((el) => {
    const replicaDbCertificate = el.dbRootCert || dbRootCert;
    return knex({
      client: "pg",
      connection: {
        connectionString: el.dbConnectionUri,
        ssl: replicaDbCertificate
          ? {
              rejectUnauthorized: true,
              ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
            }
          : false
      },
      migrations: {
        tableName: "infisical_migrations"
      }
    });
  });

  return db;
};

export const initAuditLogDbConnection = ({
  dbConnectionUri,
  dbRootCert
}: {
  dbConnectionUri: string;
  dbRootCert?: string;
}) => {
  // akhilmhdh: the default Knex is knex.Knex<any, any[]>, but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
  // this was causing issues with files like `snapshot-dal` `findRecursivelySnapshots`, so I am explicitly putting the any and unknown[]
  // eslint-disable-next-line
  const db: Knex<any, unknown[]> = knex({
    client: "pg",
    connection: {
      connectionString: dbConnectionUri,
      host: process.env.AUDIT_LOGS_DB_HOST,
      // @ts-expect-error I have no clue why only for the port there is a type error
      // eslint-disable-next-line
      port: process.env.AUDIT_LOGS_DB_PORT,
      user: process.env.AUDIT_LOGS_DB_USER,
      database: process.env.AUDIT_LOGS_DB_NAME,
      password: process.env.AUDIT_LOGS_DB_PASSWORD,
      ssl: dbRootCert
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(dbRootCert, "base64").toString("ascii")
          }
        : false
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  });

  // we add these overrides so that auditLogDb and the primary DB are interchangeable
  db.primaryNode = () => {
    return db;
  };

  db.replicaNode = () => {
    return db;
  };

  return db;
};
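The removed `replicaNode` / `primaryNode` extensions let data-access code route reads to a randomly chosen replica while writes stay on the primary, and the audit log connection stubs both out so the two handles stay interchangeable. A minimal usage sketch, assuming the extensions are registered as above (table and column names are illustrative):

import { TableName } from "./schemas";
import { TDbClient } from "./index";

// A read can tolerate replica lag, so route it through a replica;
// replicaNode() falls back to the primary when no replicas exist.
const findProjectById = async (db: TDbClient, id: string) =>
  db.replicaNode()(TableName.Project).where({ id }).first();

// A write must always hit the primary connection.
const renameProject = async (db: TDbClient, id: string, name: string) =>
  db(TableName.Project).where({ id }).update({ name });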
@ -38,8 +38,7 @@ export default {
      directory: "./seeds"
    },
    migrations: {
      tableName: "infisical_migrations",
      loadExtensions: [".mjs", ".ts"]
      tableName: "infisical_migrations"
    }
  },
  production: {
@ -63,8 +62,7 @@ export default {
      max: 10
    },
    migrations: {
      tableName: "infisical_migrations",
      loadExtensions: [".mjs", ".ts"]
      tableName: "infisical_migrations"
    }
  }
} as Knex.Config;
@ -1,161 +0,0 @@
import kx, { Knex } from "knex";

import { TableName } from "../schemas";

const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs";

const formatPartitionDate = (date: Date) => {
  const year = date.getFullYear();
  const month = String(date.getMonth() + 1).padStart(2, "0");
  const day = String(date.getDate()).padStart(2, "0");

  return `${year}-${month}-${day}`;
};

const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
  const startDateStr = formatPartitionDate(startDate);
  const endDateStr = formatPartitionDate(endDate);

  const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;

  await knex.schema.raw(
    `CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
  );
};

const up = async (knex: Knex): Promise<void> => {
  console.info("Dropping primary key of audit log table...");
  await knex.schema.alterTable(TableName.AuditLog, (t) => {
    // remove existing keys
    t.dropPrimary();
  });

  // Get all indices of the audit log table and drop them
  const indexNames: { rows: { indexname: string }[] } = await knex.raw(
    `
    SELECT indexname
    FROM pg_indexes
    WHERE tablename = '${TableName.AuditLog}'
    `
  );

  console.log(
    "Deleting existing audit log indices:",
    indexNames.rows.map((e) => e.indexname)
  );

  for await (const row of indexNames.rows) {
    await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`);
  }

  // renaming audit log to intermediate table
  console.log("Renaming audit log table to the intermediate name");
  await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE);

  if (!(await knex.schema.hasTable(TableName.AuditLog))) {
    const createTableSql = knex.schema
      .createTable(TableName.AuditLog, (t) => {
        t.uuid("id").defaultTo(knex.fn.uuid());
        t.string("actor").notNullable();
        t.jsonb("actorMetadata").notNullable();
        t.string("ipAddress");
        t.string("eventType").notNullable();
        t.jsonb("eventMetadata");
        t.string("userAgent");
        t.string("userAgentType");
        t.datetime("expiresAt");
        t.timestamps(true, true, true);
        t.uuid("orgId");
        t.string("projectId");
        t.string("projectName");
        t.primary(["id", "createdAt"]);
      })
      .toString();

    console.info("Creating partition table...");
    await knex.schema.raw(`
      ${createTableSql} PARTITION BY RANGE ("createdAt");
    `);

    console.log("Adding indices...");
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      t.index(["projectId", "createdAt"]);
      t.index(["orgId", "createdAt"]);
      t.index("expiresAt");
      t.index("orgId");
      t.index("projectId");
    });

    console.log("Adding GIN indices...");

    await knex.raw(
      `CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)`
    );
    console.log("GIN index for actorMetadata done");

    await knex.raw(
      `CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)`
    );
    console.log("GIN index for eventMetadata done");

    // create default partition
    console.log("Creating default partition...");
    await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`);

    const nextDate = new Date();
    nextDate.setDate(nextDate.getDate() + 1);
    const nextDateStr = formatPartitionDate(nextDate);

    console.log("Attaching existing audit log table as a partition...");
    await knex.schema.raw(`
      ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old
      CHECK ( "createdAt" < DATE '${nextDateStr}' );

      ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE}
      FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}');
    `);

    // create partition from now until end of month
    console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
    await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));

    // create partitions 4 years ahead
    const partitionMonths = 4 * 12;
    const partitionPromises: Promise<void>[] = [];
    for (let x = 1; x <= partitionMonths; x += 1) {
      partitionPromises.push(
        createAuditLogPartition(
          knex,
          new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
          new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
        )
      );
    }

    await Promise.all(partitionPromises);
    console.log("Partition migration complete");
  }
};

export const executeMigration = async (url: string) => {
  console.log("Executing migration...");
  const knex = kx({
    client: "pg",
    connection: url
  });

  await knex.transaction(async (tx) => {
    await up(tx);
  });
};

const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI;
if (!dbUrl) {
  console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env");
  process.exit(1);
}

void executeMigration(dbUrl).then(() => {
  console.log("Migration: partition-audit-logs DONE");
  process.exit(0);
});
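Once a range-partitioned layout like the one above is in place, Postgres tracks which partitions are attached to the parent. A small diagnostic sketch for confirming the result, written against the same schema names used by the migration:

import { Knex } from "knex";
import { TableName } from "../schemas";

// List every partition attached to the audit log table; expect the
// default partition, the attached intermediate table, and the
// monthly audit_logs_YYYYMMDD_YYYYMMDD partitions created ahead of time.
const listAuditLogPartitions = async (db: Knex) =>
  db.raw(
    `SELECT inhrelid::regclass AS partition
     FROM pg_inherits
     WHERE inhparent = ?::regclass`,
    [TableName.AuditLog]
  );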
@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("integration").notNullable();
      t.string("teamId"); // vercel-specific
      t.string("url"); // for self-hosted
      t.string("url"); // for self hosted
      t.string("namespace"); // hashicorp specific
      t.string("accountId"); // netlify
      t.text("refreshCiphertext");
@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
    await knex.schema.createTable(TableName.Integration, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.boolean("isActive").notNullable();
      t.string("url"); // self-hosted
      t.string("url"); // self hosted
      t.string("app"); // name of app in provider
      t.string("appId");
      t.string("targetEnvironment");
@ -1,58 +0,0 @@
import { Knex } from "knex";

import { SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const doesTableExist = await knex.schema.hasTable(TableName.DynamicSecret);
  if (!doesTableExist) {
    await knex.schema.createTable(TableName.DynamicSecret, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.integer("version").notNullable();
      t.string("type").notNullable();
      t.string("defaultTTL").notNullable();
      t.string("maxTTL");
      t.string("inputIV").notNullable();
      t.text("inputCiphertext").notNullable();
      t.string("inputTag").notNullable();
      t.string("algorithm").notNullable().defaultTo(SecretEncryptionAlgo.AES_256_GCM);
      t.string("keyEncoding").notNullable().defaultTo(SecretKeyEncoding.UTF8);
      t.uuid("folderId").notNullable();
      // for background process communication
      t.string("status");
      t.string("statusDetails");
      t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
      t.unique(["name", "folderId"]);
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.DynamicSecret);

  const doesTableDynamicSecretLease = await knex.schema.hasTable(TableName.DynamicSecretLease);
  if (!doesTableDynamicSecretLease) {
    await knex.schema.createTable(TableName.DynamicSecretLease, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.integer("version").notNullable();
      t.string("externalEntityId").notNullable();
      t.datetime("expireAt").notNullable();
      // for background process communication
      t.string("status");
      t.string("statusDetails");
      t.uuid("dynamicSecretId").notNullable();
      t.foreign("dynamicSecretId").references("id").inTable(TableName.DynamicSecret).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.DynamicSecretLease);
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.DynamicSecretLease);
  await knex.schema.dropTableIfExists(TableName.DynamicSecretLease);

  await dropOnUpdateTrigger(knex, TableName.DynamicSecret);
  await knex.schema.dropTableIfExists(TableName.DynamicSecret);
}
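Several of these migrations lean on `createOnUpdateTrigger` / `dropOnUpdateTrigger` from `../utils`, whose definitions are not part of this diff. A plausible sketch of the pattern such helpers implement, keeping `updatedAt` fresh via a row-level trigger; the trigger name and the shared `on_update_timestamp()` function are assumptions:

import { Knex } from "knex";

// Assumed shape: attach a BEFORE UPDATE trigger that invokes a shared
// plpgsql function setting NEW."updatedAt" = NOW() on every row change.
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);

export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON "${tableName}"`);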
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ProjectUserAdditionalPrivilege))) {
    await knex.schema.createTable(TableName.ProjectUserAdditionalPrivilege, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("slug", 60).notNullable();
      t.uuid("projectMembershipId").notNullable();
      t.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
      t.boolean("isTemporary").notNullable().defaultTo(false);
      t.string("temporaryMode");
      t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
      t.datetime("temporaryAccessStartTime");
      t.datetime("temporaryAccessEndTime");
      t.jsonb("permissions").notNullable();
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.ProjectUserAdditionalPrivilege);
  await knex.schema.dropTableIfExists(TableName.ProjectUserAdditionalPrivilege);
}
@ -1,32 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityProjectAdditionalPrivilege))) {
    await knex.schema.createTable(TableName.IdentityProjectAdditionalPrivilege, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("slug", 60).notNullable();
      t.uuid("projectMembershipId").notNullable();
      t.foreign("projectMembershipId")
        .references("id")
        .inTable(TableName.IdentityProjectMembership)
        .onDelete("CASCADE");
      t.boolean("isTemporary").notNullable().defaultTo(false);
      t.string("temporaryMode");
      t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
      t.datetime("temporaryAccessStartTime");
      t.datetime("temporaryAccessEndTime");
      t.jsonb("permissions").notNullable();
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.IdentityProjectAdditionalPrivilege);
  await knex.schema.dropTableIfExists(TableName.IdentityProjectAdditionalPrivilege);
}
@ -1,112 +0,0 @@
import { Knex } from "knex";
import { z } from "zod";

import { TableName, TOrgMemberships } from "../schemas";

const validateOrgMembership = (membershipToValidate: TOrgMemberships, firstMembership: TOrgMemberships) => {
  const firstOrgId = firstMembership.orgId;
  const firstUserId = firstMembership.userId;

  if (membershipToValidate.id === firstMembership.id) {
    return;
  }

  if (membershipToValidate.inviteEmail !== firstMembership.inviteEmail) {
    throw new Error(`Invite emails are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
  }
  if (membershipToValidate.orgId !== firstMembership.orgId) {
    throw new Error(`OrgIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
  }
  if (membershipToValidate.role !== firstMembership.role) {
    throw new Error(`Roles are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
  }
  if (membershipToValidate.roleId !== firstMembership.roleId) {
    throw new Error(`RoleIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
  }
  if (membershipToValidate.status !== firstMembership.status) {
    throw new Error(`Statuses are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
  }
  if (membershipToValidate.userId !== firstMembership.userId) {
    throw new Error(`UserIds are different for the same userId and orgId: ${firstUserId}, ${firstOrgId}`);
  }
};

export async function up(knex: Knex): Promise<void> {
  const RowSchema = z.object({
    userId: z.string(),
    orgId: z.string(),
    cnt: z.string()
  });

  // Transactional find and delete of duplicate rows
  await knex.transaction(async (tx) => {
    const duplicateRows = await tx(TableName.OrgMembership)
      .select("userId", "orgId") // Select the userId and orgId so we can group by them
      .whereNotNull("userId") // Ensure that the userId is not null
      .count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there is more than 1 row (a duplicate)
      .groupBy("userId", "orgId")
      .havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions

    // Parse the rows to ensure they are in the correct format, and for type safety
    const parsedRows = RowSchema.array().parse(duplicateRows);

    // For each of the duplicate rows, loop through and find the actual memberships to delete
    for (const row of parsedRows) {
      const count = Number(row.cnt);

      // An extra check to ensure that the count is actually a number, and that there are at least 2 rows
      if (typeof count !== "number" || count < 2) {
        // eslint-disable-next-line no-continue
        continue;
      }

      // Find all the organization memberships that have the same userId and orgId
      // eslint-disable-next-line no-await-in-loop
      const rowsToDelete = await tx(TableName.OrgMembership).where({
        userId: row.userId,
        orgId: row.orgId
      });

      // Ensure that all the rows have exactly the same values, except id, createdAt, updatedAt
      for (const rowToDelete of rowsToDelete) {
        validateOrgMembership(rowToDelete, rowsToDelete[0]);
      }

      // Find the row to keep (the one with the earliest createdAt)

      let lowestCreatedAt: number | null = null;
      let latestCreatedRow: TOrgMemberships | null = null;

      for (const rowToDelete of rowsToDelete) {
        if (lowestCreatedAt === null || rowToDelete.createdAt.getTime() < lowestCreatedAt) {
          lowestCreatedAt = rowToDelete.createdAt.getTime();
          latestCreatedRow = rowToDelete;
        }
      }
      if (!latestCreatedRow) {
        throw new Error("Failed to find last created membership");
      }

      // Filter the kept row out of the rows to delete
      const membershipIdsToDelete = rowsToDelete.map((r) => r.id).filter((id) => id !== latestCreatedRow!.id);

      // eslint-disable-next-line no-await-in-loop
      const numberOfRowsDeleted = await tx(TableName.OrgMembership).whereIn("id", membershipIdsToDelete).delete();

      // eslint-disable-next-line no-console
      console.log(
        `Deleted ${numberOfRowsDeleted} duplicate organization memberships for ${row.userId} and ${row.orgId}`
      );
    }
  });

  await knex.schema.alterTable(TableName.OrgMembership, (table) => {
    table.unique(["userId", "orgId"]);
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.OrgMembership, (table) => {
    table.dropUnique(["userId", "orgId"]);
  });
}
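After the dedup pass above, the unique constraint makes duplicates impossible going forward; a quick way to confirm none survived is to rerun the same grouping query and expect zero rows. A minimal check reusing the migration's own shape:

const remainingDuplicates = await knex(TableName.OrgMembership)
  .select("userId", "orgId")
  .whereNotNull("userId")
  .count("* as cnt")
  .groupBy("userId", "orgId")
  .havingRaw("count(*) > ?", [1]);

// Expect an empty array; any row here would have blocked the unique index.
console.log(remainingDuplicates.length === 0 ? "clean" : remainingDuplicates);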
@ -1,82 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.Groups))) {
    await knex.schema.createTable(TableName.Groups, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.string("name").notNullable();
      t.string("slug").notNullable();
      t.unique(["orgId", "slug"]);
      t.string("role").notNullable();
      t.uuid("roleId");
      t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.Groups);

  if (!(await knex.schema.hasTable(TableName.UserGroupMembership))) {
    await knex.schema.createTable(TableName.UserGroupMembership, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); // link to user and link to groups cascade on groups
      t.uuid("userId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.uuid("groupId").notNullable();
      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.UserGroupMembership);

  if (!(await knex.schema.hasTable(TableName.GroupProjectMembership))) {
    await knex.schema.createTable(TableName.GroupProjectMembership, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("groupId").notNullable();
      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }
  await createOnUpdateTrigger(knex, TableName.GroupProjectMembership);

  if (!(await knex.schema.hasTable(TableName.GroupProjectMembershipRole))) {
    await knex.schema.createTable(TableName.GroupProjectMembershipRole, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("role").notNullable();
      t.uuid("projectMembershipId").notNullable();
      t.foreign("projectMembershipId").references("id").inTable(TableName.GroupProjectMembership).onDelete("CASCADE");
      // until the role is changed/removed, the custom role should not be deleted
      t.uuid("customRoleId");
      t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
      t.boolean("isTemporary").notNullable().defaultTo(false);
      t.string("temporaryMode");
      t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
      t.datetime("temporaryAccessStartTime");
      t.datetime("temporaryAccessEndTime");
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.GroupProjectMembershipRole);
  await dropOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);

  await knex.schema.dropTableIfExists(TableName.UserGroupMembership);
  await dropOnUpdateTrigger(knex, TableName.UserGroupMembership);

  await knex.schema.dropTableIfExists(TableName.GroupProjectMembership);
  await dropOnUpdateTrigger(knex, TableName.GroupProjectMembership);

  await knex.schema.dropTableIfExists(TableName.Groups);
  await dropOnUpdateTrigger(knex, TableName.Groups);
}
@ -1,47 +0,0 @@
import { Knex } from "knex";

import { ProjectMembershipRole, TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
  const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
  await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
    if (doesProjectRoleFieldExist) t.dropColumn("role");
    if (doesProjectRoleIdFieldExist) t.dropColumn("roleId");
  });

  const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
  const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
    TableName.IdentityProjectMembership,
    "roleId"
  );
  await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
    if (doesIdentityProjectRoleFieldExist) t.dropColumn("role");
    if (doesIdentityProjectRoleIdFieldExist) t.dropColumn("roleId");
  });
}

export async function down(knex: Knex): Promise<void> {
  const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
  const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
  await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
    if (!doesProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
    if (!doesProjectRoleIdFieldExist) {
      t.uuid("roleId");
      t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
    }
  });

  const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
  const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
    TableName.IdentityProjectMembership,
    "roleId"
  );
  await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
    if (!doesIdentityProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
    if (!doesIdentityProjectRoleIdFieldExist) {
      t.uuid("roleId");
      t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
    }
  });
}
@ -1,15 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
    t.boolean("isPending").notNullable().defaultTo(false);
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.UserGroupMembership, (t) => {
    t.dropColumn("isPending");
  });
}
@ -1,34 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.LdapGroupMap))) {
    await knex.schema.createTable(TableName.LdapGroupMap, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("ldapConfigId").notNullable();
      t.foreign("ldapConfigId").references("id").inTable(TableName.LdapConfig).onDelete("CASCADE");
      t.string("ldapGroupCN").notNullable();
      t.uuid("groupId").notNullable();
      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
      t.unique(["ldapGroupCN", "groupId", "ldapConfigId"]);
    });
  }

  await createOnUpdateTrigger(knex, TableName.LdapGroupMap);

  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.string("groupSearchBase").notNullable().defaultTo("");
    t.string("groupSearchFilter").notNullable().defaultTo("");
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.LdapGroupMap);
  await dropOnUpdateTrigger(knex, TableName.LdapGroupMap);
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.dropColumn("groupSearchBase");
    t.dropColumn("groupSearchFilter");
  });
}
@ -1,15 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.string("searchFilter").notNullable().defaultTo("");
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.dropColumn("searchFilter");
  });
}
@ -1,28 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist && doesCreatedAtExist) t.index(["projectId", "createdAt"]);
      if (doesOrgIdExist && doesCreatedAtExist) t.index(["orgId", "createdAt"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist && doesCreatedAtExist) t.dropIndex(["projectId", "createdAt"]);
      if (doesOrgIdExist && doesCreatedAtExist) t.dropIndex(["orgId", "createdAt"]);
    });
  }
}
@ -1,28 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AuditLogStream))) {
    await knex.schema.createTable(TableName.AuditLogStream, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("url").notNullable();
      t.text("encryptedHeadersCiphertext");
      t.text("encryptedHeadersIV");
      t.text("encryptedHeadersTag");
      t.string("encryptedHeadersAlgorithm");
      t.string("encryptedHeadersKeyEncoding");
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.AuditLogStream);
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.AuditLogStream);
  await knex.schema.dropTableIfExists(TableName.AuditLogStream);
}
@ -1,54 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const isUsersTablePresent = await knex.schema.hasTable(TableName.Users);
  if (isUsersTablePresent) {
    const hasIsEmailVerifiedColumn = await knex.schema.hasColumn(TableName.Users, "isEmailVerified");

    if (!hasIsEmailVerifiedColumn) {
      await knex.schema.alterTable(TableName.Users, (t) => {
        t.boolean("isEmailVerified").defaultTo(false);
      });
    }

    // Backfilling the isEmailVerified to true where isAccepted is true
    await knex(TableName.Users).update({ isEmailVerified: true }).where("isAccepted", true);
  }

  const isUserAliasTablePresent = await knex.schema.hasTable(TableName.UserAliases);
  if (isUserAliasTablePresent) {
    await knex.schema.alterTable(TableName.UserAliases, (t) => {
      t.string("username").nullable().alter();
    });
  }

  const isSuperAdminTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
  if (isSuperAdminTablePresent) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.boolean("trustSamlEmails").defaultTo(false);
      t.boolean("trustLdapEmails").defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Users, "isEmailVerified")) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.dropColumn("isEmailVerified");
    });
  }

  if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustSamlEmails")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("trustSamlEmails");
    });
  }

  if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustLdapEmails")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("trustLdapEmails");
    });
  }
}
@ -1,41 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicy))) {
    await knex.schema.createTable(TableName.AccessApprovalPolicy, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.integer("approvals").defaultTo(1).notNullable();
      t.string("secretPath");

      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicy);
  }

  if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover))) {
    await knex.schema.createTable(TableName.AccessApprovalPolicyApprover, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("approverId").notNullable();
      t.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");

      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
    await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyApprover);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyApprover);
  await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicy);

  await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyApprover);
  await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicy);
}
@ -1,51 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AccessApprovalRequest))) {
    await knex.schema.createTable(TableName.AccessApprovalRequest, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());

      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");

      t.uuid("privilegeId").nullable();
      t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");

      t.uuid("requestedBy").notNullable();
      t.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");

      // We use these values to create the actual privilege at a later point in time.
      t.boolean("isTemporary").notNullable();
      t.string("temporaryRange").nullable();

      t.jsonb("permissions").notNullable();

      t.timestamps(true, true, true);
    });
  }
  await createOnUpdateTrigger(knex, TableName.AccessApprovalRequest);

  if (!(await knex.schema.hasTable(TableName.AccessApprovalRequestReviewer))) {
    await knex.schema.createTable(TableName.AccessApprovalRequestReviewer, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("member").notNullable();
      t.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
      t.string("status").notNullable();
      t.uuid("requestId").notNullable();
      t.foreign("requestId").references("id").inTable(TableName.AccessApprovalRequest).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }
  await createOnUpdateTrigger(knex, TableName.AccessApprovalRequestReviewer);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.AccessApprovalRequestReviewer);
  await knex.schema.dropTableIfExists(TableName.AccessApprovalRequest);

  await dropOnUpdateTrigger(knex, TableName.AccessApprovalRequestReviewer);
  await dropOnUpdateTrigger(knex, TableName.AccessApprovalRequest);
}
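Note that in this file the createOnUpdateTrigger calls sit outside the hasTable guards, so they run on every migration pass and must be idempotent. For orientation, a hypothetical write against the table created above; the identifiers and values are illustrative, not taken from the application code:

// Request temporary elevated access; the privilege row itself is created later,
// which is why only isTemporary/temporaryRange/permissions are stored here.
await knex(TableName.AccessApprovalRequest).insert({
  policyId, // FK -> AccessApprovalPolicy.id
  requestedBy, // FK -> ProjectMembership.id
  isTemporary: true,
  temporaryRange: "1h",
  permissions: JSON.stringify([{ action: "read", subject: "secrets" }]) // jsonb column
});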
@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityAwsAuth))) {
    await knex.schema.createTable(TableName.IdentityAwsAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("type").notNullable();
      t.string("stsEndpoint").notNullable();
      t.string("allowedPrincipalArns").notNullable();
      t.string("allowedAccountIds").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityAwsAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityAwsAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityAwsAuth);
}
@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityGcpAuth))) {
    await knex.schema.createTable(TableName.IdentityGcpAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("type").notNullable();
      t.string("allowedServiceAccounts").notNullable();
      t.string("allowedProjects").notNullable();
      t.string("allowedZones").notNullable(); // GCE only (fully qualified zone names)
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityGcpAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityGcpAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityGcpAuth);
}
@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretReference))) {
    await knex.schema.createTable(TableName.SecretReference, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("environment").notNullable();
      t.string("secretPath").notNullable();
      t.uuid("secretId").notNullable();
      t.foreign("secretId").references("id").inTable(TableName.Secret).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SecretReference);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretReference);
  await dropOnUpdateTrigger(knex, TableName.SecretReference);
}
@ -1,36 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityKubernetesAuth))) {
    await knex.schema.createTable(TableName.IdentityKubernetesAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("kubernetesHost").notNullable();
      t.text("encryptedCaCert").notNullable();
      t.string("caCertIV").notNullable();
      t.string("caCertTag").notNullable();
      t.text("encryptedTokenReviewerJwt").notNullable();
      t.string("tokenReviewerJwtIV").notNullable();
      t.string("tokenReviewerJwtTag").notNullable();
      t.string("allowedNamespaces").notNullable();
      t.string("allowedNames").notNullable();
      t.string("allowedAudience").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityKubernetesAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}
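The encrypted*/IV/Tag column triplets indicate authenticated symmetric encryption (for example AES-256-GCM) applied before storage. A minimal sketch of producing such a triplet with Node's crypto module, assuming a 32-byte key supplied by the application's key management; the helper name and base64 encoding are illustrative:

import crypto from "node:crypto";

// Encrypt a CA certificate into the (ciphertext, iv, tag) shape the table expects.
const encryptForStorage = (plaintext: string, key: Buffer) => {
  const iv = crypto.randomBytes(12); // 96-bit IV, the GCM convention
  const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  return {
    encryptedCaCert: ciphertext.toString("base64"),
    caCertIV: iv.toString("base64"),
    caCertTag: cipher.getAuthTag().toString("base64")
  };
};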
@ -1,43 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
  const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
  const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");

  await knex.schema.alterTable(TableName.Integration, (t) => {
    if (!hasIsSyncedColumn) {
      t.boolean("isSynced").nullable();
    }

    if (!hasSyncMessageColumn) {
      t.text("syncMessage").nullable();
    }

    if (!hasLastSyncJobId) {
      t.string("lastSyncJobId").nullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
  const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
  const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");

  await knex.schema.alterTable(TableName.Integration, (t) => {
    if (hasIsSyncedColumn) {
      t.dropColumn("isSynced");
    }

    if (hasSyncMessageColumn) {
      t.dropColumn("syncMessage");
    }

    if (hasLastSyncJobId) {
      t.dropColumn("lastSyncJobId");
    }
  });
}
@ -1,26 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist) t.index("projectId");
      if (doesOrgIdExist) t.index("orgId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist) t.dropIndex("projectId");
      if (doesOrgIdExist) t.dropIndex("orgId");
    });
  }
}
@ -1,22 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesEnvIdExist) t.index("envId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");

  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesEnvIdExist) t.dropIndex("envId");
    });
  }
}
@ -1,22 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");
  if (await knex.schema.hasTable(TableName.SecretVersion)) {
    await knex.schema.alterTable(TableName.SecretVersion, (t) => {
      if (doesEnvIdExist) t.index("envId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");

  if (await knex.schema.hasTable(TableName.SecretVersion)) {
    await knex.schema.alterTable(TableName.SecretVersion, (t) => {
      if (doesEnvIdExist) t.dropIndex("envId");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSnapshotIdExist) t.index("snapshotId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSnapshotIdExist) t.dropIndex("snapshotId");
    });
  }
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
    await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
      if (doesSnapshotIdExist) t.index("snapshotId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
  if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
    await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
      if (doesSnapshotIdExist) t.dropIndex("snapshotId");
    });
  }
}
@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
  const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");
  if (await knex.schema.hasTable(TableName.Secret)) {
    await knex.schema.alterTable(TableName.Secret, (t) => {
      if (doesFolderIdExist && doesUserIdExist) t.index(["folderId", "userId"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
  const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");

  if (await knex.schema.hasTable(TableName.Secret)) {
    await knex.schema.alterTable(TableName.Secret, (t) => {
      if (doesUserIdExist && doesFolderIdExist) t.dropIndex(["folderId", "userId"]);
    });
  }
}
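Column order matters for a composite index: ["folderId", "userId"] serves queries that filter on folderId alone as well as on both columns, but not on userId alone. A hypothetical lookup that benefits:

// Fetch one user's personal-override secrets within a folder; the WHERE clause
// can be satisfied directly from the (folderId, userId) index.
const personalSecrets = await knex(TableName.Secret).where({ folderId, userId });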
@ -1,22 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesExpireAtExist) t.index("expiresAt");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesExpireAtExist) t.dropIndex("expiresAt");
    });
  }
}
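Indexing expiresAt is what makes a periodic retention sweep cheap. A sketch of the kind of pruning job that would rely on it; the batch size and loop shape are illustrative, and the actual pruning logic is not part of this diff:

const BATCH_SIZE = 1000;
let deleted: number;
do {
  // The subquery keeps each DELETE small so the sweep never holds long locks.
  const expiredIds = knex(TableName.AuditLog)
    .select("id")
    .where("expiresAt", "<", new Date())
    .limit(BATCH_SIZE);
  deleted = await knex(TableName.AuditLog).whereIn("id", expiredIds).delete();
} while (deleted === BATCH_SIZE);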
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityAzureAuth))) {
    await knex.schema.createTable(TableName.IdentityAzureAuth, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
      t.jsonb("accessTokenTrustedIps").notNullable();
      t.timestamps(true, true, true);
      t.uuid("identityId").notNullable().unique();
      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      t.string("tenantId").notNullable();
      t.string("resource").notNullable();
      t.string("allowedServicePrincipalIds").notNullable();
    });
  }

  await createOnUpdateTrigger(knex, TableName.IdentityAzureAuth);
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityAzureAuth);
  await dropOnUpdateTrigger(knex, TableName.IdentityAzureAuth);
}
@ -1,43 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasConsecutiveFailedMfaAttempts = await knex.schema.hasColumn(TableName.Users, "consecutiveFailedMfaAttempts");
  const hasIsLocked = await knex.schema.hasColumn(TableName.Users, "isLocked");
  const hasTemporaryLockDateEnd = await knex.schema.hasColumn(TableName.Users, "temporaryLockDateEnd");

  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!hasConsecutiveFailedMfaAttempts) {
      t.integer("consecutiveFailedMfaAttempts").defaultTo(0);
    }

    if (!hasIsLocked) {
      t.boolean("isLocked").defaultTo(false);
    }

    if (!hasTemporaryLockDateEnd) {
      t.dateTime("temporaryLockDateEnd").nullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasConsecutiveFailedMfaAttempts = await knex.schema.hasColumn(TableName.Users, "consecutiveFailedMfaAttempts");
  const hasIsLocked = await knex.schema.hasColumn(TableName.Users, "isLocked");
  const hasTemporaryLockDateEnd = await knex.schema.hasColumn(TableName.Users, "temporaryLockDateEnd");

  await knex.schema.alterTable(TableName.Users, (t) => {
    if (hasConsecutiveFailedMfaAttempts) {
      t.dropColumn("consecutiveFailedMfaAttempts");
    }

    if (hasIsLocked) {
      t.dropColumn("isLocked");
    }

    if (hasTemporaryLockDateEnd) {
      t.dropColumn("temporaryLockDateEnd");
    }
  });
}
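A sketch of how these three columns would typically interact on a failed MFA attempt; the threshold and lock window are invented for illustration, not taken from the application:

const MAX_ATTEMPTS = 5;
const LOCK_MINUTES = 30;

const user = await knex(TableName.Users).where({ id: userId }).first();
const attempts = (user?.consecutiveFailedMfaAttempts ?? 0) + 1;

await knex(TableName.Users)
  .where({ id: userId })
  .update({
    consecutiveFailedMfaAttempts: attempts,
    isLocked: attempts >= MAX_ATTEMPTS,
    temporaryLockDateEnd: attempts >= MAX_ATTEMPTS ? new Date(Date.now() + LOCK_MINUTES * 60 * 1000) : null
  });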
@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SecretSharing))) {
    await knex.schema.createTable(TableName.SecretSharing, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      t.text("encryptedValue").notNullable();
      t.text("iv").notNullable();
      t.text("tag").notNullable();
      t.text("hashedHex").notNullable();
      t.timestamp("expiresAt").notNullable();
      t.uuid("userId").notNullable();
      t.uuid("orgId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SecretSharing);
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.SecretSharing);
}
@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSecretVersionIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "secretVersionId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSecretVersionIdExist) t.index("secretVersionId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSecretVersionIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "secretVersionId");
  if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
    await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
      if (doesSecretVersionIdExist) t.dropIndex("secretVersionId");
    });
  }
}
@ -1,33 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
  const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");

  await knex.schema.alterTable(TableName.SecretSharing, (t) => {
    if (!hasExpiresAfterViewsColumn) {
      t.integer("expiresAfterViews");
    }

    if (hasSecretNameColumn) {
      t.dropColumn("name");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
  const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");

  await knex.schema.alterTable(TableName.SecretSharing, (t) => {
    if (hasExpiresAfterViewsColumn) {
      t.dropColumn("expiresAfterViews");
    }

    if (!hasSecretNameColumn) {
      t.string("name").notNullable();
    }
  });
}
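With expiresAfterViews holding the number of views remaining (and staying NULL for time-only expiry), retrieval logic would consume and expire a link roughly like this; a sketch, since the service layer is outside this diff:

// Consume one view, then expire the share once the counter reaches zero.
await knex(TableName.SecretSharing)
  .where({ id: sharedSecretId })
  .whereNotNull("expiresAfterViews")
  .decrement("expiresAfterViews", 1);

const shared = await knex(TableName.SecretSharing).where({ id: sharedSecretId }).first();
if (shared && shared.expiresAfterViews !== null && shared.expiresAfterViews <= 0) {
  await knex(TableName.SecretSharing).where({ id: sharedSecretId }).delete();
}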
@ -1,85 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
  const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "isReplicationSuccess"
  );
  const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "replicationStatus"
  );
  const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
  const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

  if (await knex.schema.hasTable(TableName.SecretImport)) {
    await knex.schema.alterTable(TableName.SecretImport, (t) => {
      if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
      if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
      if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
      if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
      if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
    });
  }

  const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
  if (await knex.schema.hasTable(TableName.SecretFolder)) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
    });
  }

  const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
    TableName.SecretApprovalRequest,
    "isReplicated"
  );
  if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
      if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
  const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "isReplicationSuccess"
  );
  const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
    TableName.SecretImport,
    "replicationStatus"
  );
  const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
  const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

  if (await knex.schema.hasTable(TableName.SecretImport)) {
    await knex.schema.alterTable(TableName.SecretImport, (t) => {
      if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
      if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
      if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
      if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
      if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
    });
  }

  const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
  if (await knex.schema.hasTable(TableName.SecretFolder)) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
    });
  }

  const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
    TableName.SecretApprovalRequest,
    "isReplicated"
  );
  if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
    await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
      if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
    });
  }
}
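The new columns support write-back of replication bookkeeping after each attempt; a hypothetical update inferred from the column names alone, not the application's actual code:

await knex(TableName.SecretImport)
  .where({ id: secretImportId })
  .update({
    isReplicationSuccess: succeeded,
    replicationStatus: succeeded ? "Replication completed" : errorMessage,
    lastReplicated: new Date()
  });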
Some files were not shown because too many files have changed in this diff.