Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-14 17:22:51 +00:00)

Compare commits (1 commit): infisical-... → sheen/hack

Commit: 0762d8b172
@@ -112,11 +112,4 @@ INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

3 .envrc
@@ -1,3 +0,0 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake

@@ -32,23 +32,10 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"

docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api

echo "Container status right after creation:"
docker ps -a | grep infisical-api
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -56,48 +43,35 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: "1.21.5"
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi

echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
sleep 5
SECONDS=$((SECONDS+5))

docker logs infisical-api

sleep 2
SECONDS=$((SECONDS+2))
done

if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/oasdiff/oasdiff@latest
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api || true
docker rm infisical-api || true
docker stop infisical-api
docker remove infisical-api

102 .github/workflows/codeql.yml (vendored)
@@ -1,102 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"

on:
push:
branches: [ "main", "development" ]
pull_request:
branches: [ "main", "development" ]
schedule:
- cron: '33 7 * * 3'

jobs:
analyze:
name: Analyze (${{ matrix.language }})
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners (GitHub.com only)
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
permissions:
# required for all workflows
security-events: write

# required to fetch internal or private CodeQL packs
packages: read

# only required for workflows in private repositories
actions: read
contents: read

strategy:
fail-fast: false
matrix:
include:
- language: actions
build-mode: none
- language: go
build-mode: autobuild
- language: javascript-typescript
build-mode: none
# CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
# Use `c-cpp` to analyze code written in C, C++ or both
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@v4

# Add any setup steps before running the `github/codeql-action/init` action.
# This includes steps like installing compilers or runtimes (`actions/setup-node`
# or others). This is typically only required for manual builds.
# - name: Setup runtime (example)
# uses: actions/setup-example@v1

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.

# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality

# If the analyze step fails for one of the languages you are analyzing with
# "We were unable to automatically build your code", modify the matrix above
# to set the build mode to "manual" for that language. Then modify this step
# to build your code.
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

27 .github/workflows/release-k8-operator-helm.yml (vendored)
@@ -1,27 +0,0 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:

jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2

- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0

- name: Install python
uses: actions/setup-python@v4

- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli

- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

252 .github/workflows/release_build_infisical_cli.yml (vendored)
@@ -1,147 +1,131 @@
name: Build and release CLI

on:
workflow_dispatch:
workflow_dispatch:

push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"

permissions:
contents: write
contents: write

jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

- name: Print version
run: echo ${{ env.CLI_VERSION }}

- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts

- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
npm-release:
runs-on: ubuntu-20.04
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Print version
run: echo ${{ env.CLI_VERSION }}

goreleaser:
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
with:
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --passphrase "$GPG_SIGNING_KEY_PASSPHRASE" --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts

- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack

- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

143 .github/workflows/release_docker_k8_operator.yaml (vendored)
@@ -1,107 +1,52 @@
name: Release K8 Operator Docker Image
name: Release image + Helm chart K8s Operator
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"

permissions:
contents: write
pull-requests: write
push:
tags:
- "infisical-k8-operator/v*.*.*"

jobs:
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
release:
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- uses: actions/checkout@v2

- name: Checkout code
uses: actions/checkout@v2
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1

# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1

# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}

- name: Generate Helm Chart
working-directory: k8-operator
run: make helm

- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}

- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only

# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi

- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.

Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

Once you have approved this PR, you can trigger the helm release workflow manually.
base: main

- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1

- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1

- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

8 .github/workflows/run-backend-tests.yml (vendored)
@@ -34,10 +34,7 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -47,5 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down

docker compose -f "docker-compose.dev.yml" down

@@ -8,9 +8,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52

@@ -161,9 +161,6 @@ COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false

@@ -3,10 +3,13 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base
FROM node:20-alpine AS base

FROM base AS frontend-dependencies

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./
@@ -42,8 +45,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

@@ -53,23 +56,21 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user

WORKDIR /app

# Install all required dependencies for build
RUN apt-get update && apt-get install -y \
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
freetds-dev

COPY backend/package*.json ./
RUN npm ci --only-production
@@ -85,19 +86,18 @@ FROM base AS backend-runner
WORKDIR /app

# Install all required dependencies for runtime
RUN apt-get update && apt-get install -y \
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
freetds-dev

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production
@@ -109,36 +109,34 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production

RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds-bin \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /
bash \
curl \
git \
openssh

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -156,11 +154,11 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY


COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
@@ -168,7 +166,6 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true

WORKDIR /backend

ENV TELEMETRY_ENABLED true

@@ -1,22 +1,23 @@
# Build stage
FROM node:20-slim AS build
FROM node:20-alpine AS build

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client
RUN apk --update add \
python3 \
make \
g++ \
openssh

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds-bin \
freetds-dev \
freetds \
unixodbc-dev \
libc-dev
libc-dev \
freetds-dev


COPY package*.json ./
RUN npm ci --only-production
@@ -25,36 +26,36 @@ COPY . .
RUN npm run build

# Production stage
FROM node:20-slim
FROM node:20-alpine
WORKDIR /app

ENV npm_config_cache /home/node/.npm

COPY package*.json ./

RUN apt-get update && apt-get install -y \
python3 \
make \
g++
RUN apk --update add \
python3 \
make \
g++

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds-bin \
freetds-dev \
freetds \
unixodbc-dev \
libc-dev
libc-dev \
freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .

# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.8.1 git
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js

@@ -1,4 +1,4 @@
FROM node:20-slim
FROM node:20-alpine

# ? Setup a test SoftHSM module. In production a real HSM is used.

@@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2

# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
libc-dev \
freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Build and install SoftHSM2
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# build and install SoftHSM2

RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

@@ -45,18 +45,16 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# Install pkcs11-tool
RUN apt-get install -y opensc
# install pkcs11-tool
RUN apk --update add opensc

RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

# ? App setup

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git

WORKDIR /app

@@ -1,85 +0,0 @@
FROM node:20-slim

# ? Setup a test SoftHSM module. In production a real HSM is used.

ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2

# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config \
perl \
wget

# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install

WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# Install pkcs11-tool
RUN apt-get install -y opensc

RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips

# ? App setup

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1

WORKDIR /app

COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm install

COPY . .

ENV HOST=0.0.0.0
ENV OPENSSL_CONF=/app/nodejs.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV NODE_OPTIONS=--force-fips

CMD ["npm", "run", "dev:docker"]

@@ -11,7 +11,6 @@ export const mockQueue = (): TQueueServiceFactory => {
job[name] = jobData;
},
queuePg: async () => {},
schedulePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,

@@ -120,3 +120,4 @@ export default {
};
}
};

@@ -1,16 +0,0 @@
nodejs_conf = nodejs_init

.include /usr/local/ssl/fipsmodule.cnf

[nodejs_init]
providers = provider_sect

[provider_sect]
default = default_sect
fips = fips_sect

[default_sect]
activate = 1

[algorithm_sect]
default_properties = fips=yes

3341 backend/package-lock.json (generated): file diff suppressed because it is too large.

@@ -40,7 +40,6 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
@@ -61,17 +60,9 @@
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@@ -147,18 +138,17 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.5",
"@octokit/plugin-retry": "^7.1.4",
"@octokit/rest": "^21.1.1",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
@@ -179,7 +169,6 @@
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dd-trace": "^5.40.0",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
@@ -188,7 +177,6 @@
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"isomorphic-dompurify": "^2.22.0",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
@@ -204,6 +192,7 @@
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openai": "^4.85.4",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",

7 backend/src/@types/fastify-request-context.d.ts (new file)
@@ -0,0 +1,7 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
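For context, a minimal sketch (not part of this diff) of how a value declared in the augmented RequestContextData above is typically read elsewhere, assuming the @fastify/request-context plugin is registered on the server; the helper name is hypothetical:

```typescript
// Hypothetical usage sketch: with the module augmentation above,
// reads of "reqId" are typed and may be undefined outside a request.
import { requestContext } from "@fastify/request-context";

export const getCurrentReqId = (): string | undefined => {
  // Returns the request-scoped id stored by the request-context plugin, if any.
  return requestContext.get("reqId");
};
```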
13
backend/src/@types/fastify.d.ts
vendored
13
backend/src/@types/fastify.d.ts
vendored
@ -13,7 +13,6 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
|
||||
@ -33,7 +32,6 @@ import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
|
||||
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
|
||||
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
|
||||
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
|
||||
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
|
||||
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
|
||||
@ -47,6 +45,7 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TAutomatedSecurityServiceFactory } from "@app/services/automated-security/automated-security-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
@ -101,13 +100,6 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
identityAuthInfo?: {
|
||||
identityId: string;
|
||||
oidc?: {
|
||||
claims: Record<string, string>;
|
||||
};
|
||||
};
|
||||
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
|
||||
}
|
||||
}
|
||||
|
||||
@ -237,8 +229,7 @@ declare module "fastify" {
|
||||
secretSync: TSecretSyncServiceFactory;
|
||||
kmip: TKmipServiceFactory;
|
||||
kmipOperation: TKmipOperationServiceFactory;
|
||||
gateway: TGatewayServiceFactory;
|
||||
secretRotationV2: TSecretRotationV2ServiceFactory;
|
||||
automatedSecurity: TAutomatedSecurityServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
84
backend/src/@types/knex.d.ts
vendored
84
backend/src/@types/knex.d.ts
vendored
@ -17,9 +17,6 @@ import {
|
||||
TApiKeys,
|
||||
TApiKeysInsert,
|
||||
TApiKeysUpdate,
|
||||
TAppConnections,
|
||||
TAppConnectionsInsert,
|
||||
TAppConnectionsUpdate,
|
||||
TAuditLogs,
|
||||
TAuditLogsInsert,
|
||||
TAuditLogStreams,
|
||||
@ -32,6 +29,9 @@ import {
|
||||
TAuthTokenSessionsUpdate,
|
||||
TAuthTokensInsert,
|
||||
TAuthTokensUpdate,
|
||||
TAutomatedSecurityReports,
|
||||
TAutomatedSecurityReportsInsert,
|
||||
TAutomatedSecurityReportsUpdate,
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate,
|
||||
@ -68,15 +68,9 @@ import {
|
||||
TDynamicSecrets,
|
||||
TDynamicSecretsInsert,
|
||||
TDynamicSecretsUpdate,
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate,
|
||||
TExternalKms,
|
||||
TExternalKmsInsert,
|
||||
TExternalKmsUpdate,
|
||||
TGateways,
|
||||
TGatewaysInsert,
|
||||
TGatewaysUpdate,
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate,
|
||||
@ -122,6 +116,9 @@ import {
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate,
|
||||
TIdentityProfile,
|
||||
TIdentityProfileInsert,
|
||||
TIdentityProfileUpdate,
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate,
|
||||
@ -188,9 +185,6 @@ import {
|
||||
TOrgBots,
|
||||
TOrgBotsInsert,
|
||||
TOrgBotsUpdate,
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate,
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate,
|
||||
@ -212,9 +206,6 @@ import {
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate,
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate,
|
||||
TProjectKeys,
|
||||
TProjectKeysInsert,
|
||||
TProjectKeysUpdate,
|
||||
@ -305,12 +296,6 @@ import {
|
||||
TSecretRotations,
|
||||
TSecretRotationsInsert,
|
||||
TSecretRotationsUpdate,
|
||||
TSecretRotationsV2,
|
||||
TSecretRotationsV2Insert,
|
||||
TSecretRotationsV2Update,
|
||||
TSecretRotationV2SecretMappings,
|
||||
TSecretRotationV2SecretMappingsInsert,
|
||||
TSecretRotationV2SecretMappingsUpdate,
|
||||
TSecrets,
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
@ -332,27 +317,15 @@ import {
|
||||
TSecretSnapshotsInsert,
|
||||
TSecretSnapshotsUpdate,
|
||||
TSecretsUpdate,
|
||||
TSecretsV2,
|
||||
TSecretsV2Insert,
|
||||
TSecretsV2Update,
|
||||
TSecretSyncs,
|
||||
TSecretSyncsInsert,
|
||||
TSecretSyncsUpdate,
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate,
|
||||
TSecretTags,
|
||||
TSecretTagsInsert,
|
||||
TSecretTagsUpdate,
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate,
|
||||
TSecretVersions,
|
||||
TSecretVersionsInsert,
|
||||
TSecretVersionsUpdate,
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update,
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate,
|
||||
@ -411,6 +384,24 @@ import {
|
||||
TWorkflowIntegrationsInsert,
|
||||
TWorkflowIntegrationsUpdate
|
||||
} from "@app/db/schemas";
|
||||
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
|
||||
import {
|
||||
TExternalGroupOrgRoleMappings,
|
||||
TExternalGroupOrgRoleMappingsInsert,
|
||||
TExternalGroupOrgRoleMappingsUpdate
|
||||
} from "@app/db/schemas/external-group-org-role-mappings";
|
||||
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
|
||||
import {
|
||||
TSecretV2TagJunction,
|
||||
TSecretV2TagJunctionInsert,
|
||||
TSecretV2TagJunctionUpdate
|
||||
} from "@app/db/schemas/secret-v2-tag-junction";
|
||||
import {
|
||||
TSecretVersionsV2,
|
||||
TSecretVersionsV2Insert,
|
||||
TSecretVersionsV2Update
|
||||
} from "@app/db/schemas/secret-versions-v2";
|
||||
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
|
||||
|
||||
declare module "knex" {
|
||||
namespace Knex {
|
||||
@ -945,26 +936,15 @@ declare module "knex/types/tables" {
|
||||
TKmipClientCertificatesInsert,
|
||||
TKmipClientCertificatesUpdate
|
||||
>;
|
||||
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
|
||||
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate
|
||||
[TableName.IdentityProfile]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProfile,
|
||||
TIdentityProfileInsert,
|
||||
TIdentityProfileUpdate
|
||||
>;
|
||||
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate
|
||||
>;
|
||||
[TableName.SecretRotationV2]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationsV2,
|
||||
TSecretRotationsV2Insert,
|
||||
TSecretRotationsV2Update
|
||||
>;
|
||||
[TableName.SecretRotationV2SecretMapping]: KnexOriginal.CompositeTableType<
|
||||
TSecretRotationV2SecretMappings,
|
||||
TSecretRotationV2SecretMappingsInsert,
|
||||
TSecretRotationV2SecretMappingsUpdate
|
||||
[TableName.AutomatedSecurityReports]: KnexOriginal.CompositeTableType<
|
||||
TAutomatedSecurityReports,
|
||||
TAutomatedSecurityReportsInsert,
|
||||
TAutomatedSecurityReportsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
@ -39,7 +39,7 @@ export default {
|
||||
},
|
||||
migrations: {
|
||||
tableName: "infisical_migrations",
|
||||
loadExtensions: [".mjs", ".ts"]
|
||||
loadExtensions: [".mjs"]
|
||||
}
|
||||
},
|
||||
production: {
|
||||
@ -64,7 +64,7 @@ export default {
|
||||
},
|
||||
migrations: {
|
||||
tableName: "infisical_migrations",
|
||||
loadExtensions: [".mjs", ".ts"]
|
||||
loadExtensions: [".mjs"]
|
||||
}
|
||||
}
|
||||
} as Knex.Config;
|
||||
|
@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
|
||||
const startDateStr = formatPartitionDate(startDate);
|
||||
const endDateStr = formatPartitionDate(endDate);
|
||||
|
||||
const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
|
||||
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
|
||||
|
||||
await knex.schema.raw(
|
||||
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
|
||||
|
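The only change in this hunk is swapping replaceAll for a regex replace; the naming scheme is easier to follow with concrete dates. A minimal sketch, with the assumptions called out in the comments:

// A minimal sketch of the naming scheme above, assuming formatPartitionDate
// returns an ISO "YYYY-MM-DD" string and that TableName.AuditLog resolves to
// "audit_logs" (neither is shown in this hunk).
const formatPartitionDate = (date: Date) => date.toISOString().split("T")[0];

const startDateStr = formatPartitionDate(new Date("2025-03-01")); // "2025-03-01"
const endDateStr = formatPartitionDate(new Date("2025-04-01")); // "2025-04-01"

// audit_logs_20250301_20250401
const partitionName = `audit_logs_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;

// Resulting SQL:
// CREATE TABLE audit_logs_20250301_20250401 PARTITION OF audit_logs
//   FOR VALUES FROM ('2025-03-01') TO ('2025-04-01')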
@ -1,115 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
|
||||
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("rootCaKeyAlgorithm").notNullable();
|
||||
|
||||
t.datetime("rootCaIssuedAt").notNullable();
|
||||
t.datetime("rootCaExpiration").notNullable();
|
||||
t.string("rootCaSerialNumber").notNullable();
|
||||
t.binary("encryptedRootCaCertificate").notNullable();
|
||||
t.binary("encryptedRootCaPrivateKey").notNullable();
|
||||
|
||||
t.datetime("clientCaIssuedAt").notNullable();
|
||||
t.datetime("clientCaExpiration").notNullable();
|
||||
t.string("clientCaSerialNumber");
|
||||
t.binary("encryptedClientCaCertificate").notNullable();
|
||||
t.binary("encryptedClientCaPrivateKey").notNullable();
|
||||
|
||||
t.string("clientCertSerialNumber").notNullable();
|
||||
t.string("clientCertKeyAlgorithm").notNullable();
|
||||
t.datetime("clientCertIssuedAt").notNullable();
|
||||
t.datetime("clientCertExpiration").notNullable();
|
||||
t.binary("encryptedClientCertificate").notNullable();
|
||||
t.binary("encryptedClientPrivateKey").notNullable();
|
||||
|
||||
t.datetime("gatewayCaIssuedAt").notNullable();
|
||||
t.datetime("gatewayCaExpiration").notNullable();
|
||||
t.string("gatewayCaSerialNumber").notNullable();
|
||||
t.binary("encryptedGatewayCaCertificate").notNullable();
|
||||
t.binary("encryptedGatewayCaPrivateKey").notNullable();
|
||||
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.unique("orgId");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.Gateway))) {
|
||||
await knex.schema.createTable(TableName.Gateway, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.string("name").notNullable();
|
||||
t.string("serialNumber").notNullable();
|
||||
t.string("keyAlgorithm").notNullable();
|
||||
t.datetime("issuedAt").notNullable();
|
||||
t.datetime("expiration").notNullable();
|
||||
t.datetime("heartbeat");
|
||||
|
||||
t.binary("relayAddress").notNullable();
|
||||
|
||||
t.uuid("orgGatewayRootCaId").notNullable();
|
||||
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
|
||||
|
||||
t.uuid("identityId").notNullable();
|
||||
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.Gateway);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
|
||||
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
|
||||
t.uuid("gatewayId").notNullable();
|
||||
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
// foreign key added without an onDelete action so that deleting a project gateway does not cascade to dynamic secrets
|
||||
if (!doesGatewayColExist) {
|
||||
t.uuid("projectGatewayId");
|
||||
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
|
||||
});
|
||||
}
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.Gateway);
|
||||
await dropOnUpdateTrigger(knex, TableName.Gateway);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
|
||||
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
|
||||
}
|
backend/src/db/migrations/20250224015833_identity-profile.ts (new file, 53 lines)
@ -0,0 +1,53 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.IdentityProfile))) {
|
||||
await knex.schema.createTable(TableName.IdentityProfile, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.uuid("userId");
|
||||
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
t.uuid("identityId");
|
||||
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
|
||||
t.text("temporalProfile").notNullable(); // access pattern or frequency
|
||||
t.text("scopeProfile").notNullable(); // scope of usage - are they accessing development environment secrets. which paths? are they doing mainly admin work?
|
||||
t.text("usageProfile").notNullable(); // method of usage
|
||||
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.IdentityProfile);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.AutomatedSecurityReports))) {
|
||||
await knex.schema.createTable(TableName.AutomatedSecurityReports, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.uuid("profileId").notNullable();
|
||||
t.foreign("profileId").references("id").inTable(TableName.IdentityProfile).onDelete("CASCADE");
|
||||
|
||||
t.jsonb("event").notNullable();
|
||||
|
||||
t.string("remarks").notNullable();
|
||||
t.string("severity").notNullable();
|
||||
t.string("status").notNullable();
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.AutomatedSecurityReports);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.IdentityProfile);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.AutomatedSecurityReports);
|
||||
}
|
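For orientation, a hedged sketch of what a row in the new identity_profile table might look like, based only on the column comments above; the profile string formats, the helper name, and the import path are assumptions, not part of this diff:

// Illustrative only: values are placeholders showing the intent of each column.
import { Knex } from "knex";

import { TableName } from "../schemas";

export const upsertExampleProfile = async (knex: Knex, identityId: string, orgId: string) => {
  await knex(TableName.IdentityProfile).insert({
    identityId,
    orgId,
    temporalProfile: "accesses secrets roughly every 5 minutes during business hours", // access pattern or frequency
    scopeProfile: "reads /backend/* secrets in the development environment", // environments and paths touched
    usageProfile: "machine identity via universal auth" // method of usage
  });
};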
@ -1,25 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
|
||||
if (!hasSharingTypeColumn) {
|
||||
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
|
||||
if (hasSharingTypeColumn) {
|
||||
table.dropColumn("type");
|
||||
}
|
||||
});
|
||||
}
|
@ -1,31 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (!hasAuthConsentContentCol) {
|
||||
t.text("authConsentContent");
|
||||
}
|
||||
if (!hasPageFrameContentCol) {
|
||||
t.text("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (hasAuthConsentContentCol) {
|
||||
t.dropColumn("authConsentContent");
|
||||
}
|
||||
if (hasPageFrameContentCol) {
|
||||
t.dropColumn("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
@ -1,35 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
for await (const tableName of [
|
||||
TableName.SecretV2,
|
||||
TableName.SecretVersionV2,
|
||||
TableName.SecretApprovalRequestSecretV2
|
||||
]) {
|
||||
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
|
||||
|
||||
if (hasReminderNoteCol) {
|
||||
await knex.schema.alterTable(tableName, (t) => {
|
||||
t.string("reminderNote", 1024).alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
for await (const tableName of [
|
||||
TableName.SecretV2,
|
||||
TableName.SecretVersionV2,
|
||||
TableName.SecretApprovalRequestSecretV2
|
||||
]) {
|
||||
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
|
||||
|
||||
if (hasReminderNoteCol) {
|
||||
await knex.schema.alterTable(tableName, (t) => {
|
||||
t.string("reminderNote").alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (!hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.string("description");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropColumn("description");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.string("comment");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.dropColumn("comment");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,45 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
|
||||
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
|
||||
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
|
||||
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
|
||||
if (!hasSecretVersionV2UserActorId) {
|
||||
t.uuid("userActorId");
|
||||
t.foreign("userActorId").references("id").inTable(TableName.Users);
|
||||
}
|
||||
if (!hasSecretVersionV2IdentityActorId) {
|
||||
t.uuid("identityActorId");
|
||||
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
|
||||
}
|
||||
if (!hasSecretVersionV2ActorType) {
|
||||
t.string("actorType");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
|
||||
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
|
||||
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
|
||||
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
|
||||
if (hasSecretVersionV2UserActorId) {
|
||||
t.dropColumn("userActorId");
|
||||
}
|
||||
if (hasSecretVersionV2IdentityActorId) {
|
||||
t.dropColumn("identityActorId");
|
||||
}
|
||||
if (hasSecretVersionV2ActorType) {
|
||||
t.dropColumn("actorType");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -1,32 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
|
||||
TableName.Organization,
|
||||
"allowSecretSharingOutsideOrganization"
|
||||
);
|
||||
|
||||
if (!hasSecretShareToAnyoneCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
|
||||
TableName.Organization,
|
||||
"allowSecretSharingOutsideOrganization"
|
||||
);
|
||||
if (hasSecretShareToAnyoneCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.dropColumn("allowSecretSharingOutsideOrganization");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -1,31 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("shouldUseNewPrivilegeSystem");
|
||||
t.string("privilegeUpgradeInitiatedByUsername");
|
||||
t.dateTime("privilegeUpgradeInitiatedAt");
|
||||
});
|
||||
|
||||
await knex(TableName.Organization).update({
|
||||
shouldUseNewPrivilegeSystem: false
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.dropColumn("shouldUseNewPrivilegeSystem");
|
||||
t.dropColumn("privilegeUpgradeInitiatedByUsername");
|
||||
t.dropColumn("privilegeUpgradeInitiatedAt");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
|
||||
if (!hasMappingField) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
t.jsonb("claimMetadataMapping");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
|
||||
if (hasMappingField) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
t.dropColumn("claimMetadataMapping");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas/models";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.specificType("adminIdentityIds", "text[]");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("adminIdentityIds");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
|
||||
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
|
||||
if (doesParentColumExist && doesNameColumnExist) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.index(["parentId", "name"]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
|
||||
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
|
||||
if (doesParentColumExist && doesNameColumnExist) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropIndex(["parentId", "name"]);
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasReviewerJwtCol = await knex.schema.hasColumn(
|
||||
TableName.IdentityKubernetesAuth,
|
||||
"encryptedKubernetesTokenReviewerJwt"
|
||||
);
|
||||
if (hasReviewerJwtCol) {
|
||||
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
|
||||
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(): Promise<void> {
|
||||
// we can't revert the column to non-nullable; rows that already contain NULL would make the migration fail
|
||||
}
|
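To make that comment concrete: a hypothetical guarded down migration (explicitly not what the code above does) would have to verify that no NULLs exist before restoring the constraint:

// Hypothetical alternative sketch, not part of this diff.
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function guardedDown(knex: Knex): Promise<void> {
  // only restore NOT NULL when no row would violate the constraint
  const rowWithNullJwt = await knex(TableName.IdentityKubernetesAuth)
    .whereNull("encryptedKubernetesTokenReviewerJwt")
    .first();

  if (!rowWithNullJwt) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
    });
  }
}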
@ -1,29 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas/models";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
|
||||
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
|
||||
});
|
||||
}
|
||||
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
|
||||
t.dropColumn("allowedSelfApprovals");
|
||||
});
|
||||
}
|
||||
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.dropColumn("allowedSelfApprovals");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials"))) {
|
||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
||||
t.boolean("isPlatformManagedCredentials").defaultTo(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.AppConnection, "isPlatformManagedCredentials")) {
|
||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
||||
t.dropColumn("isPlatformManagedCredentials");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,58 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.SecretRotationV2))) {
|
||||
await knex.schema.createTable(TableName.SecretRotationV2, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("name", 32).notNullable();
|
||||
t.string("description");
|
||||
t.string("type").notNullable();
|
||||
t.jsonb("parameters").notNullable();
|
||||
t.jsonb("secretsMapping").notNullable();
|
||||
t.binary("encryptedGeneratedCredentials").notNullable();
|
||||
t.boolean("isAutoRotationEnabled").notNullable().defaultTo(true);
|
||||
t.integer("activeIndex").notNullable().defaultTo(0);
|
||||
t.uuid("folderId").notNullable();
|
||||
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
|
||||
t.uuid("connectionId").notNullable();
|
||||
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
|
||||
t.timestamps(true, true, true);
|
||||
t.integer("rotationInterval").notNullable();
|
||||
t.jsonb("rotateAtUtc").notNullable(); // { hours: number; minutes: number }
|
||||
t.string("rotationStatus").notNullable();
|
||||
t.datetime("lastRotationAttemptedAt").notNullable();
|
||||
t.datetime("lastRotatedAt").notNullable();
|
||||
t.binary("encryptedLastRotationMessage"); // we encrypt this because it may contain sensitive info (SQL errors showing credentials)
|
||||
t.string("lastRotationJobId");
|
||||
t.datetime("nextRotationAt");
|
||||
t.boolean("isLastRotationManual").notNullable().defaultTo(true); // creation is considered a "manual" rotation
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.SecretRotationV2);
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretRotationV2, (t) => {
|
||||
t.unique(["folderId", "name"]);
|
||||
});
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.SecretRotationV2SecretMapping))) {
|
||||
await knex.schema.createTable(TableName.SecretRotationV2SecretMapping, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.uuid("secretId").notNullable();
|
||||
// scott: this is deferred to block secret deletion but not prevent folder/environment/project deletion
|
||||
// i.e. if the rotation is being deleted as well we permit it; otherwise the deferred constraint throws
|
||||
t.foreign("secretId").references("id").inTable(TableName.SecretV2).deferrable("deferred");
|
||||
t.uuid("rotationId").notNullable();
|
||||
t.foreign("rotationId").references("id").inTable(TableName.SecretRotationV2).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.SecretRotationV2SecretMapping);
|
||||
await knex.schema.dropTableIfExists(TableName.SecretRotationV2);
|
||||
await dropOnUpdateTrigger(knex, TableName.SecretRotationV2);
|
||||
}
|
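A sketch of what the deferred constraint buys, assuming standard Postgres semantics (the check runs at commit time); knex, rotationId and secretId below are placeholders:

// Sketch only: shows the intended behavior of the deferred secretId foreign key.
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

declare const knex: Knex;
declare const rotationId: string;
declare const secretId: string;

const example = async () => {
  // allowed: deleting the rotation cascades to the mapping row, so the secret
  // is no longer referenced when the transaction commits
  await knex.transaction(async (tx) => {
    await tx(TableName.SecretRotationV2).where({ id: rotationId }).delete();
    await tx(TableName.SecretV2).where({ id: secretId }).delete();
  });

  // rejected at commit: the mapping row still references the secret
  await knex.transaction(async (tx) => {
    await tx(TableName.SecretV2).where({ id: secretId }).delete();
  });
};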
@ -1,21 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
|
||||
if (!hasCol) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.datetime("lastSecretModified");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
|
||||
if (hasCol) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropColumn("lastSecretModified");
|
||||
});
|
||||
}
|
||||
}
|
@ -16,8 +16,7 @@ export const AccessApprovalPoliciesSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
deletedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
|
||||
|
@ -19,8 +19,7 @@ export const AppConnectionsSchema = z.object({
|
||||
version: z.number().default(1),
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
isPlatformManagedCredentials: z.boolean().default(false).nullable().optional()
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
|
||||
|
backend/src/db/schemas/automated-security-reports.ts (new file, 25 lines)
@ -0,0 +1,25 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const AutomatedSecurityReportsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
profileId: z.string().uuid(),
|
||||
event: z.unknown(),
|
||||
remarks: z.string(),
|
||||
severity: z.string(),
|
||||
status: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TAutomatedSecurityReports = z.infer<typeof AutomatedSecurityReportsSchema>;
|
||||
export type TAutomatedSecurityReportsInsert = Omit<z.input<typeof AutomatedSecurityReportsSchema>, TImmutableDBKeys>;
|
||||
export type TAutomatedSecurityReportsUpdate = Partial<
|
||||
Omit<z.input<typeof AutomatedSecurityReportsSchema>, TImmutableDBKeys>
|
||||
>;
|
@ -26,8 +26,7 @@ export const DynamicSecretsSchema = z.object({
|
||||
statusDetails: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
encryptedInput: zodBuffer,
|
||||
projectGatewayId: z.string().uuid().nullable().optional()
|
||||
encryptedInput: zodBuffer
|
||||
});
|
||||
|
||||
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
|
||||
|
@ -1,29 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const GatewaysSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
serialNumber: z.string(),
|
||||
keyAlgorithm: z.string(),
|
||||
issuedAt: z.date(),
|
||||
expiration: z.date(),
|
||||
heartbeat: z.date().nullable().optional(),
|
||||
relayAddress: zodBuffer,
|
||||
orgGatewayRootCaId: z.string().uuid(),
|
||||
identityId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TGateways = z.infer<typeof GatewaysSchema>;
|
||||
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
|
||||
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;
|
@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
|
||||
allowedNamespaces: z.string(),
|
||||
allowedNames: z.string(),
|
||||
allowedAudience: z.string(),
|
||||
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
|
||||
encryptedKubernetesTokenReviewerJwt: zodBuffer,
|
||||
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
|
@ -26,8 +26,7 @@ export const IdentityOidcAuthsSchema = z.object({
|
||||
boundSubject: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
encryptedCaCertificate: zodBuffer.nullable().optional(),
|
||||
claimMetadataMapping: z.unknown().nullable().optional()
|
||||
encryptedCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
|
||||
|
backend/src/db/schemas/identity-profile.ts (new file, 24 lines)
@ -0,0 +1,24 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityProfileSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
identityId: z.string().uuid().nullable().optional(),
|
||||
temporalProfile: z.string(),
|
||||
scopeProfile: z.string(),
|
||||
usageProfile: z.string(),
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TIdentityProfile = z.infer<typeof IdentityProfileSchema>;
|
||||
export type TIdentityProfileInsert = Omit<z.input<typeof IdentityProfileSchema>, TImmutableDBKeys>;
|
||||
export type TIdentityProfileUpdate = Partial<Omit<z.input<typeof IdentityProfileSchema>, TImmutableDBKeys>>;
|
@ -3,11 +3,11 @@ export * from "./access-approval-policies-approvers";
|
||||
export * from "./access-approval-requests";
|
||||
export * from "./access-approval-requests-reviewers";
|
||||
export * from "./api-keys";
|
||||
export * from "./app-connections";
|
||||
export * from "./audit-log-streams";
|
||||
export * from "./audit-logs";
|
||||
export * from "./auth-token-sessions";
|
||||
export * from "./auth-tokens";
|
||||
export * from "./automated-security-reports";
|
||||
export * from "./backup-private-key";
|
||||
export * from "./certificate-authorities";
|
||||
export * from "./certificate-authority-certs";
|
||||
@ -20,9 +20,7 @@ export * from "./certificate-templates";
|
||||
export * from "./certificates";
|
||||
export * from "./dynamic-secret-leases";
|
||||
export * from "./dynamic-secrets";
|
||||
export * from "./external-group-org-role-mappings";
|
||||
export * from "./external-kms";
|
||||
export * from "./gateways";
|
||||
export * from "./git-app-install-sessions";
|
||||
export * from "./git-app-org";
|
||||
export * from "./group-project-membership-roles";
|
||||
@ -38,6 +36,7 @@ export * from "./identity-kubernetes-auths";
|
||||
export * from "./identity-metadata";
|
||||
export * from "./identity-oidc-auths";
|
||||
export * from "./identity-org-memberships";
|
||||
export * from "./identity-profile";
|
||||
export * from "./identity-project-additional-privilege";
|
||||
export * from "./identity-project-membership-role";
|
||||
export * from "./identity-project-memberships";
|
||||
@ -60,7 +59,6 @@ export * from "./ldap-group-maps";
|
||||
export * from "./models";
|
||||
export * from "./oidc-configs";
|
||||
export * from "./org-bots";
|
||||
export * from "./org-gateway-config";
|
||||
export * from "./org-memberships";
|
||||
export * from "./org-roles";
|
||||
export * from "./organizations";
|
||||
@ -69,7 +67,6 @@ export * from "./pki-collection-items";
|
||||
export * from "./pki-collections";
|
||||
export * from "./project-bots";
|
||||
export * from "./project-environments";
|
||||
export * from "./project-gateways";
|
||||
export * from "./project-keys";
|
||||
export * from "./project-memberships";
|
||||
export * from "./project-roles";
|
||||
@ -99,16 +96,13 @@ export * from "./secret-references";
|
||||
export * from "./secret-references-v2";
|
||||
export * from "./secret-rotation-output-v2";
|
||||
export * from "./secret-rotation-outputs";
|
||||
export * from "./secret-rotation-v2-secret-mappings";
|
||||
export * from "./secret-rotations";
|
||||
export * from "./secret-rotations-v2";
|
||||
export * from "./secret-scanning-git-risks";
|
||||
export * from "./secret-sharing";
|
||||
export * from "./secret-snapshot-folders";
|
||||
export * from "./secret-snapshot-secrets";
|
||||
export * from "./secret-snapshot-secrets-v2";
|
||||
export * from "./secret-snapshots";
|
||||
export * from "./secret-syncs";
|
||||
export * from "./secret-tag-junction";
|
||||
export * from "./secret-tags";
|
||||
export * from "./secret-v2-tag-junction";
|
||||
|
@ -80,6 +80,8 @@ export enum TableName {
|
||||
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
|
||||
// used by both identity and users
|
||||
IdentityMetadata = "identity_metadata",
|
||||
IdentityProfile = "identity_profile",
|
||||
AutomatedSecurityReports = "automated_security_reports",
|
||||
ResourceMetadata = "resource_metadata",
|
||||
ScimToken = "scim_tokens",
|
||||
AccessApprovalPolicy = "access_approval_policies",
|
||||
@ -113,10 +115,6 @@ export enum TableName {
|
||||
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
|
||||
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
|
||||
ProjectSplitBackfillIds = "project_split_backfill_ids",
|
||||
// Gateway
|
||||
OrgGatewayConfig = "org_gateway_config",
|
||||
Gateway = "gateways",
|
||||
ProjectGateway = "project_gateways",
|
||||
// junction tables with tags
|
||||
SecretV2JnTag = "secret_v2_tag_junction",
|
||||
JnSecretTag = "secret_tag_junction",
|
||||
@ -140,9 +138,7 @@ export enum TableName {
|
||||
KmipClient = "kmip_clients",
|
||||
KmipOrgConfig = "kmip_org_configs",
|
||||
KmipOrgServerCertificates = "kmip_org_server_certificates",
|
||||
KmipClientCertificates = "kmip_client_certificates",
|
||||
SecretRotationV2 = "secret_rotations_v2",
|
||||
SecretRotationV2SecretMapping = "secret_rotation_v2_secret_mappings"
|
||||
KmipClientCertificates = "kmip_client_certificates"
|
||||
}
|
||||
|
||||
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
||||
@ -235,8 +231,3 @@ export enum ActionProjectType {
|
||||
// project operations that happen on all types
|
||||
Any = "any"
|
||||
}
|
||||
|
||||
export enum SortDirection {
|
||||
ASC = "asc",
|
||||
DESC = "desc"
|
||||
}
|
||||
|
@ -1,43 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const OrgGatewayConfigSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
rootCaKeyAlgorithm: z.string(),
|
||||
rootCaIssuedAt: z.date(),
|
||||
rootCaExpiration: z.date(),
|
||||
rootCaSerialNumber: z.string(),
|
||||
encryptedRootCaCertificate: zodBuffer,
|
||||
encryptedRootCaPrivateKey: zodBuffer,
|
||||
clientCaIssuedAt: z.date(),
|
||||
clientCaExpiration: z.date(),
|
||||
clientCaSerialNumber: z.string().nullable().optional(),
|
||||
encryptedClientCaCertificate: zodBuffer,
|
||||
encryptedClientCaPrivateKey: zodBuffer,
|
||||
clientCertSerialNumber: z.string(),
|
||||
clientCertKeyAlgorithm: z.string(),
|
||||
clientCertIssuedAt: z.date(),
|
||||
clientCertExpiration: z.date(),
|
||||
encryptedClientCertificate: zodBuffer,
|
||||
encryptedClientPrivateKey: zodBuffer,
|
||||
gatewayCaIssuedAt: z.date(),
|
||||
gatewayCaExpiration: z.date(),
|
||||
gatewayCaSerialNumber: z.string(),
|
||||
encryptedGatewayCaCertificate: zodBuffer,
|
||||
encryptedGatewayCaPrivateKey: zodBuffer,
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
|
||||
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
|
||||
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;
|
@ -22,11 +22,7 @@ export const OrganizationsSchema = z.object({
|
||||
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
|
||||
defaultMembershipRole: z.string().default("member"),
|
||||
enforceMfa: z.boolean().default(false),
|
||||
selectedMfaMethod: z.string().nullable().optional(),
|
||||
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional(),
|
||||
shouldUseNewPrivilegeSystem: z.boolean().default(true),
|
||||
privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
|
||||
privilegeUpgradeInitiatedAt: z.date().nullable().optional()
|
||||
selectedMfaMethod: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TOrganizations = z.infer<typeof OrganizationsSchema>;
|
||||
|
@ -1,20 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ProjectGatewaysSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
projectId: z.string(),
|
||||
gatewayId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
|
||||
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
|
||||
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;
|
@ -16,8 +16,7 @@ export const SecretApprovalPoliciesSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
deletedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
|
||||
|
@ -13,8 +13,7 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
|
||||
requestId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
reviewerUserId: z.string().uuid(),
|
||||
comment: z.string().nullable().optional()
|
||||
reviewerUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
|
||||
|
@ -15,9 +15,7 @@ export const SecretFoldersSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
envId: z.string().uuid(),
|
||||
parentId: z.string().uuid().nullable().optional(),
|
||||
isReserved: z.boolean().default(false).nullable().optional(),
|
||||
description: z.string().nullable().optional(),
|
||||
lastSecretModified: z.date().nullable().optional()
|
||||
isReserved: z.boolean().default(false).nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
|
||||
|
@ -1,23 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretRotationV2SecretMappingsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
secretId: z.string().uuid(),
|
||||
rotationId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TSecretRotationV2SecretMappings = z.infer<typeof SecretRotationV2SecretMappingsSchema>;
|
||||
export type TSecretRotationV2SecretMappingsInsert = Omit<
|
||||
z.input<typeof SecretRotationV2SecretMappingsSchema>,
|
||||
TImmutableDBKeys
|
||||
>;
|
||||
export type TSecretRotationV2SecretMappingsUpdate = Partial<
|
||||
Omit<z.input<typeof SecretRotationV2SecretMappingsSchema>, TImmutableDBKeys>
|
||||
>;
|
@ -1,39 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretRotationsV2Schema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
description: z.string().nullable().optional(),
|
||||
type: z.string(),
|
||||
parameters: z.unknown(),
|
||||
secretsMapping: z.unknown(),
|
||||
encryptedGeneratedCredentials: zodBuffer,
|
||||
isAutoRotationEnabled: z.boolean().default(true),
|
||||
activeIndex: z.number().default(0),
|
||||
folderId: z.string().uuid(),
|
||||
connectionId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
rotationInterval: z.number(),
|
||||
rotateAtUtc: z.unknown(),
|
||||
rotationStatus: z.string(),
|
||||
lastRotationAttemptedAt: z.date(),
|
||||
lastRotatedAt: z.date(),
|
||||
encryptedLastRotationMessage: zodBuffer.nullable().optional(),
|
||||
lastRotationJobId: z.string().nullable().optional(),
|
||||
nextRotationAt: z.date().nullable().optional(),
|
||||
isLastRotationManual: z.boolean().default(true)
|
||||
});
|
||||
|
||||
export type TSecretRotationsV2 = z.infer<typeof SecretRotationsV2Schema>;
|
||||
export type TSecretRotationsV2Insert = Omit<z.input<typeof SecretRotationsV2Schema>, TImmutableDBKeys>;
|
||||
export type TSecretRotationsV2Update = Partial<Omit<z.input<typeof SecretRotationsV2Schema>, TImmutableDBKeys>>;
|
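The generated schema leaves rotateAtUtc typed as unknown. Based purely on the migration comment earlier in this diff ("{ hours: number; minutes: number }"), a consumer could narrow it with a sketch like the one below; the refinement is an assumption, not part of the generated file:

import { z } from "zod";

// Narrows the rotateAtUtc payload to the shape documented in the migration.
export const RotateAtUtcSchema = z.object({
  hours: z.number().int().min(0).max(23),
  minutes: z.number().int().min(0).max(59)
});

export type TRotateAtUtc = z.infer<typeof RotateAtUtcSchema>;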
@ -26,8 +26,7 @@ export const SecretSharingSchema = z.object({
|
||||
lastViewedAt: z.date().nullable().optional(),
|
||||
password: z.string().nullable().optional(),
|
||||
encryptedSecret: zodBuffer.nullable().optional(),
|
||||
identifier: z.string().nullable().optional(),
|
||||
type: z.string().default("share")
|
||||
identifier: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
|
||||
|
@ -25,10 +25,7 @@ export const SecretVersionsV2Schema = z.object({
|
||||
folderId: z.string().uuid(),
|
||||
userId: z.string().uuid().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
userActorId: z.string().uuid().nullable().optional(),
|
||||
identityActorId: z.string().uuid().nullable().optional(),
|
||||
actorType: z.string().nullable().optional()
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;
|
||||
|
@ -23,10 +23,7 @@ export const SuperAdminSchema = z.object({
|
||||
defaultAuthOrgId: z.string().uuid().nullable().optional(),
|
||||
enabledLoginMethods: z.string().array().nullable().optional(),
|
||||
encryptedSlackClientId: zodBuffer.nullable().optional(),
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
|
||||
authConsentContent: z.string().nullable().optional(),
|
||||
pageFrameContent: z.string().nullable().optional(),
|
||||
adminIdentityIds: z.string().array().nullable().optional()
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
|
||||
|
@ -16,7 +16,7 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =
|
||||
// for CSRs sent in PEM, we leave them as is
|
||||
// for CSRs sent in base64, we preprocess them to remove new lines and spaces
|
||||
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
|
||||
csrBody = csrBody.replaceAll("\n", "").replaceAll(" ", "");
|
||||
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
|
||||
}
|
||||
|
||||
done(null, csrBody);
|
||||
|
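A minimal sketch of the normalization this hunk touches, assuming the raw request body arrives as a string (the surrounding content-type parser registration is not shown here):

// PEM-framed CSRs pass through untouched; bare base64 bodies may contain line
// breaks and spaces, which are stripped before parsing.
const normalizeCsrBody = (raw: string): string => {
  if (raw.includes("BEGIN CERTIFICATE REQUEST")) return raw;
  return raw.replace(/\n/g, "").replace(/ /g, "");
};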
@ -29,8 +29,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -148,8 +147,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).optional(),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -110,8 +110,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
secretPath: z.string().nullish(),
|
||||
envId: z.string(),
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish(),
|
||||
allowedSelfApprovals: z.boolean()
|
||||
deletedAt: z.date().nullish()
|
||||
}),
|
||||
reviewers: z
|
||||
.object({
|
||||
|
@ -1,10 +1,10 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
|
@ -1,3 +1,4 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
@ -5,7 +6,6 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
|
||||
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
|
@ -1,265 +0,0 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { GatewaysSchema } from "@app/db/schemas";
|
||||
import { isValidIp } from "@app/lib/ip";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const SanitizedGatewaySchema = GatewaysSchema.pick({
|
||||
id: true,
|
||||
identityId: true,
|
||||
name: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
issuedAt: true,
|
||||
serialNumber: true,
|
||||
heartbeat: true
|
||||
});
|
||||
|
||||
const isValidRelayAddress = (relayAddress: string) => {
|
||||
const [ip, port] = relayAddress.split(":");
|
||||
return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
|
||||
};
|
||||
|
||||
export const registerGatewayRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/register-identity",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
turnServerUsername: z.string(),
|
||||
turnServerPassword: z.string(),
|
||||
turnServerRealm: z.string(),
|
||||
turnServerAddress: z.string(),
|
||||
infisicalStaticIp: z.string().optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const relayDetails = await server.services.gateway.getGatewayRelayDetails(
|
||||
req.permission.id,
|
||||
req.permission.orgId,
|
||||
req.permission.authMethod
|
||||
);
|
||||
return relayDetails;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/exchange-cert",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
serialNumber: z.string(),
|
||||
privateKey: z.string(),
|
||||
certificate: z.string(),
|
||||
certificateChain: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
|
||||
identityOrg: req.permission.orgId,
|
||||
identityId: req.permission.id,
|
||||
relayAddress: req.body.relayAddress,
|
||||
identityOrgAuthMethod: req.permission.authMethod
|
||||
});
|
||||
return gatewayCertificates;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/heartbeat",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
await server.services.gateway.heartbeat({
|
||||
orgPermission: req.permission
|
||||
});
|
||||
return { message: "Successfully registered heartbeat" };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
projectId: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateways: SanitizedGatewaySchema.extend({
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
}),
|
||||
projects: z
|
||||
.object({
|
||||
name: z.string(),
|
||||
id: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
.array()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateways = await server.services.gateway.listGateways({
|
||||
orgPermission: req.permission
|
||||
});
|
||||
return { gateways };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/projects/:projectId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateways: SanitizedGatewaySchema.extend({
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
}),
|
||||
projectGatewayId: z.string()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateways = await server.services.gateway.getProjectGateways({
|
||||
projectId: req.params.projectId,
|
||||
projectPermission: req.permission
|
||||
});
|
||||
return { gateways };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateway: SanitizedGatewaySchema.extend({
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateway = await server.services.gateway.getGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
name: slugSchema({ field: "name" }).optional(),
|
||||
projectIds: z.string().array().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateway: SanitizedGatewaySchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateway = await server.services.gateway.updateGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id,
|
||||
name: req.body.name,
|
||||
projectIds: req.body.projectIds
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateway: SanitizedGatewaySchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateway = await server.services.gateway.deleteGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
});
|
||||
};
|
@ -1,11 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";

import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@ -7,7 +7,6 @@ import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerKmipRouter } from "./kmip-router";
@ -68,8 +67,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/dynamic-secrets" }
);

await server.register(registerGatewayRouter, { prefix: "/gateways" });

await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
@ -1,10 +1,10 @@
import ms from "ms";
import { z } from "zod";

import { KmipClientsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
import { ms } from "@app/lib/ms";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -61,8 +61,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
if (ldapConfig.groupSearchBase) {
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
.replaceAll("{{.Username}}", user.uid)
.replaceAll("{{.UserDN}}", user.dn);
.replace(/{{\.Username}}/g, user.uid)
.replace(/{{\.UserDN}}/g, user.dn);

if (!isValidLdapFilter(groupSearchFilter)) {
throw new Error("Generated LDAP search filter is invalid.");
@ -25,7 +25,7 @@ type TSAMLConfig = {
callbackUrl: string;
entryPoint: string;
issuer: string;
idpCert: string;
cert: string;
audience: string;
wantAuthnResponseSigned?: boolean;
wantAssertionsSigned?: boolean;
@ -72,7 +72,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
entryPoint: ssoConfig.entryPoint,
issuer: ssoConfig.issuer,
idpCert: ssoConfig.cert,
cert: ssoConfig.cert,
audience: appCfg.SITE_URL || ""
};
if (ssoConfig.authProvider === SamlProviders.JUMPCLOUD_SAML) {
@ -302,21 +302,15 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
const { permission } = req;

return server.services.saml.createSamlCfg({
isActive,
authProvider,
issuer,
entryPoint,
idpCert: cert,
actor: permission.type,
actorId: permission.id,
actorAuthMethod: permission.authMethod,
actorOrgId: permission.orgId,
orgId: req.body.organizationId
const saml = await server.services.saml.createSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
});
return saml;
}
});

@ -343,21 +337,15 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
const { permission } = req;

return server.services.saml.updateSamlCfg({
isActive,
authProvider,
issuer,
entryPoint,
idpCert: cert,
actor: permission.type,
actorId: permission.id,
actorAuthMethod: permission.authMethod,
actorOrgId: permission.orgId,
orgId: req.body.organizationId
const saml = await server.services.saml.updateSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
});
return saml;
}
});
};
@ -35,8 +35,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
response: {
200: z.object({
@ -86,8 +85,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
}),
response: {
200: z.object({
@ -1,11 +1,16 @@
import { z } from "zod";

import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import {
SecretApprovalRequestsReviewersSchema,
SecretApprovalRequestsSchema,
SecretTagsSchema,
UsersSchema
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";

@ -49,8 +54,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -155,8 +159,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
}),
response: {
200: z.object({
@ -172,25 +175,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status,
comment: req.body.comment
status: req.body.status
});

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});

return { review };
}
});
@ -246,6 +232,15 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}
});

const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.array()
.optional();

server.route({
method: "GET",
url: "/:id",
@ -268,21 +263,18 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
deletedAt: z.date().nullish()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
.extend({
secretValue: z.string().optional(),
isRotatedSecret: z.boolean().optional(),
op: z.string(),
tags: SanitizedTagSchema.array().optional(),
tags: tagSchema,
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
@ -301,7 +293,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: SanitizedTagSchema.array().optional(),
tags: tagSchema,
secretMetadata: ResourceMetadataSchema.nullish()
})
.optional()
@ -1,7 +1,6 @@
import { z } from "zod";

import { SecretRotationOutputsSchema, SecretRotationsSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -41,10 +40,16 @@ export const registerSecretRotationRouter = async (server: FastifyZodProvider) =
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async () => {
throw new BadRequestError({
message: `This version of Secret Rotations has been deprecated. Please see docs for new version.`
handler: async (req) => {
const secretRotation = await server.services.secretRotation.createRotation({
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
...req.body,
projectId: req.body.workspaceId
});
return { secretRotation };
}
});

@ -1,6 +1,6 @@
import z from "zod";

import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";

const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()
@ -22,11 +22,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
secretVersions: secretRawSchema
.extend({
secretValueHidden: z.boolean()
})
.array()
secretVersions: secretRawSchema.array()
})
}
},
@ -41,7 +37,6 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
offset: req.query.offset,
secretId: req.params.secretId
});

return { secretVersions };
}
});
@ -1,10 +1,10 @@
import { z } from "zod";

import { SecretSnapshotsSchema } from "@app/db/schemas";
import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@ -31,10 +31,13 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
secretVersions: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true })
.extend({
secretValueHidden: z.boolean(),
secretId: z.string(),
tags: SanitizedTagSchema.array(),
isRotatedSecret: z.boolean().optional()
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
})
.array(),
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@ -53,7 +56,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});

return { secretSnapshot };
}
});
@ -1,15 +1,13 @@
import ms from "ms";
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
@ -75,16 +73,6 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});

await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});

return {
serialNumber,
signedKey: signedPublicKey
@ -164,16 +152,6 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});

await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});

return {
serialNumber,
signedKey: signedPublicKey,
@ -1,4 +1,5 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
@ -9,7 +10,6 @@ import {
isValidUserPattern
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -1,11 +1,10 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";

import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@ -24,9 +23,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@ -84,8 +81,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
.refine(checkForInvalidPermissionCombination),
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
z.object({
@ -1,11 +1,10 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";

import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@ -31,9 +30,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
.refine(checkForInvalidPermissionCombination),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@ -97,8 +94,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
.refine(checkForInvalidPermissionCombination),
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
z.object({
@ -1,8 +1,3 @@
import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";

import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";

@ -18,17 +13,4 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerIdentityProjectAdditionalPrivilegeRouter, {
prefix: "/identity-project-additional-privilege"
});

await server.register(
async (secretRotationV2Router) => {
// register generic secret rotation endpoints
await secretRotationV2Router.register(registerSecretRotationV2Router);

// register service specific secret rotation endpoints (secret-rotations/postgres-credentials, etc.)
for await (const [type, router] of Object.entries(SECRET_ROTATION_REGISTER_ROUTER_MAP)) {
await secretRotationV2Router.register(router, { prefix: `/${type}` });
}
},
{ prefix: "/secret-rotations" }
);
};
@ -2,7 +2,6 @@ import { packRules } from "@casl/ability/extra";
import { z } from "zod";

import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -38,9 +37,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination)
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
200: z.object({
@ -95,10 +92,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.UPDATE.slug),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.UPDATE.permissions)
.optional()
.superRefine(checkForInvalidPermissionCombination)
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {
200: z.object({
@ -1,14 +0,0 @@
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";

import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";

export * from "./secret-rotation-v2-router";

export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
SecretRotation,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter
};
@ -1,19 +0,0 @@
import {
CreateMsSqlCredentialsRotationSchema,
MsSqlCredentialsRotationSchema,
UpdateMsSqlCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";

import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";

export const registerMsSqlCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.MsSqlCredentials,
server,
responseSchema: MsSqlCredentialsRotationSchema,
createSchema: CreateMsSqlCredentialsRotationSchema,
updateSchema: UpdateMsSqlCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});
@ -1,19 +0,0 @@
import {
CreatePostgresCredentialsRotationSchema,
PostgresCredentialsRotationSchema,
UpdatePostgresCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";

import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";

export const registerPostgresCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.PostgresCredentials,
server,
responseSchema: PostgresCredentialsRotationSchema,
createSchema: CreatePostgresCredentialsRotationSchema,
updateSchema: UpdatePostgresCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});
@ -1,429 +0,0 @@
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import {
TRotateAtUtc,
TSecretRotationV2,
TSecretRotationV2GeneratedCredentials,
TSecretRotationV2Input
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { SecretRotations } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSecretRotationEndpoints = <
T extends TSecretRotationV2,
I extends TSecretRotationV2Input,
C extends TSecretRotationV2GeneratedCredentials
>({
server,
type,
createSchema,
updateSchema,
responseSchema,
generatedCredentialsSchema
}: {
type: SecretRotation;
server: FastifyZodProvider;
createSchema: z.ZodType<{
name: string;
environment: string;
secretPath: string;
projectId: string;
connectionId: string;
parameters: I["parameters"];
secretsMapping: I["secretsMapping"];
description?: string | null;
isAutoRotationEnabled?: boolean;
rotationInterval: number;
rotateAtUtc?: TRotateAtUtc;
}>;
updateSchema: z.ZodType<{
connectionId?: string;
name?: string;
environment?: string;
secretPath?: string;
parameters?: I["parameters"];
secretsMapping?: I["secretsMapping"];
description?: string | null;
isAutoRotationEnabled?: boolean;
rotationInterval?: number;
rotateAtUtc?: TRotateAtUtc;
}>;
responseSchema: z.ZodTypeAny;
generatedCredentialsSchema: z.ZodTypeAny;
}) => {
const rotationType = SECRET_ROTATION_NAME_MAP[type];

server.route({
method: "GET",
url: `/`,
config: {
rateLimit: readLimit
},
schema: {
description: `List the ${rotationType} Rotations for the specified project.`,
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretRotations.LIST(type).projectId)
}),
response: {
200: z.object({ secretRotations: responseSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId }
} = req;

const secretRotations = (await server.services.secretRotationV2.listSecretRotationsByProjectId(
{ projectId, type },
req.permission
)) as T[];

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.GET_SECRET_ROTATIONS,
metadata: {
type,
count: secretRotations.length,
rotationIds: secretRotations.map((rotation) => rotation.id)
}
}
});

return { secretRotations };
}
});

server.route({
method: "GET",
url: "/:rotationId",
config: {
rateLimit: readLimit
},
schema: {
description: `Get the specified ${rotationType} Rotation by ID.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.GET_BY_ID(type).rotationId)
}),
response: {
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;

const secretRotation = (await server.services.secretRotationV2.findSecretRotationById(
{ rotationId, type },
req.permission
)) as T;

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: secretRotation.projectId,
event: {
type: EventType.GET_SECRET_ROTATION,
metadata: {
rotationId,
type,
secretPath: secretRotation.folder.path,
environment: secretRotation.environment.slug
}
}
});

return { secretRotation };
}
});

server.route({
method: "GET",
url: `/rotation-name/:rotationName`,
config: {
rateLimit: readLimit
},
schema: {
description: `Get the specified ${rotationType} Rotation by name, secret path, environment and project ID.`,
params: z.object({
rotationName: z
.string()
.trim()
.min(1, "Rotation name required")
.describe(SecretRotations.GET_BY_NAME(type).rotationName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretRotations.GET_BY_NAME(type).projectId),
secretPath: z
.string()
.trim()
.min(1, "Secret path required")
.describe(SecretRotations.GET_BY_NAME(type).secretPath),
environment: z
.string()
.trim()
.min(1, "Environment required")
.describe(SecretRotations.GET_BY_NAME(type).environment)
}),
response: {
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationName } = req.params;
const { projectId, secretPath, environment } = req.query;

const secretRotation = (await server.services.secretRotationV2.findSecretRotationByName(
{ rotationName, projectId, type, secretPath, environment },
req.permission
)) as T;

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.GET_SECRET_ROTATION,
metadata: {
rotationId: secretRotation.id,
type,
secretPath,
environment
}
}
});

return { secretRotation };
}
});

server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: `Create ${
startsWithVowel(rotationType) ? "an" : "a"
} ${rotationType} Rotation for the specified project.`,
body: createSchema,
response: {
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const secretRotation = (await server.services.secretRotationV2.createSecretRotation(
{ ...req.body, type },
req.permission
)) as T;

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: secretRotation.projectId,
event: {
type: EventType.CREATE_SECRET_ROTATION,
metadata: {
rotationId: secretRotation.id,
type,
...req.body
}
}
});

return { secretRotation };
}
});

server.route({
method: "PATCH",
url: "/:rotationId",
config: {
rateLimit: writeLimit
},
schema: {
description: `Update the specified ${rotationType} Rotation.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.UPDATE(type).rotationId)
}),
body: updateSchema,
response: {
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;

const secretRotation = (await server.services.secretRotationV2.updateSecretRotation(
{ ...req.body, rotationId, type },
req.permission
)) as T;

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: secretRotation.projectId,
event: {
type: EventType.UPDATE_SECRET_ROTATION,
metadata: {
rotationId,
type,
...req.body
}
}
});

return { secretRotation };
}
});

server.route({
method: "DELETE",
url: `/:rotationId`,
config: {
rateLimit: writeLimit
},
schema: {
description: `Delete the specified ${rotationType} Rotation.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.DELETE(type).rotationId)
}),
querystring: z.object({
deleteSecrets: z
.enum(["true", "false"])
.transform((value) => value === "true")
.describe(SecretRotations.DELETE(type).deleteSecrets),
revokeGeneratedCredentials: z
.enum(["true", "false"])
.transform((value) => value === "true")
.describe(SecretRotations.DELETE(type).revokeGeneratedCredentials)
}),
response: {
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;
const { deleteSecrets, revokeGeneratedCredentials } = req.query;

const secretRotation = (await server.services.secretRotationV2.deleteSecretRotation(
{ type, rotationId, deleteSecrets, revokeGeneratedCredentials },
req.permission
)) as T;

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: secretRotation.projectId,
event: {
type: EventType.DELETE_SECRET_ROTATION,
metadata: {
type,
rotationId,
deleteSecrets,
revokeGeneratedCredentials
}
}
});

return { secretRotation };
}
});

server.route({
method: "GET",
url: "/:rotationId/generated-credentials",
config: {
rateLimit: readLimit
},
schema: {
description: `Get the generated credentials for the specified ${rotationType} Rotation.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.GET_GENERATED_CREDENTIALS_BY_ID(type).rotationId)
}),
response: {
200: z.object({
generatedCredentials: generatedCredentialsSchema,
activeIndex: z.number(),
rotationId: z.string().uuid(),
type: z.literal(type)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;

const {
generatedCredentials,
secretRotation: { activeIndex, projectId, folder, environment }
} = await server.services.secretRotationV2.findSecretRotationGeneratedCredentialsById(
{
rotationId,
type
},
req.permission
);

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS,
metadata: {
type,
rotationId,
secretPath: folder.path,
environment: environment.slug
}
}
});

return { generatedCredentials: generatedCredentials as C, activeIndex, rotationId, type };
}
});

server.route({
method: "POST",
url: "/:rotationId/rotate-secrets",
config: {
rateLimit: writeLimit
},
schema: {
description: `Rotate the generated credentials for the specified ${rotationType} Rotation.`,
params: z.object({
rotationId: z.string().uuid().describe(SecretRotations.ROTATE(type).rotationId)
}),
response: {
200: z.object({ secretRotation: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { rotationId } = req.params;

const secretRotation = (await server.services.secretRotationV2.rotateSecretRotation(
{
rotationId,
type,
auditLogInfo: req.auditLogInfo
},
req.permission
)) as T;

return { secretRotation };
}
});
};
@ -1,81 +0,0 @@
import { z } from "zod";

import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
import { SecretRotations } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
PostgresCredentialsRotationListItemSchema,
MsSqlCredentialsRotationListItemSchema
]);

export const registerSecretRotationV2Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/options",
config: {
rateLimit: readLimit
},
schema: {
description: "List the available Secret Rotation Options.",
response: {
200: z.object({
secretRotationOptions: SecretRotationV2OptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const secretRotationOptions = server.services.secretRotationV2.listSecretRotationOptions();
return { secretRotationOptions };
}
});

server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
description: "List all the Secret Rotations for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretRotations.LIST().projectId)
}),
response: {
200: z.object({ secretRotations: SecretRotationV2Schema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;

const secretRotations = await server.services.secretRotationV2.listSecretRotationsByProjectId(
{ projectId },
permission
);

await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.GET_SECRET_ROTATIONS,
metadata: {
rotationIds: secretRotations.map((sync) => sync.id),
count: secretRotations.length
}
}
});

return { secretRotations };
}
});
};
@ -65,8 +65,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -154,8 +153,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);
@ -218,8 +216,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
@ -265,8 +262,7 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
enforcementLevel
},
tx
);

@ -26,7 +26,6 @@ export type TCreateAccessApprovalPolicy = {
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateAccessApprovalPolicy = {
@ -36,7 +35,6 @@ export type TUpdateAccessApprovalPolicy = {
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteAccessApprovalPolicy = {
@ -61,7 +61,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
@ -120,7 +119,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
@ -256,7 +254,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
@ -278,7 +275,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
deletedAt: el.policyDeletedAt
},
requestedByUser: {
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import msFn from "ms";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
@ -247,7 +246,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
requesterEmail: requestedByUser.email,
|
||||
isTemporary,
|
||||
...(isTemporary && {
|
||||
expiresIn: msFn(ms(temporaryRange || ""), { long: true })
|
||||
expiresIn: ms(ms(temporaryRange || ""), { long: true })
|
||||
}),
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
@ -320,11 +319,6 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
message: "The policy associated with this access request has been deleted."
|
||||
});
|
||||
}
|
||||
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
|
||||
throw new BadRequestError({
|
||||
message: "Failed to review access approval request. Users are not authorized to review their own request."
|
||||
});
|
||||
}
|
||||
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
|
@ -45,6 +45,7 @@ export const auditLogStreamServiceFactory = ({
|
||||
}: TCreateAuditLogStreamDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
|
||||
|
||||
const appCfg = getConfig();
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.auditLogStreams) {
|
||||
throw new BadRequestError({
|
||||
@ -61,8 +62,9 @@ export const auditLogStreamServiceFactory = ({
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
|
||||
|
||||
const appCfg = getConfig();
|
||||
if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
|
||||
if (appCfg.isCloud) {
|
||||
blockLocalAndPrivateIpAddresses(url);
|
||||
}
|
||||
|
||||
const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
|
||||
if (totalStreams.length >= plan.auditLogStreamLimit) {
|
||||
@ -133,8 +135,9 @@ export const auditLogStreamServiceFactory = ({
|
||||
const { orgId } = logStream;
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
|
||||
|
||||
const appCfg = getConfig();
|
||||
if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
|
||||
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
|
||||
|
||||
// testing connection first
|
||||
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
|
||||
|
@ -9,14 +9,13 @@ import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { EventType, filterableSecretEvents } from "./audit-log-types";
|
||||
import { EventType } from "./audit-log-types";
|
||||
|
||||
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
|
||||
|
||||
type TFindQuery = {
|
||||
actor?: string;
|
||||
projectId?: string;
|
||||
environment?: string;
|
||||
orgId?: string;
|
||||
eventType?: string;
|
||||
startDate?: string;
|
||||
@ -33,7 +32,6 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
{
|
||||
orgId,
|
||||
projectId,
|
||||
environment,
|
||||
userAgentType,
|
||||
startDate,
|
||||
endDate,
|
||||
@ -42,14 +40,12 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
actorId,
|
||||
actorType,
|
||||
secretPath,
|
||||
secretKey,
|
||||
eventType,
|
||||
eventMetadata
|
||||
}: Omit<TFindQuery, "actor" | "eventType"> & {
|
||||
actorId?: string;
|
||||
actorType?: ActorType;
|
||||
secretPath?: string;
|
||||
secretKey?: string;
|
||||
eventType?: EventType[];
|
||||
eventMetadata?: Record<string, string>;
|
||||
},
|
||||
@ -94,29 +90,8 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
});
|
||||
}
|
||||
|
||||
const eventIsSecretType = !eventType?.length || eventType.some((event) => filterableSecretEvents.includes(event));
|
||||
// We only want to filter for environment/secretPath/secretKey if the user is either checking for all event types
|
||||
|
||||
// ? Note(daniel): use the `eventMetadata" @> ?::jsonb` approach to properly use our GIN index
|
||||
if (projectId && eventIsSecretType) {
|
||||
if (environment || secretPath) {
|
||||
// Handle both environment and secret path together to only use the GIN index once
|
||||
void sqlQuery.whereRaw(`"eventMetadata" @> ?::jsonb`, [
|
||||
JSON.stringify({
|
||||
...(environment && { environment }),
|
||||
...(secretPath && { secretPath })
|
||||
})
|
||||
]);
|
||||
}
|
||||
|
||||
// Handle secret key separately to include the OR condition
|
||||
if (secretKey) {
|
||||
void sqlQuery.whereRaw(
|
||||
`("eventMetadata" @> ?::jsonb
|
||||
OR "eventMetadata"->'secrets' @> ?::jsonb)`,
|
||||
[JSON.stringify({ secretKey }), JSON.stringify([{ secretKey }])]
|
||||
);
|
||||
}
|
||||
if (projectId && secretPath) {
|
||||
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
|
||||
}
|
||||
|
||||
// Filter by actor type
|
||||
|
@ -1,10 +1,8 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { requestContext } from "@fastify/request-context";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
@ -63,8 +61,6 @@ export const auditLogServiceFactory = ({
|
||||
actorType: filter.actorType,
|
||||
eventMetadata: filter.eventMetadata,
|
||||
secretPath: filter.secretPath,
|
||||
secretKey: filter.secretKey,
|
||||
environment: filter.environment,
|
||||
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
|
||||
});
|
||||
|
||||
@ -85,12 +81,8 @@ export const auditLogServiceFactory = ({
|
||||
if (!data.projectId && !data.orgId)
|
||||
throw new BadRequestError({ message: "Must specify either project id or org id" });
|
||||
}
|
||||
const el = { ...data };
|
||||
if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
|
||||
const permissionMetadata = requestContext.get("identityPermissionMetadata");
|
||||
el.actor.metadata.permission = permissionMetadata;
|
||||
}
|
||||
return auditLogQueue.pushToLog(el);
|
||||
|
||||
return auditLogQueue.pushToLog(data);
|
||||
};
|
||||
|
||||
return {
|
||||
|
@ -2,13 +2,6 @@ import {
|
||||
TCreateProjectTemplateDTO,
|
||||
TUpdateProjectTemplateDTO
|
||||
} from "@app/ee/services/project-template/project-template-types";
|
||||
import { SecretRotation, SecretRotationStatus } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||
import {
|
||||
TCreateSecretRotationV2DTO,
|
||||
TDeleteSecretRotationV2DTO,
|
||||
TSecretRotationV2Raw,
|
||||
TUpdateSecretRotationV2DTO
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
|
||||
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
|
||||
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
@ -29,7 +22,6 @@ import {
|
||||
} from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
import { KmipPermission } from "../kmip/kmip-enum";
|
||||
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
filter: {
|
||||
@ -40,11 +32,9 @@ export type TListProjectAuditLogDTO = {
|
||||
endDate?: string;
|
||||
startDate?: string;
|
||||
projectId?: string;
|
||||
environment?: string;
|
||||
auditLogActorId?: string;
|
||||
actorType?: ActorType;
|
||||
secretPath?: string;
|
||||
secretKey?: string;
|
||||
eventMetadata?: Record<string, string>;
|
||||
};
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
@@ -63,8 +53,6 @@ export type TCreateAuditLogDTO = {
projectId?: string;
} & BaseAuthData;

export type AuditLogInfo = Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;

interface BaseAuthData {
ipAddress?: string;
userAgent?: string;
@@ -177,7 +165,6 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
@@ -263,7 +250,6 @@ export enum EventType {
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection",
CREATE_SHARED_SECRET = "create-shared-secret",
CREATE_SECRET_REQUEST = "create-secret-request",
DELETE_SHARED_SECRET = "delete-shared-secret",
READ_SHARED_SECRET = "read-shared-secret",
GET_SECRET_SYNCS = "get-secret-syncs",
@@ -294,34 +280,13 @@ export enum EventType {
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register",

GET_SECRET_ROTATIONS = "get-secret-rotations",
GET_SECRET_ROTATION = "get-secret-rotation",
GET_SECRET_ROTATION_GENERATED_CREDENTIALS = "get-secret-rotation-generated-credentials",
CREATE_SECRET_ROTATION = "create-secret-rotation",
UPDATE_SECRET_ROTATION = "update-secret-rotation",
DELETE_SECRET_ROTATION = "delete-secret-rotation",
SECRET_ROTATION_ROTATE_SECRETS = "secret-rotation-rotate-secrets",

PROJECT_ACCESS_REQUEST = "project-access-request"
KMIP_OPERATION_REGISTER = "kmip-operation-register"
}

export const filterableSecretEvents: EventType[] = [
EventType.GET_SECRET,
EventType.DELETE_SECRETS,
EventType.CREATE_SECRETS,
EventType.UPDATE_SECRETS,
EventType.CREATE_SECRET,
EventType.UPDATE_SECRET,
EventType.DELETE_SECRET
];
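The diff does not show how filterableSecretEvents is consumed, but one plausible use is to restrict which event types a query may target once secret-specific filters such as secretPath or secretKey are supplied. A hedged sketch of that idea, assuming only the names defined above:

// Sketch only: assumes EventType and filterableSecretEvents as declared in this file.
const resolveEventTypes = (requested: EventType[], hasSecretFilter: boolean): EventType[] => {
  if (!hasSecretFilter) return requested;

  // When filtering by secretPath/secretKey, only secret-related events carry that metadata,
  // so anything outside filterableSecretEvents could never match.
  const allowed = requested.length > 0 ? requested : filterableSecretEvents;
  return allowed.filter((eventType) => filterableSecretEvents.includes(eventType));
};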

interface UserActorMetadata {
userId: string;
email?: string | null;
username: string;
permission?: Record<string, unknown>;
}

interface ServiceActorMetadata {
@@ -332,7 +297,6 @@ interface ServiceActorMetadata {
interface IdentityActorMetadata {
identityId: string;
name: string;
permission?: Record<string, unknown>;
}

interface ScimClientActorMetadata {}
@@ -999,7 +963,6 @@ interface LoginIdentityOidcAuthEvent {
identityId: string;
identityOidcAuthId: string;
identityAccessTokenId: string;
oidcClaimsReceived: Record<string, unknown>;
};
}

@@ -1012,7 +975,6 @@ interface AddIdentityOidcAuthEvent {
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
claimMetadataMapping: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
@@ -1037,7 +999,6 @@ interface UpdateIdentityOidcAuthEvent {
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
claimMetadataMapping?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
@@ -1180,7 +1141,6 @@ interface CreateFolderEvent {
folderId: string;
folderName: string;
folderPath: string;
description?: string;
};
}

@@ -1352,16 +1312,6 @@ interface SecretApprovalRequest {
};
}

interface SecretApprovalRequestReview {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
metadata: {
secretApprovalRequestId: string;
reviewedBy: string;
status: ApprovalStatus;
comment: string;
};
}

interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
@@ -2070,15 +2020,6 @@ interface CreateSharedSecretEvent {
};
}

interface CreateSecretRequestEvent {
type: EventType.CREATE_SECRET_REQUEST;
metadata: {
id: string;
accessType: string;
name?: string;
};
}

interface DeleteSharedSecretEvent {
type: EventType.DELETE_SHARED_SECRET;
metadata: {
@@ -2296,15 +2237,6 @@ interface KmipOperationRegisterEvent {
};
}

interface ProjectAccessRequestEvent {
type: EventType.PROJECT_ACCESS_REQUEST;
metadata: {
projectId: string;
requesterId: string;
requesterEmail: string;
};
}

interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
@@ -2330,63 +2262,6 @@ interface RegisterKmipServerEvent {
};
}

interface GetSecretRotationsEvent {
type: EventType.GET_SECRET_ROTATIONS;
metadata: {
type?: SecretRotation;
count: number;
rotationIds: string[];
secretPath?: string;
environment?: string;
};
}

interface GetSecretRotationEvent {
type: EventType.GET_SECRET_ROTATION;
metadata: {
type: SecretRotation;
rotationId: string;
secretPath: string;
environment: string;
};
}

interface GetSecretRotationCredentialsEvent {
type: EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS;
metadata: {
type: SecretRotation;
rotationId: string;
secretPath: string;
environment: string;
};
}

interface CreateSecretRotationEvent {
type: EventType.CREATE_SECRET_ROTATION;
metadata: Omit<TCreateSecretRotationV2DTO, "projectId"> & { rotationId: string };
}

interface UpdateSecretRotationEvent {
type: EventType.UPDATE_SECRET_ROTATION;
metadata: TUpdateSecretRotationV2DTO;
}

interface DeleteSecretRotationEvent {
type: EventType.DELETE_SECRET_ROTATION;
metadata: TDeleteSecretRotationV2DTO;
}

interface RotateSecretRotationEvent {
type: EventType.SECRET_ROTATION_ROTATE_SECRETS;
metadata: Pick<TSecretRotationV2Raw, "parameters" | "secretsMapping" | "type" | "connectionId" | "folderId"> & {
status: SecretRotationStatus;
rotationId: string;
jobId?: string | undefined;
occurredAt: Date;
message?: string | null | undefined;
};
}

export type Event =
| GetSecretsEvent
| GetSecretEvent
@@ -2595,14 +2470,4 @@ export type Event =
| KmipOperationActivateEvent
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| ProjectAccessRequestEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview
| GetSecretRotationsEvent
| GetSecretRotationEvent
| GetSecretRotationCredentialsEvent
| CreateSecretRotationEvent
| UpdateSecretRotationEvent
| DeleteSecretRotationEvent
| RotateSecretRotationEvent;
| KmipOperationRegisterEvent;
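Since Event is a discriminated union keyed on the `type` field, consumers can narrow it with a switch. A small sketch based on the larger of the two union versions shown in this diff (the one that still includes the project-access-request and approval-review events); the returned strings are placeholders:

// Sketch: `event` narrows to the matching interface inside each case.
const describeEvent = (event: Event): string => {
  switch (event.type) {
    case EventType.PROJECT_ACCESS_REQUEST:
      // Narrowed to ProjectAccessRequestEvent, so metadata.requesterEmail is typed.
      return `Access to project ${event.metadata.projectId} requested by ${event.metadata.requesterEmail}`;
    case EventType.SECRET_APPROVAL_REQUEST_REVIEW:
      return `Approval request ${event.metadata.secretApprovalRequestId} reviewed: ${event.metadata.status}`;
    default:
      return event.type;
  }
};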
@@ -1,6 +1,5 @@
import * as x509 from "@peculiar/x509";

import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
@@ -68,7 +67,9 @@ export const certificateEstServiceFactory = ({

const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);

const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];

if (!leafCertificate) {
throw new UnauthorizedError({ message: "Missing client certificate" });
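Both sides of this hunk split a PEM bundle into individual certificates: one calls the shared extractX509CertFromChain helper, the other inlines the same regex. A standalone sketch of such a splitter, using only the regex visible in the diff; the helper name here is hypothetical and not necessarily how the shared function is implemented:

// Sketch of a PEM-splitting helper; returns each certificate block in order of appearance.
const splitPemCertificates = (pemBundle: string): string[] => {
  const matches = pemBundle.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g);
  return matches ?? [];
};

// Usage mirroring the diff: take the first (leaf) certificate from the URL-encoded client cert.
const getLeafCertificate = (sslClientCert: string): string | undefined =>
  splitPemCertificates(decodeURIComponent(sslClientCert))[0];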
@@ -87,7 +88,10 @@ export const certificateEstServiceFactory = ({
const verifiedChains = await Promise.all(
caCertChains.map((chain) => {
const caCert = new x509.X509Certificate(chain.certificate);
const caChain = extractX509CertFromChain(chain.certificateChain)?.map((c) => new x509.X509Certificate(c)) || [];
const caChain =
chain.certificateChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((c) => new x509.X509Certificate(c)) || [];

return isCertChainValid([cert, caCert, ...caChain]);
})
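isCertChainValid is an internal helper whose body is not part of this diff. As a rough illustration of what chain checking can look like with @peculiar/x509, here is a sketch using X509ChainBuilder; this is an assumed approach for illustration, not the helper's actual implementation, and the length check is a deliberately loose heuristic:

import * as x509 from "@peculiar/x509";

// Sketch: try to build a chain from the leaf using the supplied CA certificates.
// If the builder cannot link the leaf back to one of the CAs, the chain stays at
// length 1 (just the leaf), which this sketch treats as a validation failure.
const isChainPlausiblyValid = async (
  leaf: x509.X509Certificate,
  caCerts: x509.X509Certificate[]
): Promise<boolean> => {
  const builder = new x509.X509ChainBuilder({ certificates: caCerts });
  const chain = await builder.build(leaf);
  return chain.length > 1;
};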
@@ -168,15 +172,19 @@ export const certificateEstServiceFactory = ({
}

if (!estConfig.disableBootstrapCertValidation) {
const caCerts = extractX509CertFromChain(estConfig.caChain)?.map((cert) => {
return new x509.X509Certificate(cert);
});
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
return new x509.X509Certificate(cert);
});

if (!caCerts) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}

const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];

if (!leafCertificate) {
throw new BadRequestError({ message: "Missing client certificate" });
@@ -242,7 +250,13 @@ export const certificateEstServiceFactory = ({
kmsService
});

const certificates = extractX509CertFromChain(caCertChain).map((cert) => new x509.X509Certificate(cert));
const certificates = caCertChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));

if (!certificates) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}

const caCertificate = new x509.X509Certificate(caCert);
return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
Some files were not shown because too many files have changed in this diff.