Mirror of https://github.com/Infisical/infisical.git (synced 2025-08-03 20:23:35 +00:00)

Compare commits: 134 commits, fix/azureC ... sid/ENG-29
SHA1:
11d72450bb 2f09fff734 373f68f9b8 7357d377e1 573b990aa3 e15086edc0
13ef3809bd fb49c9250a 5ced7fa923 5ffd42378a f995708e44 c266d68993
c7c8107f85 b906fe34a1 bec1fefee8 cd03107a60 07965de1db b20ff0f029
691cbe0a4f 0787128803 837158e344 03bd1471b2 f53c39f65b 092695089d
2d80681597 cf23f98170 c4c8e121f0 0701c996e5 4ca6f165b7 b9dd565926
136b0bdcb5 7266d1f310 9c6ec807cb e2fda5afe0 756b46428a 5fcae35fae
359e19f804 2aa548c7dc 9d3a382b48 4f00fc6777 1f6a63fa71 9e76fa8230
e2d4816465 37c8fc80f7 5ca521ea6b 40de8331a3 9374ee3c2e 561dbb8835
dece214073 992df5c7d0 00e382d774 f63c434c0e 9f0250caf2 d47f6f7ec9
1126c6b0fa 7949142ea7 da28f9224b 122de99606 82b765553c 8972521716
81b45b24ec f2b0e4ae37 57fcfdaf21 e430abfc9e 7d1bc86702 975b621bc8
ba9da3e6ec d2274a622a 41ba7edba2 7acefbca29 e246f6bbfe f265fa6d37
8eebd7228f 2a5593ea30 17af33372c 27da14df9d cd4b9cd03a 0779091d1f
c421057cf1 8df4616265 484f34a257 32851565a7 0b7b32bdc3 b5ce965a92
3400a8f911 e6588b5d0e 44c475fad9 608979efa7 cb86fcbc43 585cb1b30c
00e4bf8134 7fdee073d8 d1c5ff07e4 c368178cb1 15b3a78db1 b93a43460c
99e8bdef58 52ef0e6b81 0f06c4c27a e34deb7bd0 4b6f9fdec2 5df7539f65
4e960445a4 7af5a4ad8d 2ff211d235 b05ea8a69a 0d97bb4c8c d944aefa69
339c3b36af 60657f0bc6 05408bc151 bb6daae305 96a4a43db3 691eed6680
b4ed1fa96a cbab7e1bcb 8c2c68b53d 5ce5d6a8f5 1da18815dd 0fdc8a5c96
068522fadf 27360e101e b74a4bfe7e 0e461bcd09 d9d0dbd8f5 d3052512a2
944cad54ba fcf0a36cfd 4a8ff6874c 0529b44aa9 e20f461666 4ef8b3f674
01545cd817 565e780e53
.env.example (13 changed lines)
@@ -123,8 +123,17 @@ INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET=

INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET=

INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET=

INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
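The hunk above only declares the new Azure app connection variables. As a minimal sketch of how values like these are typically surfaced to a Node backend, the snippet below parses them with zod; only the variable names come from the diff, the schema shape and its placement are illustrative assumptions rather than Infisical's actual config code.

import { z } from "zod";

// Hypothetical subset of an env schema; only the names match the .env.example diff above.
const azureAppConnectionEnvSchema = z.object({
  INF_APP_CONNECTION_AZURE_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_AZURE_CLIENT_SECRET: z.string().optional(),
  INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET: z.string().optional(),
  INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET: z.string().optional(),
  INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET: z.string().optional(),
  INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET: z.string().optional()
});

// Parse once at startup; the empty values in .env.example simply mean "unset" here.
const azureEnv = azureAppConnectionEnvSchema.parse(process.env);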
.github/workflows/release_build_infisical_cli.yml (153 changed lines, vendored)
@@ -1,153 +0,0 @@
|
||||
name: Build and release CLI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
push:
|
||||
# run only against tags
|
||||
tags:
|
||||
- "infisical-cli/v*.*.*"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
cli-integration-tests:
|
||||
name: Run tests before deployment
|
||||
uses: ./.github/workflows/run-cli-tests.yml
|
||||
secrets:
|
||||
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
|
||||
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
|
||||
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
|
||||
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
|
||||
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
|
||||
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
|
||||
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
|
||||
npm-release:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
working-directory: ./npm
|
||||
needs:
|
||||
- cli-integration-tests
|
||||
- goreleaser
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Extract version
|
||||
run: |
|
||||
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
|
||||
echo "Version extracted: $VERSION"
|
||||
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
|
||||
|
||||
- name: Print version
|
||||
run: echo ${{ env.CLI_VERSION }}
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
cache-dependency-path: ./npm/package-lock.json
|
||||
- name: Install dependencies
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm install --ignore-scripts
|
||||
|
||||
- name: Set NPM version
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
|
||||
|
||||
- name: Setup NPM
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: |
|
||||
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
|
||||
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
|
||||
|
||||
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
|
||||
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
- name: Pack NPM
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm pack
|
||||
|
||||
- name: Publish NPM
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
goreleaser:
|
||||
runs-on: ubuntu-latest-8-cores
|
||||
needs: [cli-integration-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- run: git fetch --force --tags
|
||||
- run: echo "Ref name ${{github.ref_name}}"
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: Setup for libssl1.0-dev
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
|
||||
sudo apt update
|
||||
sudo apt-get install -y libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
|
||||
- uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: v1.26.2-pro
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
|
||||
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
|
||||
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
|
||||
AUR_KEY: ${{ secrets.AUR_KEY }}
|
||||
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
|
||||
- uses: actions/setup-python@v4
|
||||
- run: pip install --upgrade cloudsmith-cli
|
||||
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
|
||||
with:
|
||||
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
|
||||
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
|
||||
- name: Install deb-s3
|
||||
run: gem install deb-s3
|
||||
- name: Configure GPG Key
|
||||
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
|
||||
env:
|
||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
|
||||
- name: Publish to CloudSmith
|
||||
run: sh cli/upload_to_cloudsmith.sh
|
||||
env:
|
||||
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
|
||||
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
|
||||
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
|
||||
- name: Invalidate Cloudfront cache
|
||||
run: aws cloudfront create-invalidation --distribution-id $CLOUDFRONT_DISTRIBUTION_ID --paths '/deb/dists/stable/*'
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
|
||||
CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}
|
.github/workflows/run-cli-tests.yml (55 changed lines, vendored)
@@ -1,55 +0,0 @@
name: Go CLI Tests

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "cli/**"

  workflow_dispatch:

  workflow_call:
    secrets:
      CLI_TESTS_UA_CLIENT_ID:
        required: true
      CLI_TESTS_UA_CLIENT_SECRET:
        required: true
      CLI_TESTS_SERVICE_TOKEN:
        required: true
      CLI_TESTS_PROJECT_ID:
        required: true
      CLI_TESTS_ENV_SLUG:
        required: true
      CLI_TESTS_USER_EMAIL:
        required: true
      CLI_TESTS_USER_PASSWORD:
        required: true
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
        required: true
jobs:
  test:
    defaults:
      run:
        working-directory: ./cli
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.x"
      - name: Install dependencies
        run: go get .
      - name: Test with the Go CLI
        env:
          CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
          CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
          CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
          CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
          CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
          CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
          CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
          # INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

        run: go test -v -count=1 ./test
.goreleaser.yaml (241 changed lines)
@@ -1,241 +0,0 @@
|
||||
# This is an example .goreleaser.yml file with some sensible defaults.
|
||||
# Make sure to check the documentation at https://goreleaser.com
|
||||
# before:
|
||||
# hooks:
|
||||
# # You may remove this if you don't use go modules.
|
||||
# - cd cli && go mod tidy
|
||||
# # you may remove this if you don't need go generate
|
||||
# - cd cli && go generate ./...
|
||||
before:
|
||||
hooks:
|
||||
- ./cli/scripts/completions.sh
|
||||
- ./cli/scripts/manpages.sh
|
||||
|
||||
monorepo:
|
||||
tag_prefix: infisical-cli/
|
||||
dir: cli
|
||||
|
||||
builds:
|
||||
- id: darwin-build
|
||||
binary: infisical
|
||||
ldflags:
|
||||
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
|
||||
flags:
|
||||
- -trimpath
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=/home/runner/work/osxcross/target/bin/o64-clang
|
||||
- CXX=/home/runner/work/osxcross/target/bin/o64-clang++
|
||||
goos:
|
||||
- darwin
|
||||
ignore:
|
||||
- goos: darwin
|
||||
goarch: "386"
|
||||
dir: ./cli
|
||||
|
||||
- id: all-other-builds
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
binary: infisical
|
||||
ldflags:
|
||||
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
|
||||
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
|
||||
flags:
|
||||
- -trimpath
|
||||
goos:
|
||||
- freebsd
|
||||
- linux
|
||||
- netbsd
|
||||
- openbsd
|
||||
- windows
|
||||
goarch:
|
||||
- "386"
|
||||
- amd64
|
||||
- arm
|
||||
- arm64
|
||||
goarm:
|
||||
- "6"
|
||||
- "7"
|
||||
ignore:
|
||||
- goos: windows
|
||||
goarch: "386"
|
||||
- goos: freebsd
|
||||
goarch: "386"
|
||||
dir: ./cli
|
||||
|
||||
archives:
|
||||
- format_overrides:
|
||||
- goos: windows
|
||||
format: zip
|
||||
files:
|
||||
- ../README*
|
||||
- ../LICENSE*
|
||||
- ../manpages/*
|
||||
- ../completions/*
|
||||
|
||||
release:
|
||||
replace_existing_draft: true
|
||||
mode: "replace"
|
||||
|
||||
checksum:
|
||||
name_template: "checksums.txt"
|
||||
|
||||
snapshot:
|
||||
name_template: "{{ .Version }}-devel"
|
||||
|
||||
# publishers:
|
||||
# - name: fury.io
|
||||
# ids:
|
||||
# - infisical
|
||||
# dir: "{{ dir .ArtifactPath }}"
|
||||
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
|
||||
|
||||
brews:
|
||||
- name: infisical
|
||||
tap:
|
||||
owner: Infisical
|
||||
name: homebrew-get-cli
|
||||
commit_author:
|
||||
name: "Infisical"
|
||||
email: ai@infisical.com
|
||||
folder: Formula
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
install: |-
|
||||
bin.install "infisical"
|
||||
bash_completion.install "completions/infisical.bash" => "infisical"
|
||||
zsh_completion.install "completions/infisical.zsh" => "_infisical"
|
||||
fish_completion.install "completions/infisical.fish"
|
||||
man1.install "manpages/infisical.1.gz"
|
||||
- name: "infisical@{{.Version}}"
|
||||
tap:
|
||||
owner: Infisical
|
||||
name: homebrew-get-cli
|
||||
commit_author:
|
||||
name: "Infisical"
|
||||
email: ai@infisical.com
|
||||
folder: Formula
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
install: |-
|
||||
bin.install "infisical"
|
||||
bash_completion.install "completions/infisical.bash" => "infisical"
|
||||
zsh_completion.install "completions/infisical.zsh" => "_infisical"
|
||||
fish_completion.install "completions/infisical.fish"
|
||||
man1.install "manpages/infisical.1.gz"
|
||||
|
||||
nfpms:
|
||||
- id: infisical
|
||||
package_name: infisical
|
||||
builds:
|
||||
- all-other-builds
|
||||
vendor: Infisical, Inc
|
||||
homepage: https://infisical.com/
|
||||
maintainer: Infisical, Inc
|
||||
description: The offical Infisical CLI
|
||||
license: MIT
|
||||
formats:
|
||||
- rpm
|
||||
- deb
|
||||
- apk
|
||||
- archlinux
|
||||
bindir: /usr/bin
|
||||
contents:
|
||||
- src: ./completions/infisical.bash
|
||||
dst: /etc/bash_completion.d/infisical
|
||||
- src: ./completions/infisical.fish
|
||||
dst: /usr/share/fish/vendor_completions.d/infisical.fish
|
||||
- src: ./completions/infisical.zsh
|
||||
dst: /usr/share/zsh/site-functions/_infisical
|
||||
- src: ./manpages/infisical.1.gz
|
||||
dst: /usr/share/man/man1/infisical.1.gz
|
||||
|
||||
scoop:
|
||||
bucket:
|
||||
owner: Infisical
|
||||
name: scoop-infisical
|
||||
commit_author:
|
||||
name: "Infisical"
|
||||
email: ai@infisical.com
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
license: MIT
|
||||
|
||||
winget:
|
||||
- name: infisical
|
||||
publisher: infisical
|
||||
license: MIT
|
||||
homepage: https://infisical.com
|
||||
short_description: "The official Infisical CLI"
|
||||
repository:
|
||||
owner: infisical
|
||||
name: winget-pkgs
|
||||
branch: "infisical-{{.Version}}"
|
||||
pull_request:
|
||||
enabled: true
|
||||
draft: false
|
||||
base:
|
||||
owner: microsoft
|
||||
name: winget-pkgs
|
||||
branch: master
|
||||
|
||||
aurs:
|
||||
- name: infisical-bin
|
||||
homepage: "https://infisical.com"
|
||||
description: "The official Infisical CLI"
|
||||
maintainers:
|
||||
- Infisical, Inc <support@infisical.com>
|
||||
license: MIT
|
||||
private_key: "{{ .Env.AUR_KEY }}"
|
||||
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
|
||||
package: |-
|
||||
# bin
|
||||
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
|
||||
# license
|
||||
install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
|
||||
# completions
|
||||
mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
|
||||
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
|
||||
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
|
||||
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
|
||||
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
|
||||
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
|
||||
# man pages
|
||||
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
|
||||
|
||||
dockers:
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:latest-amd64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/amd64"
|
||||
- dockerfile: docker/alpine
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
use: buildx
|
||||
ids:
|
||||
- all-other-builds
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- "infisical/cli:latest-arm64"
|
||||
build_flag_templates:
|
||||
- "--pull"
|
||||
- "--platform=linux/arm64"
|
||||
|
||||
docker_manifests:
|
||||
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
|
||||
image_templates:
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
|
||||
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
|
||||
- name_template: "infisical/cli:latest"
|
||||
image_templates:
|
||||
- "infisical/cli:latest-amd64"
|
||||
- "infisical/cli:latest-arm64"
|
@@ -34,6 +34,8 @@ ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

ENV NODE_OPTIONS="--max-old-space-size=8192"

# Build
RUN npm run build

@@ -209,6 +211,11 @@ EXPOSE 443
RUN grep -v 'import "./lib/telemetry/instrumentation.mjs";' dist/main.mjs > dist/main.mjs.tmp && \
    mv dist/main.mjs.tmp dist/main.mjs

# The OpenSSL library is installed in different locations in different architectures (x86_64 and arm64).
# This is a workaround to avoid errors when the library is not found.
RUN ln -sf /usr/local/lib64/ossl-modules /usr/local/lib/ossl-modules || \
    ln -sf /usr/local/lib/ossl-modules /usr/local/lib64/ossl-modules

USER non-root-user

CMD ["./standalone-entrypoint.sh"]

@@ -55,6 +55,8 @@ USER non-root-user
##
FROM base AS backend-build

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Install all required dependencies for build

@@ -84,6 +86,8 @@ RUN npm run build
# Production stage
FROM base AS backend-runner

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Install all required dependencies for runtime

@@ -112,6 +116,11 @@ RUN mkdir frontend-build
FROM base AS production

RUN apt-get update && apt-get install -y \
    build-essential \
    autoconf \
    automake \
    libtool \
    libssl-dev \
    ca-certificates \
    bash \
    curl \

@@ -171,6 +180,7 @@ ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024"
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend
backend/package-lock.json (84 changed lines, generated)
@@ -33,7 +33,7 @@
|
||||
"@gitbeaker/rest": "^42.5.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@infisical/quic": "^1.0.8",
|
||||
"@node-saml/passport-saml": "^5.0.1",
|
||||
"@node-saml/passport-saml": "^5.1.0",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/core": "^5.2.1",
|
||||
"@octokit/plugin-paginate-graphql": "^4.0.1",
|
||||
@@ -61,7 +61,7 @@
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
"aws-sdk": "^2.1553.0",
|
||||
"axios": "^1.6.7",
|
||||
"axios": "^1.11.0",
|
||||
"axios-retry": "^4.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"botbuilder": "^4.23.2",
|
||||
@@ -9573,20 +9573,20 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/node-saml": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
|
||||
"integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz",
|
||||
"integrity": "sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/debug": "^4.1.12",
|
||||
"@types/qs": "^6.9.11",
|
||||
"@types/qs": "^6.9.18",
|
||||
"@types/xml-encryption": "^1.2.4",
|
||||
"@types/xml2js": "^0.4.14",
|
||||
"@xmldom/is-dom-node": "^1.0.1",
|
||||
"@xmldom/xmldom": "^0.8.10",
|
||||
"debug": "^4.3.4",
|
||||
"xml-crypto": "^6.0.1",
|
||||
"xml-encryption": "^3.0.2",
|
||||
"debug": "^4.4.0",
|
||||
"xml-crypto": "^6.1.2",
|
||||
"xml-encryption": "^3.1.0",
|
||||
"xml2js": "^0.6.2",
|
||||
"xmlbuilder": "^15.1.1",
|
||||
"xpath": "^0.0.34"
|
||||
@@ -9596,9 +9596,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/node-saml/node_modules/debug": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
|
||||
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
|
||||
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
@@ -9635,14 +9635,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@node-saml/passport-saml": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
|
||||
"integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.1.0.tgz",
|
||||
"integrity": "sha512-pBm+iFjv9eihcgeJuSUs4c0AuX1QEFdHwP8w1iaWCfDzXdeWZxUBU5HT2bY2S4dvNutcy+A9hYsH7ZLBGtgwDg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@node-saml/node-saml": "^5.0.1",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/passport": "^1.0.16",
|
||||
"@node-saml/node-saml": "^5.1.0",
|
||||
"@types/express": "^4.17.23",
|
||||
"@types/passport": "^1.0.17",
|
||||
"@types/passport-strategy": "^0.2.38",
|
||||
"passport": "^0.7.0",
|
||||
"passport-strategy": "^1.0.0"
|
||||
@@ -13350,9 +13350,10 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/express": {
|
||||
"version": "4.17.21",
|
||||
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
|
||||
"integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz",
|
||||
"integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/body-parser": "*",
|
||||
"@types/express-serve-static-core": "^4.17.33",
|
||||
@@ -13522,9 +13523,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@types/passport": {
|
||||
"version": "1.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.16.tgz",
|
||||
"integrity": "sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A==",
|
||||
"version": "1.0.17",
|
||||
"resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz",
|
||||
"integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/express": "*"
|
||||
}
|
||||
@@ -13699,14 +13701,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@types/request/node_modules/form-data": {
|
||||
"version": "2.5.2",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.2.tgz",
|
||||
"integrity": "sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==",
|
||||
"version": "2.5.5",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
||||
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.6",
|
||||
"mime-types": "^2.1.12",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.35",
|
||||
"safe-buffer": "^5.2.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -15230,13 +15234,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.7.9",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
|
||||
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
|
||||
"version": "1.11.0",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
|
||||
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.0",
|
||||
"form-data": "^4.0.4",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -18761,13 +18765,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
|
||||
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"es-set-tostringtag": "^2.1.0",
|
||||
"hasown": "^2.0.2",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
@@ -31948,9 +31954,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/xml-crypto": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
|
||||
"integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
|
||||
"version": "6.1.2",
|
||||
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.1.2.tgz",
|
||||
"integrity": "sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@xmldom/is-dom-node": "^1.0.1",
|
||||
@@ -153,7 +153,7 @@
    "@gitbeaker/rest": "^42.5.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
    "@node-saml/passport-saml": "^5.0.1",
    "@node-saml/passport-saml": "^5.1.0",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/core": "^5.2.1",
    "@octokit/plugin-paginate-graphql": "^4.0.1",
@@ -181,7 +181,7 @@
    "ajv": "^8.12.0",
    "argon2": "^0.31.2",
    "aws-sdk": "^2.1553.0",
    "axios": "^1.6.7",
    "axios": "^1.11.0",
    "axios-retry": "^4.0.0",
    "bcrypt": "^5.1.1",
    "botbuilder": "^4.23.2",
backend/src/@types/knex.d.ts (21 changed lines, vendored)
@@ -489,6 +489,11 @@ import {
  TWorkflowIntegrationsInsert,
  TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import {
  TAccessApprovalPoliciesEnvironments,
  TAccessApprovalPoliciesEnvironmentsInsert,
  TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
import {
  TIdentityLdapAuths,
  TIdentityLdapAuthsInsert,
@@ -510,6 +515,11 @@ import {
  TRemindersRecipientsInsert,
  TRemindersRecipientsUpdate
} from "@app/db/schemas/reminders-recipients";
import {
  TSecretApprovalPoliciesEnvironments,
  TSecretApprovalPoliciesEnvironmentsInsert,
  TSecretApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/secret-approval-policies-environments";
import {
  TSecretReminderRecipients,
  TSecretReminderRecipientsInsert,
@@ -887,6 +897,12 @@ declare module "knex/types/tables" {
      TAccessApprovalPoliciesBypassersUpdate
    >;

    [TableName.AccessApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
      TAccessApprovalPoliciesEnvironments,
      TAccessApprovalPoliciesEnvironmentsInsert,
      TAccessApprovalPoliciesEnvironmentsUpdate
    >;

    [TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
      TAccessApprovalRequests,
      TAccessApprovalRequestsInsert,
@@ -935,6 +951,11 @@ declare module "knex/types/tables" {
      TSecretApprovalRequestSecretTagsInsert,
      TSecretApprovalRequestSecretTagsUpdate
    >;
    [TableName.SecretApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
      TSecretApprovalPoliciesEnvironments,
      TSecretApprovalPoliciesEnvironmentsInsert,
      TSecretApprovalPoliciesEnvironmentsUpdate
    >;
    [TableName.SecretRotation]: KnexOriginal.CompositeTableType<
      TSecretRotations,
      TSecretRotationsInsert,
@@ -0,0 +1,96 @@
import { Knex } from "knex";

import { selectAllTableCols } from "@app/lib/knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment))) {
    await knex.schema.createTable(TableName.AccessApprovalPolicyEnvironment, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment);
      t.timestamps(true, true, true);
      t.unique(["policyId", "envId"]);
    });

    await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);

    const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
      .select(selectAllTableCols(TableName.AccessApprovalPolicy))
      .whereNotNull(`${TableName.AccessApprovalPolicy}.envId`);

    const accessApprovalPolicies = existingAccessApprovalPolicies.map(async (policy) => {
      await knex(TableName.AccessApprovalPolicyEnvironment).insert({
        policyId: policy.id,
        envId: policy.envId
      });
    });

    await Promise.all(accessApprovalPolicies);
  }
  if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment))) {
    await knex.schema.createTable(TableName.SecretApprovalPolicyEnvironment, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("policyId").notNullable();
      t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment);
      t.timestamps(true, true, true);
      t.unique(["policyId", "envId"]);
    });

    await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);

    const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
      .select(selectAllTableCols(TableName.SecretApprovalPolicy))
      .whereNotNull(`${TableName.SecretApprovalPolicy}.envId`);

    const secretApprovalPolicies = existingSecretApprovalPolicies.map(async (policy) => {
      await knex(TableName.SecretApprovalPolicyEnvironment).insert({
        policyId: policy.id,
        envId: policy.envId
      });
    });

    await Promise.all(secretApprovalPolicies);
  }

  await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
    t.dropForeign(["envId"]);

    // Add the new foreign key constraint with ON DELETE SET NULL
    t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
  });

  await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
    t.dropForeign(["envId"]);

    // Add the new foreign key constraint with ON DELETE SET NULL
    t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
  });
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment)) {
    await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyEnvironment);
    await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
  }
  if (await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment)) {
    await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyEnvironment);
    await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
  }

  await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
    t.dropForeign(["envId"]);
    t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
  });

  await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
    t.dropForeign(["envId"]);
    t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
  });
}
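The backfill in the migration above issues one insert per existing policy. If that ever needed tightening, a single bulk write per table is one possible alternative; the sketch below is an assumption-level variant that reuses the same TableName constants, while the batching itself is not part of this diff.

// Hypothetical alternative to the per-row inserts in the migration above:
// build the rows first, then write them in chunks with knex.batchInsert.
const rows = existingAccessApprovalPolicies.map((policy) => ({
  policyId: policy.id,
  envId: policy.envId
}));

if (rows.length) {
  // 100 rows per INSERT statement; runs inside the same migration transaction.
  await knex.batchInsert(TableName.AccessApprovalPolicyEnvironment, rows, 100);
}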
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues"))) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.specificType("secretDetectionIgnoreValues", "text[]");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues")) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("secretDetectionIgnoreValues");
    });
  }
}
@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AccessApprovalPoliciesEnvironmentsSchema = z.object({
  id: z.string().uuid(),
  policyId: z.string().uuid(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAccessApprovalPoliciesEnvironments = z.infer<typeof AccessApprovalPoliciesEnvironmentsSchema>;
export type TAccessApprovalPoliciesEnvironmentsInsert = Omit<
  z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>,
  TImmutableDBKeys
>;
export type TAccessApprovalPoliciesEnvironmentsUpdate = Partial<
  Omit<z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;
@@ -100,6 +100,7 @@ export enum TableName {
  AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
  AccessApprovalRequest = "access_approval_requests",
  AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
  AccessApprovalPolicyEnvironment = "access_approval_policies_environments",
  SecretApprovalPolicy = "secret_approval_policies",
  SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
  SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
@@ -107,6 +108,7 @@ export enum TableName {
  SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
  SecretApprovalRequestSecret = "secret_approval_requests_secrets",
  SecretApprovalRequestSecretTag = "secret_approval_request_secret_tags",
  SecretApprovalPolicyEnvironment = "secret_approval_policies_environments",
  SecretRotation = "secret_rotations",
  SecretRotationOutput = "secret_rotation_outputs",
  SamlConfig = "saml_configs",
@@ -30,7 +30,8 @@ export const ProjectsSchema = z.object({
  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
  secretSharing: z.boolean().default(true),
  showSnapshotsLegacy: z.boolean().default(false),
  defaultProduct: z.string().nullable().optional()
  defaultProduct: z.string().nullable().optional(),
  secretDetectionIgnoreValues: z.string().array().nullable().optional()
});

export type TProjects = z.infer<typeof ProjectsSchema>;
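The new column name suggests per-project values that secret detection should skip. One purely illustrative way it might be consumed is to filter scanner findings against the list; only secretDetectionIgnoreValues comes from the schema above, while the TProjectLike shape and shouldIgnoreFinding helper are assumptions, not code from this diff.

// Hypothetical helper: drop findings whose captured value is explicitly ignored for the project.
type TProjectLike = { secretDetectionIgnoreValues?: string[] | null };

const shouldIgnoreFinding = (project: TProjectLike, findingValue: string): boolean =>
  (project.secretDetectionIgnoreValues ?? []).includes(findingValue.trim());

// Usage sketch:
// const visibleFindings = findings.filter((f) => !shouldIgnoreFinding(project, f.value));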
@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const SecretApprovalPoliciesEnvironmentsSchema = z.object({
  id: z.string().uuid(),
  policyId: z.string().uuid(),
  envId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSecretApprovalPoliciesEnvironments = z.infer<typeof SecretApprovalPoliciesEnvironmentsSchema>;
export type TSecretApprovalPoliciesEnvironmentsInsert = Omit<
  z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>,
  TImmutableDBKeys
>;
export type TSecretApprovalPoliciesEnvironmentsUpdate = Partial<
  Omit<z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;
@@ -17,52 +17,66 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim(),
|
||||
name: z.string().optional(),
|
||||
secretPath: z.string().trim().min(1, { message: "Secret path cannot be empty" }).transform(removeTrailingSlash),
|
||||
environment: z.string(),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({
|
||||
type: z.literal(ApproverType.Group),
|
||||
id: z.string(),
|
||||
sequence: z.number().int().default(1)
|
||||
}),
|
||||
z.object({
|
||||
type: z.literal(ApproverType.User),
|
||||
id: z.string().optional(),
|
||||
username: z.string().optional(),
|
||||
sequence: z.number().int().default(1)
|
||||
body: z
|
||||
.object({
|
||||
projectSlug: z.string().trim(),
|
||||
name: z.string().optional(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, { message: "Secret path cannot be empty" })
|
||||
.transform(removeTrailingSlash),
|
||||
environment: z.string().optional(),
|
||||
environments: z.string().array().optional(),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({
|
||||
type: z.literal(ApproverType.Group),
|
||||
id: z.string(),
|
||||
sequence: z.number().int().default(1)
|
||||
}),
|
||||
z.object({
|
||||
type: z.literal(ApproverType.User),
|
||||
id: z.string().optional(),
|
||||
username: z.string().optional(),
|
||||
sequence: z.number().int().default(1)
|
||||
})
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 approvers")
|
||||
.min(1, { message: "At least one approver should be provided" })
|
||||
.refine(
|
||||
// @ts-expect-error this is ok
|
||||
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
|
||||
"Must provide either username or id"
|
||||
),
|
||||
bypassers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
|
||||
z.object({
|
||||
type: z.literal(BypasserType.User),
|
||||
id: z.string().optional(),
|
||||
username: z.string().optional()
|
||||
})
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 bypassers")
|
||||
.optional(),
|
||||
approvalsRequired: z
|
||||
.object({
|
||||
numberOfApprovals: z.number().int(),
|
||||
stepNumber: z.number().int()
|
||||
})
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 approvers")
|
||||
.min(1, { message: "At least one approver should be provided" })
|
||||
.refine(
|
||||
// @ts-expect-error this is ok
|
||||
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
|
||||
"Must provide either username or id"
|
||||
),
|
||||
bypassers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 bypassers")
|
||||
.optional(),
|
||||
approvalsRequired: z
|
||||
.object({
|
||||
numberOfApprovals: z.number().int(),
|
||||
stepNumber: z.number().int()
|
||||
})
|
||||
.array()
|
||||
.optional(),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
.array()
|
||||
.optional(),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
})
|
||||
.refine(
|
||||
(val) => Boolean(val.environment) || Boolean(val.environments),
|
||||
"Must provide either environment or environments"
|
||||
),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
@@ -78,7 +92,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
projectSlug: req.body.projectSlug,
|
||||
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
|
||||
name:
|
||||
req.body.name ?? `${req.body.environment || req.body.environments?.join("-").substring(0, 250)}-${nanoid(3)}`,
|
||||
enforcementLevel: req.body.enforcementLevel
|
||||
});
|
||||
return { approval };
|
||||
@@ -211,6 +226,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
approvals: z.number().min(1).optional(),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true),
|
||||
environments: z.array(z.string()).optional(),
|
||||
approvalsRequired: z
|
||||
.object({
|
||||
numberOfApprovals: z.number().int(),
|
||||
|
@@ -17,34 +17,45 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
workspaceId: z.string(),
|
||||
name: z.string().optional(),
|
||||
environment: z.string(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.min(1, { message: "Secret path cannot be empty" })
|
||||
.transform((val) => removeTrailingSlash(val)),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" })
|
||||
.max(100, "Cannot have more than 100 approvers"),
|
||||
bypassers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 bypassers")
|
||||
.optional(),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
workspaceId: z.string(),
|
||||
name: z.string().optional(),
|
||||
environment: z.string().optional(),
|
||||
environments: z.string().array().optional(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.min(1, { message: "Secret path cannot be empty" })
|
||||
.transform((val) => removeTrailingSlash(val)),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({
|
||||
type: z.literal(ApproverType.User),
|
||||
id: z.string().optional(),
|
||||
username: z.string().optional()
|
||||
})
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" })
|
||||
.max(100, "Cannot have more than 100 approvers"),
|
||||
bypassers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
|
||||
z.object({
|
||||
type: z.literal(BypasserType.User),
|
||||
id: z.string().optional(),
|
||||
username: z.string().optional()
|
||||
})
|
||||
])
|
||||
.array()
|
||||
.max(100, "Cannot have more than 100 bypassers")
|
||||
.optional(),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
})
|
||||
.refine((data) => data.environment || data.environments, "At least one environment should be provided"),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
@@ -60,7 +71,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
actorOrgId: req.permission.orgId,
|
||||
projectId: req.body.workspaceId,
|
||||
...req.body,
|
||||
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
|
||||
name: req.body.name ?? `${req.body.environment || req.body.environments?.join(",")}-${nanoid(3)}`,
|
||||
enforcementLevel: req.body.enforcementLevel
|
||||
});
|
||||
return { approval };
|
||||
@@ -103,7 +114,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
.optional()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : undefined)),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
|
||||
allowedSelfApprovals: z.boolean().default(true)
|
||||
allowedSelfApprovals: z.boolean().default(true),
|
||||
environments: z.array(z.string()).optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  CreateGitLabDataSourceSchema,
  GitLabDataSourceSchema,
  UpdateGitLabDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/gitlab";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerGitLabSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitLab,
    server,
    responseSchema: GitLabDataSourceSchema,
    createSchema: CreateGitLabDataSourceSchema,
    updateSchema: UpdateGitLabDataSourceSchema
  });
@@ -1,3 +1,4 @@
import { registerGitLabSecretScanningRouter } from "@app/ee/routes/v2/secret-scanning-v2-routers/gitlab-secret-scanning-router";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
@@ -10,5 +11,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
  [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter,
  [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
};
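The map above keys each SecretScanningDataSource to a Fastify registration function. A plausible consumption pattern, shown only as an assumption since the actual wiring is not part of this diff, is to loop over the map when building the secret-scanning v2 router; the function name and prefix format below are illustrative.

// Hypothetical wiring: register every data-source router under its own prefix.
export const registerSecretScanningDataSourceRouters = async (server: FastifyZodProvider) => {
  for (const [dataSource, registerRouter] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
    // e.g. /data-sources/github, /data-sources/bitbucket, /data-sources/gitlab
    await server.register(registerRouter, { prefix: `/data-sources/${dataSource}` });
  }
};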
@@ -4,6 +4,7 @@ import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import { GitLabDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/gitlab";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus
@@ -24,7 +25,8 @@ import { AuthMode } from "@app/services/auth/auth-type";

const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
  GitHubDataSourceListItemSchema,
  BitbucketDataSourceListItemSchema
  BitbucketDataSourceListItemSchema,
  GitLabDataSourceListItemSchema
]);

export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
@@ -26,6 +26,7 @@ export interface TAccessApprovalPolicyDALFactory
|
||||
>,
|
||||
customFilter?: {
|
||||
policyId?: string;
|
||||
envId?: string;
|
||||
},
|
||||
tx?: Knex
|
||||
) => Promise<
|
||||
@@ -55,11 +56,6 @@ export interface TAccessApprovalPolicyDALFactory
|
||||
allowedSelfApprovals: boolean;
|
||||
secretPath: string;
|
||||
deletedAt?: Date | null | undefined;
|
||||
environment: {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
projectId: string;
|
||||
bypassers: (
|
||||
| {
|
||||
@@ -72,6 +68,11 @@ export interface TAccessApprovalPolicyDALFactory
|
||||
type: BypasserType.Group;
|
||||
}
|
||||
)[];
|
||||
environments: {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
}[];
|
||||
}[]
|
||||
>;
|
||||
findById: (
|
||||
@@ -95,11 +96,11 @@ export interface TAccessApprovalPolicyDALFactory
|
||||
allowedSelfApprovals: boolean;
|
||||
secretPath: string;
|
||||
deletedAt?: Date | null | undefined;
|
||||
environment: {
|
||||
environments: {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
}[];
|
||||
projectId: string;
|
||||
}
|
||||
| undefined
|
||||
@@ -143,6 +144,26 @@ export interface TAccessApprovalPolicyDALFactory
|
||||
}
|
||||
| undefined
|
||||
>;
|
||||
findPolicyByEnvIdAndSecretPath: (
|
||||
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
|
||||
tx?: Knex
|
||||
) => Promise<{
|
||||
name: string;
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
approvals: number;
|
||||
enforcementLevel: string;
|
||||
allowedSelfApprovals: boolean;
|
||||
    secretPath: string;
    deletedAt?: Date | null | undefined;
    environments: {
      id: string;
      name: string;
      slug: string;
    }[];
    projectId: string;
  }>;
}

export interface TAccessApprovalPolicyServiceFactory {
@@ -367,6 +388,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
    filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
    customFilter?: {
      policyId?: string;
      envId?: string;
    }
  ) => {
    const result = await tx(TableName.AccessApprovalPolicy)
@@ -377,7 +399,17 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
          void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId);
        }
      })
      .join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
      .join(
        TableName.AccessApprovalPolicyEnvironment,
        `${TableName.AccessApprovalPolicy}.id`,
        `${TableName.AccessApprovalPolicyEnvironment}.policyId`
      )
      .join(TableName.Environment, `${TableName.AccessApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
      .where((qb) => {
        if (customFilter?.envId) {
          void qb.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
        }
      })
      .leftJoin(
        TableName.AccessApprovalPolicyApprover,
        `${TableName.AccessApprovalPolicy}.id`,
@@ -404,7 +436,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
      .select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
      .select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
      .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
      .select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
      .select(tx.ref("id").withSchema(TableName.Environment).as("environmentId"))
      .select(tx.ref("projectId").withSchema(TableName.Environment))
      .select(selectAllTableCols(TableName.AccessApprovalPolicy));

@@ -448,6 +480,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
            sequence: approverSequence,
            approvalsRequired
          })
        },
        {
          key: "environmentId",
          label: "environments" as const,
          mapper: ({ environmentId: id, envName, envSlug }) => ({
            id,
            name: envName,
            slug: envSlug
          })
        }
      ]
    });
@@ -470,11 +511,6 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
        data: docs,
        key: "id",
        parentMapper: (data) => ({
          environment: {
            id: data.envId,
            name: data.envName,
            slug: data.envSlug
          },
          projectId: data.projectId,
          ...AccessApprovalPoliciesSchema.parse(data)
          // secretPath: data.secretPath || undefined,
@@ -517,6 +553,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
            id,
            type: BypasserType.Group as const
          })
        },
        {
          key: "environmentId",
          label: "environments" as const,
          mapper: ({ environmentId: id, envName, envSlug }) => ({
            id,
            name: envName,
            slug: envSlug
          })
        }
      ]
    });
@@ -545,14 +590,20 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
          // eslint-disable-next-line @typescript-eslint/no-misused-promises
          buildFindFilter(
            {
              envId,
              secretPath
            },
            TableName.AccessApprovalPolicy
          )
        )
        .join(
          TableName.AccessApprovalPolicyEnvironment,
          `${TableName.AccessApprovalPolicyEnvironment}.policyId`,
          `${TableName.AccessApprovalPolicy}.id`
        )
        .where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", envId)
        .orderBy("deletedAt", "desc")
        .orderByRaw(`"deletedAt" IS NULL`)
        .select(selectAllTableCols(TableName.AccessApprovalPolicy))
        .first();

      return result;
@@ -561,5 +612,81 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
    }
  };

  return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
  const findPolicyByEnvIdAndSecretPath: TAccessApprovalPolicyDALFactory["findPolicyByEnvIdAndSecretPath"] = async (
    { envIds, secretPath },
    tx
  ) => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
        .join(
          TableName.AccessApprovalPolicyEnvironment,
          `${TableName.AccessApprovalPolicyEnvironment}.policyId`,
          `${TableName.AccessApprovalPolicy}.id`
        )
        .join(
          TableName.Environment,
          `${TableName.AccessApprovalPolicyEnvironment}.envId`,
          `${TableName.Environment}.id`
        )
        .where(
          // eslint-disable-next-line @typescript-eslint/no-misused-promises
          buildFindFilter(
            {
              $in: {
                envId: envIds
              }
            },
            TableName.AccessApprovalPolicyEnvironment
          )
        )
        .where(
          // eslint-disable-next-line @typescript-eslint/no-misused-promises
          buildFindFilter(
            {
              secretPath
            },
            TableName.AccessApprovalPolicy
          )
        )
        .whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
        .orderBy("deletedAt", "desc")
        .orderByRaw(`"deletedAt" IS NULL`)
        .select(selectAllTableCols(TableName.AccessApprovalPolicy))
        .select(db.ref("name").withSchema(TableName.Environment).as("envName"))
        .select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
        .select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
        .select(db.ref("projectId").withSchema(TableName.Environment));
      const formattedDocs = sqlNestRelationships({
        data: docs,
        key: "id",
        parentMapper: (data) => ({
          projectId: data.projectId,
          ...AccessApprovalPoliciesSchema.parse(data)
        }),
        childrenMapper: [
          {
            key: "environmentId",
            label: "environments" as const,
            mapper: ({ environmentId: id, envName, envSlug }) => ({
              id,
              name: envName,
              slug: envSlug
            })
          }
        ]
      });
      return formattedDocs?.[0];
    } catch (error) {
      throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
    }
  };

  return {
    ...accessApprovalPolicyOrm,
    find,
    findById,
    softDeleteById,
    findLastValidPolicy,
    findPolicyByEnvIdAndSecretPath
  };
};

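// A minimal sketch of how the new findPolicyByEnvIdAndSecretPath lookup might be consumed.
// The DAL instance, environment IDs, and secret path below are placeholders, not values from this diff.
const existingPolicy = await accessApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
  envIds: ["env-dev-id", "env-staging-id"], // assumed environment IDs
  secretPath: "/payments"
});

if (existingPolicy) {
  // The first matching non-deleted policy is returned with its `environments` children nested.
  console.log(`Conflicting policy ${existingPolicy.id} spans`, existingPolicy.environments);
}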
@@ -0,0 +1,32 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TAccessApprovalPolicyEnvironmentDALFactory = ReturnType<typeof accessApprovalPolicyEnvironmentDALFactory>;

export const accessApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
  const accessApprovalPolicyEnvironmentOrm = ormify(db, TableName.AccessApprovalPolicyEnvironment);

  const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicyEnvironment)
        .join(
          TableName.AccessApprovalPolicy,
          `${TableName.AccessApprovalPolicyEnvironment}.policyId`,
          `${TableName.AccessApprovalPolicy}.id`
        )
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ envId }, TableName.AccessApprovalPolicyEnvironment))
        .whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
        .select(selectAllTableCols(TableName.AccessApprovalPolicyEnvironment));
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
    }
  };

  return { ...accessApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};
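// One plausible use of the new helper above: guard environment deletion while active
// policies still reference it. The calling context and envId are assumed for illustration.
const attachedPolicyLinks = await accessApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId("env-dev-id");

if (attachedPolicyLinks.length) {
  // Each row links the environment to a non-deleted access approval policy.
  throw new Error("Environment is still referenced by active access approval policies");
}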
@@ -21,6 +21,7 @@ import {
|
||||
TAccessApprovalPolicyBypasserDALFactory
|
||||
} from "./access-approval-policy-approver-dal";
|
||||
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
|
||||
import { TAccessApprovalPolicyEnvironmentDALFactory } from "./access-approval-policy-environment-dal";
|
||||
import {
|
||||
ApproverType,
|
||||
BypasserType,
|
||||
@@ -45,12 +46,14 @@ type TAccessApprovalPolicyServiceFactoryDep = {
|
||||
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
|
||||
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update" | "delete">;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
|
||||
accessApprovalPolicyEnvironmentDAL: TAccessApprovalPolicyEnvironmentDALFactory;
|
||||
};
|
||||
|
||||
export const accessApprovalPolicyServiceFactory = ({
|
||||
accessApprovalPolicyDAL,
|
||||
accessApprovalPolicyApproverDAL,
|
||||
accessApprovalPolicyBypasserDAL,
|
||||
accessApprovalPolicyEnvironmentDAL,
|
||||
groupDAL,
|
||||
permissionService,
|
||||
projectEnvDAL,
|
||||
@@ -63,21 +66,22 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
|
||||
const $policyExists = async ({
|
||||
envId,
|
||||
envIds,
|
||||
secretPath,
|
||||
policyId
|
||||
}: {
|
||||
envId: string;
|
||||
envId?: string;
|
||||
envIds?: string[];
|
||||
secretPath: string;
|
||||
policyId?: string;
|
||||
}) => {
|
||||
const policy = await accessApprovalPolicyDAL
|
||||
.findOne({
|
||||
envId,
|
||||
secretPath,
|
||||
deletedAt: null
|
||||
})
|
||||
.catch(() => null);
|
||||
|
||||
if (!envId && !envIds) {
|
||||
throw new BadRequestError({ message: "Must provide either envId or envIds" });
|
||||
}
|
||||
const policy = await accessApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
|
||||
secretPath,
|
||||
envIds: envId ? [envId] : (envIds as string[])
|
||||
});
|
||||
return policyId ? policy && policy.id !== policyId : Boolean(policy);
|
||||
};
|
||||
|
||||
@@ -93,6 +97,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
bypassers,
|
||||
projectSlug,
|
||||
environment,
|
||||
environments,
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals,
|
||||
approvalsRequired
|
||||
@@ -125,13 +130,23 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionSub.SecretApproval
|
||||
);
|
||||
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
|
||||
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
|
||||
const mergedEnvs = (environment ? [environment] : environments) || [];
|
||||
if (mergedEnvs.length === 0) {
|
||||
throw new BadRequestError({ message: "Must provide either environment or environments" });
|
||||
}
|
||||
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId: project.id });
|
||||
if (!envs.length || envs.length !== mergedEnvs.length) {
|
||||
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
|
||||
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
|
||||
}
|
||||
|
||||
if (await $policyExists({ envId: env.id, secretPath })) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
|
||||
});
|
||||
for (const env of envs) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
if (await $policyExists({ envId: env.id, secretPath })) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let approverUserIds = userApprovers;
|
||||
@@ -199,7 +214,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
|
||||
const doc = await accessApprovalPolicyDAL.create(
|
||||
{
|
||||
envId: env.id,
|
||||
envId: envs[0].id,
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
@@ -208,6 +223,10 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
await accessApprovalPolicyEnvironmentDAL.insertMany(
|
||||
envs.map((el) => ({ policyId: doc.id, envId: el.id })),
|
||||
tx
|
||||
);
|
||||
|
||||
if (approverUserIds.length) {
|
||||
await accessApprovalPolicyApproverDAL.insertMany(
|
||||
@@ -260,7 +279,7 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
return doc;
|
||||
});
|
||||
|
||||
return { ...accessApproval, environment: env, projectId: project.id };
|
||||
return { ...accessApproval, environments: envs, projectId: project.id, environment: envs[0] };
|
||||
};
|
||||
|
||||
const getAccessApprovalPolicyByProjectSlug: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyByProjectSlug"] =
|
||||
@@ -279,7 +298,10 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
});
|
||||
|
||||
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
|
||||
return accessApprovalPolicies;
|
||||
return accessApprovalPolicies.map((policy) => ({
|
||||
...policy,
|
||||
environment: policy.environments[0]
|
||||
}));
|
||||
};
|
||||
|
||||
const updateAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["updateAccessApprovalPolicy"] = async ({
|
||||
@@ -295,7 +317,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
approvals,
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals,
|
||||
approvalsRequired
|
||||
approvalsRequired,
|
||||
environments
|
||||
}: TUpdateAccessApprovalPolicy) => {
|
||||
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
|
||||
|
||||
@@ -323,16 +346,27 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
}
|
||||
|
||||
let envs = accessApprovalPolicy.environments;
|
||||
if (
|
||||
await $policyExists({
|
||||
envId: accessApprovalPolicy.envId,
|
||||
secretPath: secretPath || accessApprovalPolicy.secretPath,
|
||||
policyId: accessApprovalPolicy.id
|
||||
})
|
||||
environments &&
|
||||
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath}' already exists in environment '${accessApprovalPolicy.environment.slug}'`
|
||||
});
|
||||
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: accessApprovalPolicy.projectId });
|
||||
}
|
||||
|
||||
for (const env of envs) {
|
||||
if (
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await $policyExists({
|
||||
envId: env.id,
|
||||
secretPath: secretPath || accessApprovalPolicy.secretPath,
|
||||
policyId: accessApprovalPolicy.id
|
||||
})
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath || accessApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
@@ -488,6 +522,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
if (environments) {
|
||||
await accessApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
|
||||
await accessApprovalPolicyEnvironmentDAL.insertMany(
|
||||
envs.map((env) => ({ policyId: doc.id, envId: env.id })),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
|
||||
|
||||
if (bypasserUserIds.length) {
|
||||
@@ -517,7 +559,8 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
|
||||
return {
|
||||
...updatedPolicy,
|
||||
environment: accessApprovalPolicy.environment,
|
||||
environments: accessApprovalPolicy.environments,
|
||||
environment: accessApprovalPolicy.environments[0],
|
||||
projectId: accessApprovalPolicy.projectId
|
||||
};
|
||||
};
|
||||
@@ -568,7 +611,10 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
}
|
||||
});
|
||||
|
||||
return policy;
|
||||
return {
|
||||
...policy,
|
||||
environment: policy.environments[0]
|
||||
};
|
||||
};
|
||||
|
||||
const getAccessPolicyCountByEnvSlug: TAccessApprovalPolicyServiceFactory["getAccessPolicyCountByEnvSlug"] = async ({
|
||||
@@ -598,11 +644,13 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
|
||||
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
|
||||
|
||||
const policies = await accessApprovalPolicyDAL.find({
|
||||
envId: environment.id,
|
||||
projectId: project.id,
|
||||
deletedAt: null
|
||||
});
|
||||
const policies = await accessApprovalPolicyDAL.find(
|
||||
{
|
||||
projectId: project.id,
|
||||
deletedAt: null
|
||||
},
|
||||
{ envId: environment.id }
|
||||
);
|
||||
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
|
||||
|
||||
return { count: policies.length };
|
||||
@@ -634,7 +682,10 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
|
||||
|
||||
return policy;
|
||||
return {
|
||||
...policy,
|
||||
environment: policy.environments[0]
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
|
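// Illustrative payload only: with the multi-environment support introduced above, a single
// access approval policy can be created against several environment slugs instead of the old
// single `environment` field. Field values and the enum members shown are assumptions.
await accessApprovalPolicyService.createAccessApprovalPolicy({
  projectSlug: "my-project", // assumed project slug
  name: "prod-access-policy",
  secretPath: "/",
  environments: ["staging", "production"], // replaces the single `environment` field
  approvals: 2,
  approvers: [{ type: ApproverType.User, username: "alice" }],
  enforcementLevel: EnforcementLevel.Hard, // enum value assumed
  allowedSelfApprovals: false
  // actor / auth fields omitted for brevity
});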
@@ -26,7 +26,8 @@ export enum BypasserType {
export type TCreateAccessApprovalPolicy = {
  approvals: number;
  secretPath: string;
  environment: string;
  environment?: string;
  environments?: string[];
  approvers: (
    | { type: ApproverType.Group; id: string; sequence?: number }
    | { type: ApproverType.User; id?: string; username?: string; sequence?: number }
@@ -58,6 +59,7 @@ export type TUpdateAccessApprovalPolicy = {
  enforcementLevel?: EnforcementLevel;
  allowedSelfApprovals: boolean;
  approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
  environments?: string[];
} & Omit<TProjectPermission, "projectId">;

export type TDeleteAccessApprovalPolicy = {
@@ -113,6 +115,15 @@ export interface TAccessApprovalPolicyServiceFactory {
      slug: string;
      position: number;
    };
    environments: {
      name: string;
      id: string;
      createdAt: Date;
      updatedAt: Date;
      projectId: string;
      slug: string;
      position: number;
    }[];
    projectId: string;
    name: string;
    id: string;
@@ -153,6 +164,11 @@ export interface TAccessApprovalPolicyServiceFactory {
      name: string;
      slug: string;
    };
    environments: {
      id: string;
      name: string;
      slug: string;
    }[];
    projectId: string;
  }>;
  updateAccessApprovalPolicy: ({
@@ -168,13 +184,19 @@ export interface TAccessApprovalPolicyServiceFactory {
    approvals,
    enforcementLevel,
    allowedSelfApprovals,
    approvalsRequired
    approvalsRequired,
    environments
  }: TUpdateAccessApprovalPolicy) => Promise<{
    environment: {
      id: string;
      name: string;
      slug: string;
    };
    environments: {
      id: string;
      name: string;
      slug: string;
    }[];
    projectId: string;
    name: string;
    id: string;
@@ -225,6 +247,11 @@ export interface TAccessApprovalPolicyServiceFactory {
      name: string;
      slug: string;
    };
    environments: {
      id: string;
      name: string;
      slug: string;
    }[];
    projectId: string;
    bypassers: (
      | {
@@ -276,6 +303,11 @@ export interface TAccessApprovalPolicyServiceFactory {
      name: string;
      slug: string;
    };
    environments: {
      id: string;
      name: string;
      slug: string;
    }[];
    projectId: string;
    bypassers: (
      | {
@@ -65,7 +65,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
      deletedAt: Date | null | undefined;
    };
    projectId: string;
    environment: string;
    environments: string[];
    requestedByUser: {
      userId: string;
      email: string | null | undefined;
@@ -515,7 +515,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
        `accessApprovalReviewerUser.id`
      )

      .leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
      .leftJoin(
        TableName.AccessApprovalPolicyEnvironment,
        `${TableName.AccessApprovalPolicy}.id`,
        `${TableName.AccessApprovalPolicyEnvironment}.policyId`
      )

      .leftJoin(
        TableName.Environment,
        `${TableName.AccessApprovalPolicyEnvironment}.envId`,
        `${TableName.Environment}.id`
      )
      .select(selectAllTableCols(TableName.AccessApprovalRequest))
      .select(
        tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
@@ -683,6 +693,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
            lastName,
            username
          })
        },
        {
          key: "environment",
          label: "environments" as const,
          mapper: ({ environment }) => environment
        }
      ]
    });
@@ -86,6 +86,25 @@ export const accessApprovalRequestServiceFactory = ({
  projectMicrosoftTeamsConfigDAL,
  projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep): TAccessApprovalRequestServiceFactory => {
  const $getEnvironmentFromPermissions = (permissions: unknown): string | null => {
    if (!Array.isArray(permissions) || permissions.length === 0) {
      return null;
    }

    const firstPermission = permissions[0] as unknown[];
    if (!Array.isArray(firstPermission) || firstPermission.length < 3) {
      return null;
    }

    const metadata = firstPermission[2] as Record<string, unknown>;
    if (typeof metadata === "object" && metadata !== null && "environment" in metadata) {
      const env = metadata.environment;
      return typeof env === "string" ? env : null;
    }

    return null;
  };

  const createAccessApprovalRequest: TAccessApprovalRequestServiceFactory["createAccessApprovalRequest"] = async ({
    isTemporary,
    temporaryRange,
@@ -308,6 +327,15 @@ export const accessApprovalRequestServiceFactory = ({
      requests = requests.filter((request) => request.environment === envSlug);
    }

    requests = requests.map((request) => {
      const permissionEnvironment = $getEnvironmentFromPermissions(request.permissions);

      if (permissionEnvironment) {
        request.environmentName = permissionEnvironment;
      }
      return request;
    });

    return { requests };
  };

@@ -325,13 +353,27 @@ export const accessApprovalRequestServiceFactory = ({
      throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
    }

    const { policy, environment } = accessApprovalRequest;
    const { policy, environments, permissions } = accessApprovalRequest;
    if (policy.deletedAt) {
      throw new BadRequestError({
        message: "The policy associated with this access request has been deleted."
      });
    }

    const permissionEnvironment = $getEnvironmentFromPermissions(permissions);
    if (
      !permissionEnvironment ||
      (!environments.includes(permissionEnvironment) && status === ApprovalStatus.APPROVED)
    ) {
      throw new BadRequestError({
        message: `The original policy ${policy.name} is not attached to environment '${permissionEnvironment}'.`
      });
    }
    const environment = await projectEnvDAL.findOne({
      projectId: accessApprovalRequest.projectId,
      slug: permissionEnvironment
    });

    const { membership, hasRole } = await permissionService.getProjectPermission({
      actor,
      actorId,
@@ -553,7 +595,7 @@ export const accessApprovalRequestServiceFactory = ({
          requesterEmail: actingUser.email,
          bypassReason: bypassReason || "No reason provided",
          secretPath: policy.secretPath || "/",
          environment,
          environment: environment?.name || permissionEnvironment,
          approvalUrl: `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`,
          requestType: "access"
        },
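// Shape assumed by $getEnvironmentFromPermissions above: a CASL-style rule tuple whose third
// element carries request metadata. The values below are purely illustrative, not taken from this diff.
const examplePermissions = [
  ["read", "secrets", { environment: "staging", secretPath: "/api" }]
];

// Inside the factory this would resolve to "staging"; any malformed input falls through to null.
const resolvedEnv = $getEnvironmentFromPermissions(examplePermissions);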
@@ -5,13 +5,14 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here

import { ForbiddenError } from "@casl/ability";
import { AxiosError } from "axios";
import { CronJob } from "cron";
import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -603,10 +604,22 @@ export const licenseServiceFactory = ({
      });
    }

    const { data } = await licenseServerCloudApi.request.delete(
      `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
    );
    return data;
    try {
      const { data } = await licenseServerCloudApi.request.delete(
        `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
      );
      return data;
    } catch (error) {
      if (error instanceof AxiosError) {
        throw new BadRequestError({
          // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
          message: `Failed to remove payment method: ${error.response?.data?.message}`
        });
      }
      throw new BadRequestError({
        message: "Unable to remove payment method"
      });
    }
  };

  const getOrgTaxIds = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgTaxIdDTO) => {
@@ -579,6 +579,9 @@ export const scimServiceFactory = ({
    });

    const serverCfg = await getServerCfg();
    const hasEmailChanged = email?.toLowerCase() !== membership.email;
    const defaultEmailVerified =
      org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
    await userDAL.transaction(async (tx) => {
      await userAliasDAL.update(
        {
@@ -605,8 +608,7 @@ export const scimServiceFactory = ({
          firstName,
          email: email?.toLowerCase(),
          lastName,
          isEmailVerified:
            org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
          isEmailVerified: hasEmailChanged ? defaultEmailVerified : undefined
        },
        tx
      );
@@ -23,6 +23,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
|
||||
customFilter?: {
|
||||
sapId?: string;
|
||||
envId?: string;
|
||||
}
|
||||
) =>
|
||||
tx(TableName.SecretApprovalPolicy)
|
||||
@@ -33,7 +34,17 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
void qb.where(`${TableName.SecretApprovalPolicy}.id`, "=", customFilter.sapId);
|
||||
}
|
||||
})
|
||||
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicyEnvironment,
|
||||
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
|
||||
`${TableName.SecretApprovalPolicy}.id`
|
||||
)
|
||||
.join(TableName.Environment, `${TableName.SecretApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
|
||||
.where((qb) => {
|
||||
if (customFilter?.envId) {
|
||||
void qb.where(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
|
||||
}
|
||||
})
|
||||
.leftJoin(
|
||||
TableName.SecretApprovalPolicyApprover,
|
||||
`${TableName.SecretApprovalPolicy}.id`,
|
||||
@@ -97,7 +108,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
.select(
|
||||
tx.ref("name").withSchema(TableName.Environment).as("envName"),
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
|
||||
tx.ref("id").withSchema(TableName.Environment).as("envId"),
|
||||
tx.ref("id").withSchema(TableName.Environment).as("environmentId"),
|
||||
tx.ref("projectId").withSchema(TableName.Environment)
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
|
||||
@@ -146,6 +157,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
firstName,
|
||||
lastName
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "environmentId",
|
||||
label: "environments" as const,
|
||||
mapper: ({ environmentId, envName, envSlug }) => ({
|
||||
id: environmentId,
|
||||
name: envName,
|
||||
slug: envSlug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
@@ -160,6 +180,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
|
||||
customFilter?: {
|
||||
sapId?: string;
|
||||
envId?: string;
|
||||
},
|
||||
tx?: Knex
|
||||
) => {
|
||||
@@ -221,6 +242,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
mapper: ({ approverGroupUserId: userId }) => ({
|
||||
userId
|
||||
})
|
||||
},
|
||||
{
|
||||
key: "environmentId",
|
||||
label: "environments" as const,
|
||||
mapper: ({ environmentId, envName, envSlug }) => ({
|
||||
id: environmentId,
|
||||
name: envName,
|
||||
slug: envSlug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
@@ -235,5 +265,74 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
return softDeletedPolicy;
|
||||
};
|
||||
|
||||
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById };
|
||||
const findPolicyByEnvIdAndSecretPath = async (
|
||||
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
|
||||
tx?: Knex
|
||||
) => {
|
||||
try {
|
||||
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicy)
|
||||
.join(
|
||||
TableName.SecretApprovalPolicyEnvironment,
|
||||
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
|
||||
`${TableName.SecretApprovalPolicy}.id`
|
||||
)
|
||||
.join(
|
||||
TableName.Environment,
|
||||
`${TableName.SecretApprovalPolicyEnvironment}.envId`,
|
||||
`${TableName.Environment}.id`
|
||||
)
|
||||
.where(
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
buildFindFilter(
|
||||
{
|
||||
$in: {
|
||||
envId: envIds
|
||||
}
|
||||
},
|
||||
TableName.SecretApprovalPolicyEnvironment
|
||||
)
|
||||
)
|
||||
.where(
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
buildFindFilter(
|
||||
{
|
||||
secretPath
|
||||
},
|
||||
TableName.SecretApprovalPolicy
|
||||
)
|
||||
)
|
||||
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
|
||||
.orderBy("deletedAt", "desc")
|
||||
.orderByRaw(`"deletedAt" IS NULL`)
|
||||
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
|
||||
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
|
||||
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
|
||||
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
|
||||
.select(db.ref("projectId").withSchema(TableName.Environment));
|
||||
const formattedDocs = sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (data) => ({
|
||||
projectId: data.projectId,
|
||||
...SecretApprovalPoliciesSchema.parse(data)
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "environmentId",
|
||||
label: "environments" as const,
|
||||
mapper: ({ environmentId: id, envName, envSlug }) => ({
|
||||
id,
|
||||
name: envName,
|
||||
slug: envSlug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
return formattedDocs?.[0];
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
|
||||
}
|
||||
};
|
||||
|
||||
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById, findPolicyByEnvIdAndSecretPath };
|
||||
};
|
||||
|
@@ -0,0 +1,32 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";

export type TSecretApprovalPolicyEnvironmentDALFactory = ReturnType<typeof secretApprovalPolicyEnvironmentDALFactory>;

export const secretApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
  const secretApprovalPolicyEnvironmentOrm = ormify(db, TableName.SecretApprovalPolicyEnvironment);

  const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
    try {
      const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicyEnvironment)
        .join(
          TableName.SecretApprovalPolicy,
          `${TableName.SecretApprovalPolicyEnvironment}.policyId`,
          `${TableName.SecretApprovalPolicy}.id`
        )
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        .where(buildFindFilter({ envId }, TableName.SecretApprovalPolicyEnvironment))
        .whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
        .select(selectAllTableCols(TableName.SecretApprovalPolicyEnvironment));
      return docs;
    } catch (error) {
      throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
    }
  };

  return { ...secretApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};
@@ -19,6 +19,7 @@ import {
|
||||
TSecretApprovalPolicyBypasserDALFactory
|
||||
} from "./secret-approval-policy-approver-dal";
|
||||
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
|
||||
import { TSecretApprovalPolicyEnvironmentDALFactory } from "./secret-approval-policy-environment-dal";
|
||||
import {
|
||||
TCreateSapDTO,
|
||||
TDeleteSapDTO,
|
||||
@@ -36,12 +37,13 @@ const getPolicyScore = (policy: { secretPath?: string | null }) =>
|
||||
type TSecretApprovalPolicyServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "find">;
|
||||
userDAL: Pick<TUserDALFactory, "find">;
|
||||
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
|
||||
secretApprovalPolicyBypasserDAL: TSecretApprovalPolicyBypasserDALFactory;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "update">;
|
||||
secretApprovalPolicyEnvironmentDAL: TSecretApprovalPolicyEnvironmentDALFactory;
|
||||
};
|
||||
|
||||
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
|
||||
@@ -51,27 +53,30 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
permissionService,
|
||||
secretApprovalPolicyApproverDAL,
|
||||
secretApprovalPolicyBypasserDAL,
|
||||
secretApprovalPolicyEnvironmentDAL,
|
||||
projectEnvDAL,
|
||||
userDAL,
|
||||
licenseService,
|
||||
secretApprovalRequestDAL
|
||||
}: TSecretApprovalPolicyServiceFactoryDep) => {
|
||||
const $policyExists = async ({
|
||||
envIds,
|
||||
envId,
|
||||
secretPath,
|
||||
policyId
|
||||
}: {
|
||||
envId: string;
|
||||
envIds?: string[];
|
||||
envId?: string;
|
||||
secretPath: string;
|
||||
policyId?: string;
|
||||
}) => {
|
||||
const policy = await secretApprovalPolicyDAL
|
||||
.findOne({
|
||||
envId,
|
||||
secretPath,
|
||||
deletedAt: null
|
||||
})
|
||||
.catch(() => null);
|
||||
if (!envIds && !envId) {
|
||||
throw new BadRequestError({ message: "At least one environment should be provided" });
|
||||
}
|
||||
const policy = await secretApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
|
||||
envIds: envId ? [envId] : envIds || [],
|
||||
secretPath
|
||||
});
|
||||
|
||||
return policyId ? policy && policy.id !== policyId : Boolean(policy);
|
||||
};
|
||||
@@ -88,6 +93,7 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
projectId,
|
||||
secretPath,
|
||||
environment,
|
||||
environments,
|
||||
enforcementLevel,
|
||||
allowedSelfApprovals
|
||||
}: TCreateSapDTO) => {
|
||||
@@ -127,17 +133,23 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
|
||||
if (!env) {
|
||||
throw new NotFoundError({
|
||||
message: `Environment with slug '${environment}' not found in project with ID ${projectId}`
|
||||
});
|
||||
const mergedEnvs = (environment ? [environment] : environments) || [];
|
||||
if (mergedEnvs.length === 0) {
|
||||
throw new BadRequestError({ message: "Must provide either environment or environments" });
|
||||
}
|
||||
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId });
|
||||
if (!envs.length || envs.length !== mergedEnvs.length) {
|
||||
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
|
||||
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
|
||||
}
|
||||
|
||||
if (await $policyExists({ envId: env.id, secretPath })) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
|
||||
});
|
||||
for (const env of envs) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
if (await $policyExists({ envId: env.id, secretPath })) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let groupBypassers: string[] = [];
|
||||
@@ -181,7 +193,7 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
|
||||
const doc = await secretApprovalPolicyDAL.create(
|
||||
{
|
||||
envId: env.id,
|
||||
envId: envs[0].id,
|
||||
approvals,
|
||||
secretPath,
|
||||
name,
|
||||
@@ -190,6 +202,13 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
await secretApprovalPolicyEnvironmentDAL.insertMany(
|
||||
envs.map((env) => ({
|
||||
envId: env.id,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
|
||||
let userApproverIds = userApprovers;
|
||||
if (userApproverNames.length) {
|
||||
@@ -253,12 +272,13 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
return doc;
|
||||
});
|
||||
|
||||
return { ...secretApproval, environment: env, projectId };
|
||||
return { ...secretApproval, environments: envs, projectId, environment: envs[0] };
|
||||
};
|
||||
|
||||
const updateSecretApprovalPolicy = async ({
|
||||
approvers,
|
||||
bypassers,
|
||||
environments,
|
||||
secretPath,
|
||||
name,
|
||||
actorId,
|
||||
@@ -288,17 +308,26 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
message: `Secret approval policy with ID '${secretPolicyId}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
let envs = secretApprovalPolicy.environments;
|
||||
if (
|
||||
await $policyExists({
|
||||
envId: secretApprovalPolicy.envId,
|
||||
secretPath: secretPath || secretApprovalPolicy.secretPath,
|
||||
policyId: secretApprovalPolicy.id
|
||||
})
|
||||
environments &&
|
||||
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath}' already exists in environment '${secretApprovalPolicy.environment.slug}'`
|
||||
});
|
||||
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: secretApprovalPolicy.projectId });
|
||||
}
|
||||
for (const env of envs) {
|
||||
if (
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await $policyExists({
|
||||
envId: env.id,
|
||||
secretPath: secretPath || secretApprovalPolicy.secretPath,
|
||||
policyId: secretApprovalPolicy.id
|
||||
})
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: `A policy for secret path '${secretPath || secretApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
@@ -415,6 +444,17 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
if (environments) {
|
||||
await secretApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
|
||||
await secretApprovalPolicyEnvironmentDAL.insertMany(
|
||||
envs.map((env) => ({
|
||||
envId: env.id,
|
||||
policyId: doc.id
|
||||
})),
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
await secretApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
|
||||
|
||||
if (bypasserUserIds.length) {
|
||||
@@ -441,7 +481,8 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
});
|
||||
return {
|
||||
...updatedSap,
|
||||
environment: secretApprovalPolicy.environment,
|
||||
environments: secretApprovalPolicy.environments,
|
||||
environment: secretApprovalPolicy.environments[0],
|
||||
projectId: secretApprovalPolicy.projectId
|
||||
};
|
||||
};
|
||||
@@ -487,7 +528,12 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
const updatedPolicy = await secretApprovalPolicyDAL.softDeleteById(secretPolicyId, tx);
|
||||
return updatedPolicy;
|
||||
});
|
||||
return { ...deletedPolicy, projectId: sapPolicy.projectId, environment: sapPolicy.environment };
|
||||
return {
|
||||
...deletedPolicy,
|
||||
projectId: sapPolicy.projectId,
|
||||
environments: sapPolicy.environments,
|
||||
environment: sapPolicy.environments[0]
|
||||
};
|
||||
};
|
||||
|
||||
const getSecretApprovalPolicyByProjectId = async ({
|
||||
@@ -520,7 +566,7 @@ export const secretApprovalPolicyServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const policies = await secretApprovalPolicyDAL.find({ envId: env.id, deletedAt: null });
|
||||
const policies = await secretApprovalPolicyDAL.find({ deletedAt: null }, { envId: env.id });
|
||||
if (!policies.length) return;
|
||||
// this will filter policies either without scoped to secret path or the one that matches with secret path
|
||||
const policiesFilteredByPath = policies.filter(
|
||||
|
@@ -5,7 +5,8 @@ import { ApproverType, BypasserType } from "../access-approval-policy/access-app
export type TCreateSapDTO = {
  approvals: number;
  secretPath: string;
  environment: string;
  environment?: string;
  environments?: string[];
  approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
  bypassers?: (
    | { type: BypasserType.Group; id: string }
@@ -29,6 +30,7 @@ export type TUpdateSapDTO = {
  name?: string;
  enforcementLevel?: EnforcementLevel;
  allowedSelfApprovals?: boolean;
  environments?: string[];
} & Omit<TProjectPermission, "projectId">;

export type TDeleteSapDTO = {
@@ -40,6 +40,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
        `${TableName.SecretApprovalRequest}.policyId`,
        `${TableName.SecretApprovalPolicy}.id`
      )
      .leftJoin(TableName.SecretApprovalPolicyEnvironment, (bd) => {
        bd.on(
          `${TableName.SecretApprovalPolicy}.id`,
          "=",
          `${TableName.SecretApprovalPolicyEnvironment}.policyId`
        ).andOn(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", `${TableName.SecretFolder}.envId`);
      })
      .leftJoin<TUsers>(
        db(TableName.Users).as("statusChangedByUser"),
        `${TableName.SecretApprovalRequest}.statusChangedByUserId`,
@@ -146,7 +153,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
        tx.ref("projectId").withSchema(TableName.Environment),
        tx.ref("slug").withSchema(TableName.Environment).as("environment"),
        tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
        tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
        tx.ref("envId").withSchema(TableName.SecretApprovalPolicyEnvironment).as("policyEnvId"),
        tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
        tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
        tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
@@ -65,10 +65,14 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
|
||||
import {
|
||||
hasSecretReadValueOrDescribePermission,
|
||||
throwIfMissingSecretReadValueOrDescribePermission
|
||||
} from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service-types";
|
||||
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
|
||||
import { scanSecretPolicyViolations } from "../secret-scanning-v2/secret-scanning-v2-fns";
|
||||
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
|
||||
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
|
||||
import { sendApprovalEmailsFn } from "./secret-approval-request-fns";
|
||||
@@ -276,13 +280,19 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
) {
|
||||
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
|
||||
}
|
||||
|
||||
const hasSecretReadAccess = permission.can(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
|
||||
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath: secretPath || "/",
|
||||
secretTags: tags.map((i) => i.slug)
|
||||
});
|
||||
return canRead;
|
||||
};
|
||||
|
||||
let secrets;
|
||||
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
|
||||
secretApprovalRequest.folderId
|
||||
]);
|
||||
if (shouldUseSecretV2Bridge) {
|
||||
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
@@ -298,8 +308,8 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
version: el.version,
|
||||
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
|
||||
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
|
||||
secretValueHidden: !hasSecretReadAccess,
|
||||
secretValue: !hasSecretReadAccess
|
||||
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
|
||||
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
|
||||
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
|
||||
: el.secret && el.secret.isRotatedSecret
|
||||
? undefined
|
||||
@@ -314,8 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
secretKey: el.secret.key,
|
||||
id: el.secret.id,
|
||||
version: el.secret.version,
|
||||
secretValueHidden: !hasSecretReadAccess,
|
||||
secretValue: !hasSecretReadAccess
|
||||
secretValueHidden: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
),
|
||||
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
|
||||
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
|
||||
: el.secret.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
|
||||
@@ -330,8 +344,12 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
secretKey: el.secretVersion.key,
|
||||
id: el.secretVersion.id,
|
||||
version: el.secretVersion.version,
|
||||
secretValueHidden: !hasSecretReadAccess,
|
||||
secretValue: !hasSecretReadAccess
|
||||
secretValueHidden: !getHasSecretReadAccess(
|
||||
secretApprovalRequest.environment,
|
||||
el.tags,
|
||||
secretPath?.[0]?.path
|
||||
),
|
||||
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
|
||||
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
|
||||
: el.secretVersion.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
|
||||
@@ -349,7 +367,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
|
||||
secrets = encryptedSecrets.map((el) => ({
|
||||
...el,
|
||||
secretValueHidden: !hasSecretReadAccess,
|
||||
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
|
||||
...decryptSecretWithBot(el, botKey),
|
||||
secret: el.secret
|
||||
? {
|
||||
@@ -369,9 +387,6 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
: undefined
|
||||
}));
|
||||
}
|
||||
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
|
||||
secretApprovalRequest.folderId
|
||||
]);
|
||||
|
||||
return { ...secretApprovalRequest, secretPath: secretPath?.[0]?.path || "/", commits: secrets };
|
||||
};
|
||||
@@ -537,6 +552,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
message: "The policy associated with this secret approval request has been deleted."
|
||||
});
|
||||
}
|
||||
if (!policy.envId) {
|
||||
throw new BadRequestError({
|
||||
message: "The policy associated with this secret approval request is not linked to the environment."
|
||||
});
|
||||
}
|
||||
|
||||
const { hasRole } = await permissionService.getProjectPermission({
|
||||
actor: ActorType.USER,
|
||||
@@ -1407,6 +1427,20 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
projectId
|
||||
});
|
||||
|
||||
const project = await projectDAL.findById(projectId);
|
||||
await scanSecretPolicyViolations(
|
||||
projectId,
|
||||
secretPath,
|
||||
[
|
||||
...(data[SecretOperations.Create] || []),
|
||||
...(data[SecretOperations.Update] || []).filter((el) => el.secretValue)
|
||||
].map((el) => ({
|
||||
secretKey: el.secretKey,
|
||||
secretValue: el.secretValue as string
|
||||
})),
|
||||
project.secretDetectionIgnoreValues || []
|
||||
);
|
||||
|
||||
// for created secret approval change
|
||||
const createdSecrets = data[SecretOperations.Create];
|
||||
if (createdSecrets && createdSecrets?.length) {
|
||||
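// Rough equivalent of the per-secret visibility check introduced in the secret approval
// request service above: value access is now evaluated per environment, secret path, and
// tags rather than once per request. The literal values here are assumptions for illustration.
const secretValueHidden = !hasSecretReadValueOrDescribePermission(
  permission,
  ProjectPermissionSecretActions.ReadValue,
  {
    environment: "production", // environment of the approval request
    secretPath: "/billing", // folder path of the change
    secretTags: ["pci"] // tag slugs attached to the secret
  }
);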
|
@@ -21,6 +21,8 @@ const GRAPH_API_BASE = "https://graph.microsoft.com/v1.0";

type AzureErrorResponse = { error: { message: string } };

const EXPIRY_PADDING_IN_DAYS = 3;

const sleep = async () =>
  new Promise((resolve) => {
    setTimeout(resolve, 1000);
@@ -33,7 +35,8 @@ export const azureClientSecretRotationFactory: TRotationFactory<
  const {
    connection,
    parameters: { objectId, clientId: clientIdParam },
    secretsMapping
    secretsMapping,
    rotationInterval
  } = secretRotation;

  /**
@@ -50,7 +53,7 @@ export const azureClientSecretRotationFactory: TRotationFactory<
    )}-${now.getFullYear()}`;

    const endDateTime = new Date();
    endDateTime.setFullYear(now.getFullYear() + 5);
    endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS); // give 72 hour buffer

    try {
      const { data } = await request.post<AzureAddPasswordResponse>(
@@ -195,6 +198,12 @@ export const azureClientSecretRotationFactory: TRotationFactory<
    callback
  ) => {
    const credentials = await $rotateClientSecret();

    // 2.5 years as expiry is set to x2 interval for the inactive period of credential
    if (rotationInterval > Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS) {
      throw new BadRequestError({ message: "Azure does not support token duration over 5 years" });
    }

    return callback(credentials);
  };
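// Back-of-the-envelope check of the expiry math above (the interval value is an example):
// the generated client secret must outlive two rotation cycles plus a 3-day buffer, while
// Azure caps credential lifetime at roughly 5 years.
const EXPIRY_PADDING_IN_DAYS_EXAMPLE = 3;
const rotationIntervalDays = 90; // assumed rotation setting

const exampleEndDate = new Date();
exampleEndDate.setDate(exampleEndDate.getDate() + rotationIntervalDays * 2 + EXPIRY_PADDING_IN_DAYS_EXAMPLE); // 183 days out

const maxSupportedInterval = Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS_EXAMPLE; // 909 days
if (rotationIntervalDays > maxSupportedInterval) {
  throw new Error("Azure does not support token duration over 5 years");
}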
@@ -51,6 +51,7 @@ const baseSecretRotationV2Query = ({
    db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
    db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
    db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
    db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
    db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
    db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
    db
@@ -104,6 +105,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
    connectionCreatedAt,
    connectionUpdatedAt,
    connectionVersion,
    connectionGatewayId,
    connectionIsPlatformManagedCredentials,
    ...el
  } = secretRotation;
@@ -123,6 +125,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
      createdAt: connectionCreatedAt,
      updatedAt: connectionUpdatedAt,
      version: connectionVersion,
      gatewayId: connectionGatewayId,
      isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
    },
    folder: {
@@ -7,12 +7,13 @@ import {
  TRotationFactoryRevokeCredentials,
  TRotationFactoryRotateCredentials
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
  executeWithPotentialGateway,
  SQL_CONNECTION_ALTER_LOGIN_STATEMENT
} from "@app/services/app-connection/shared/sql";

import { generatePassword } from "../utils";
import { DEFAULT_PASSWORD_REQUIREMENTS, generatePassword } from "../utils";
import {
  TSqlCredentialsRotationGeneratedCredentials,
  TSqlCredentialsRotationWithConnection
@@ -32,6 +33,11 @@ const redactPasswords = (e: unknown, credentials: TSqlCredentialsRotationGenerat
  return redactedMessage;
};

const ORACLE_PASSWORD_REQUIREMENTS = {
  ...DEFAULT_PASSWORD_REQUIREMENTS,
  length: 30
};

export const sqlCredentialsRotationFactory: TRotationFactory<
  TSqlCredentialsRotationWithConnection,
  TSqlCredentialsRotationGeneratedCredentials
@@ -43,6 +49,9 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
    secretsMapping
  } = secretRotation;

  const passwordRequirement =
    connection.app === AppConnection.OracleDB ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;

  const executeOperation = <T>(
    operation: (client: Knex) => Promise<T>,
    credentialsOverride?: TSqlCredentialsRotationGeneratedCredentials[number]
@@ -65,7 +74,7 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
  const $validateCredentials = async (credentials: TSqlCredentialsRotationGeneratedCredentials[number]) => {
    try {
      await executeOperation(async (client) => {
        await client.raw("SELECT 1");
        await client.raw(connection.app === AppConnection.OracleDB ? `SELECT 1 FROM DUAL` : `Select 1`);
      }, credentials);
    } catch (error) {
      throw new Error(redactPasswords(error, [credentials]));
@@ -75,11 +84,13 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
  const issueCredentials: TRotationFactoryIssueCredentials<TSqlCredentialsRotationGeneratedCredentials> = async (
    callback
  ) => {
    // For SQL, since we get existing users, we change both their passwords
    // on issue to invalidate their existing passwords
    // For SQL, since we get existing users, we change both their passwords
    // on issue to invalidate their existing passwords
    const credentialsSet = [
      { username: username1, password: generatePassword() },
      { username: username2, password: generatePassword() }
      { username: username1, password: generatePassword(passwordRequirement) },
      { username: username2, password: generatePassword(passwordRequirement) }
    ];

    try {
@@ -105,7 +116,10 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
    credentialsToRevoke,
    callback
  ) => {
    const revokedCredentials = credentialsToRevoke.map(({ username }) => ({ username, password: generatePassword() }));
    const revokedCredentials = credentialsToRevoke.map(({ username }) => ({
      username,
      password: generatePassword(passwordRequirement)
    }));

    try {
      await executeOperation(async (client) => {
@@ -128,7 +142,10 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
    callback
  ) => {
    // generate new password for the next active user
    const credentials = { username: activeIndex === 0 ? username2 : username1, password: generatePassword() };
    const credentials = {
      username: activeIndex === 0 ? username2 : username1,
      password: generatePassword(passwordRequirement)
    };

    try {
      await executeOperation(async (client) => {
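// Minimal sketch of the Oracle-specific override above: the same default ruleset, but with the
// password length capped at 30 characters (Oracle's stricter length limit is the assumed rationale).
const passwordRequirementForConnection =
  connection.app === AppConnection.OracleDB ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;

const nextPassword = generatePassword(passwordRequirementForConnection); // 30 chars for Oracle, 48 otherwise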
@@ -11,7 +11,7 @@ type TPasswordRequirements = {
  allowedSymbols?: string;
};

const DEFAULT_PASSWORD_REQUIREMENTS: TPasswordRequirements = {
export const DEFAULT_PASSWORD_REQUIREMENTS: TPasswordRequirements = {
  length: 48,
  required: {
    lowercase: 1,
@@ -18,7 +18,8 @@ import {
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization,
  TSecretScanningFactoryTeardown
  TSecretScanningFactoryTeardown,
  TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
@@ -302,6 +303,13 @@ export const BitbucketSecretScanningFactory = () => {
    );
  };

  const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
    TBitbucketDataSourceInput["config"],
    TBitbucketDataSourceWithConnection
  > = async () => {
    // no validation required
  };

  return {
    initialize,
    postInitialization,
@@ -309,6 +317,7 @@ export const BitbucketSecretScanningFactory = () => {
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
    teardown
    teardown,
    validateConfigUpdate
  };
};
@@ -20,7 +20,8 @@ import {
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization,
  TSecretScanningFactoryTeardown
  TSecretScanningFactoryTeardown,
  TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
@@ -64,7 +65,14 @@ export const GitHubSecretScanningFactory = () => {
  };

  const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
    // no termination required
    // no teardown required
  };

  const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
    TGitHubDataSourceInput["config"],
    TGitHubDataSourceWithConnection
  > = async () => {
    // no validation required
  };

  const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
@@ -238,6 +246,7 @@ export const GitHubSecretScanningFactory = () => {
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
    teardown
    teardown,
    validateConfigUpdate
  };
};
@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "GitLab",
  type: SecretScanningDataSource.GitLab,
  connection: AppConnection.GitLab
};
@@ -0,0 +1,8 @@
export enum GitLabDataSourceScope {
  Project = "project",
  Group = "group"
}

export enum GitLabWebHookEvent {
  Push = "Push Hook"
}
@@ -0,0 +1,409 @@
|
||||
import { Camelize, GitbeakerRequestError, GroupHookSchema, ProjectHookSchema } from "@gitbeaker/rest";
|
||||
import { join } from "path";
|
||||
|
||||
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
|
||||
import {
|
||||
SecretScanningFindingSeverity,
|
||||
SecretScanningResource
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
cloneRepository,
|
||||
convertPatchLineToFileLineNumber,
|
||||
replaceNonChangesWithNewlines
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
|
||||
import {
|
||||
TSecretScanningFactoryGetDiffScanFindingsPayload,
|
||||
TSecretScanningFactoryGetDiffScanResourcePayload,
|
||||
TSecretScanningFactoryGetFullScanPath,
|
||||
TSecretScanningFactoryInitialize,
|
||||
TSecretScanningFactoryListRawResources,
|
||||
TSecretScanningFactoryParams,
|
||||
TSecretScanningFactoryPostInitialization,
|
||||
TSecretScanningFactoryTeardown,
|
||||
TSecretScanningFactoryValidateConfigUpdate
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { titleCaseToCamelCase } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { GitLabProjectRegex } from "@app/lib/regex";
|
||||
import {
|
||||
getGitLabConnectionClient,
|
||||
getGitLabInstanceUrl,
|
||||
TGitLabConnection
|
||||
} from "@app/services/app-connection/gitlab";
|
||||
|
||||
import { GitLabDataSourceScope } from "./gitlab-secret-scanning-enums";
|
||||
import {
|
||||
TGitLabDataSourceCredentials,
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabDataSourceWithConnection,
|
||||
TQueueGitLabResourceDiffScan
|
||||
} from "./gitlab-secret-scanning-types";
|
||||
|
||||
const getMainDomain = (instanceUrl: string) => {
|
||||
const url = new URL(instanceUrl);
|
||||
const { hostname } = url;
|
||||
const parts = hostname.split(".");
|
||||
|
||||
if (parts.length >= 2) {
|
||||
return parts.slice(-2).join(".");
|
||||
}
|
||||
|
||||
return hostname;
|
||||
};
|
||||
|
||||
export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TSecretScanningFactoryParams) => {
|
||||
const initialize: TSecretScanningFactoryInitialize<
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabConnection,
|
||||
TGitLabDataSourceCredentials
|
||||
> = async ({ payload: { config, name }, connection }, callback) => {
|
||||
const token = alphaNumericNanoId(64);
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
const appCfg = getConfig();
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
const project = await client.Projects.show(projectId);
|
||||
|
||||
if (!project) {
|
||||
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
|
||||
}
|
||||
|
||||
let hook: Camelize<ProjectHookSchema>;
|
||||
try {
|
||||
hook = await client.ProjectHooks.add(projectId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
|
||||
token,
|
||||
pushEvents: true,
|
||||
enableSslVerification: true,
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${name}`
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof GitbeakerRequestError) {
|
||||
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
try {
|
||||
return await callback({
|
||||
credentials: {
|
||||
token,
|
||||
hookId: hook.id
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.ProjectHooks.remove(projectId, hook.id);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// group scope
|
||||
const { groupId } = config;
|
||||
|
||||
const group = await client.Groups.show(groupId);
|
||||
|
||||
if (!group) {
|
||||
throw new BadRequestError({ message: `Could not find group with ID ${groupId}.` });
|
||||
}
|
||||
|
||||
let hook: Camelize<GroupHookSchema>;
|
||||
try {
|
||||
hook = await client.GroupHooks.add(groupId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
|
||||
token,
|
||||
pushEvents: true,
|
||||
enableSslVerification: true,
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${name}`
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof GitbeakerRequestError) {
|
||||
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
try {
|
||||
return await callback({
|
||||
credentials: {
|
||||
token,
|
||||
hookId: hook.id
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.GroupHooks.remove(groupId, hook.id);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const postInitialization: TSecretScanningFactoryPostInitialization<
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabConnection,
|
||||
TGitLabDataSourceCredentials
|
||||
> = async ({ connection, dataSourceId, credentials, payload: { config } }) => {
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
const appCfg = getConfig();
|
||||
|
||||
const hookUrl = `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`;
|
||||
const { hookId } = credentials;
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
|
||||
try {
|
||||
await client.ProjectHooks.edit(projectId, hookId, hookUrl, {
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${dataSourceId}`,
|
||||
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.ProjectHooks.remove(projectId, hookId);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// group-scope
|
||||
const { groupId } = config;
|
||||
|
||||
try {
|
||||
await client.GroupHooks.edit(groupId, hookId, hookUrl, {
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${dataSourceId}`,
|
||||
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.GroupHooks.remove(groupId, hookId);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const listRawResources: TSecretScanningFactoryListRawResources<TGitLabDataSourceWithConnection> = async (
|
||||
dataSource
|
||||
) => {
|
||||
const { connection, config } = dataSource;
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
|
||||
const project = await client.Projects.show(projectId);
|
||||
|
||||
if (!project) {
|
||||
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
|
||||
}
|
||||
|
||||
// scott: even though we have this data we want to get potentially updated name
|
||||
return [
|
||||
{
|
||||
name: project.pathWithNamespace,
|
||||
externalId: project.id.toString(),
|
||||
type: SecretScanningResource.Project
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
// group-scope
|
||||
|
||||
const { groupId, includeProjects } = config;
|
||||
|
||||
const projects = await client.Groups.allProjects(groupId, {
|
||||
archived: false
|
||||
});
|
||||
|
||||
const filteredProjects: typeof projects = [];
|
||||
if (!includeProjects || includeProjects.includes("*")) {
|
||||
filteredProjects.push(...projects);
|
||||
} else {
|
||||
filteredProjects.push(...projects.filter((project) => includeProjects.includes(project.pathWithNamespace)));
|
||||
}
|
||||
|
||||
return filteredProjects.map(({ id, pathWithNamespace }) => ({
|
||||
name: pathWithNamespace,
|
||||
externalId: id.toString(),
|
||||
type: SecretScanningResource.Project
|
||||
}));
|
||||
};
|
||||
|
||||
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitLabDataSourceWithConnection> = async ({
|
||||
dataSource,
|
||||
resourceName,
|
||||
tempFolder
|
||||
}) => {
|
||||
const { connection } = dataSource;
|
||||
|
||||
const instanceUrl = await getGitLabInstanceUrl(connection.credentials.instanceUrl);
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
|
||||
const user = await client.Users.showCurrentUser();
|
||||
|
||||
const repoPath = join(tempFolder, "repo.git");
|
||||
|
||||
if (!GitLabProjectRegex.test(resourceName)) {
|
||||
throw new Error("Invalid GitLab project name");
|
||||
}
|
||||
|
||||
await cloneRepository({
|
||||
cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${getMainDomain(instanceUrl)}/${resourceName}.git`,
|
||||
repoPath
|
||||
});
|
||||
|
||||
return repoPath;
|
||||
};
|
||||
|
||||
const teardown: TSecretScanningFactoryTeardown<
|
||||
TGitLabDataSourceWithConnection,
|
||||
TGitLabDataSourceCredentials
|
||||
> = async ({ dataSource: { connection, config }, credentials: { hookId } }) => {
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
try {
|
||||
await client.ProjectHooks.remove(projectId, hookId);
|
||||
} catch (error) {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const { groupId } = config;
|
||||
try {
|
||||
await client.GroupHooks.remove(groupId, hookId);
|
||||
} catch (error) {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
};
|
||||
|
||||
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
|
||||
TQueueGitLabResourceDiffScan["payload"]
|
||||
> = ({ project }) => {
|
||||
return {
|
||||
name: project.path_with_namespace,
|
||||
externalId: project.id.toString(),
|
||||
type: SecretScanningResource.Project
|
||||
};
|
||||
};
|
||||
|
||||
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
|
||||
TGitLabDataSourceWithConnection,
|
||||
TQueueGitLabResourceDiffScan["payload"]
|
||||
> = async ({ dataSource, payload, resourceName, configPath }) => {
|
||||
const { connection } = dataSource;
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
|
||||
const { commits, project } = payload;
|
||||
|
||||
const allFindings: SecretMatch[] = [];
|
||||
|
||||
for (const commit of commits) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const commitDiffs = await client.Commits.showDiff(project.id, commit.id);
|
||||
|
||||
for (const commitDiff of commitDiffs) {
|
||||
// eslint-disable-next-line no-continue
|
||||
if (commitDiff.deletedFile) continue;
|
||||
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const findings = await scanContentAndGetFindings(
|
||||
replaceNonChangesWithNewlines(`\n${commitDiff.diff}`),
|
||||
configPath
|
||||
);
|
||||
|
||||
const adjustedFindings = findings.map((finding) => {
|
||||
const startLine = convertPatchLineToFileLineNumber(commitDiff.diff, finding.StartLine);
|
||||
const endLine =
|
||||
finding.StartLine === finding.EndLine
|
||||
? startLine
|
||||
: convertPatchLineToFileLineNumber(commitDiff.diff, finding.EndLine);
|
||||
const startColumn = finding.StartColumn - 1; // subtract 1 for +
|
||||
const endColumn = finding.EndColumn - 1; // subtract 1 for +
|
||||
const authorName = commit.author.name;
|
||||
const authorEmail = commit.author.email;
|
||||
|
||||
return {
|
||||
...finding,
|
||||
StartLine: startLine,
|
||||
EndLine: endLine,
|
||||
StartColumn: startColumn,
|
||||
EndColumn: endColumn,
|
||||
File: commitDiff.newPath,
|
||||
Commit: commit.id,
|
||||
Author: authorName,
|
||||
Email: authorEmail,
|
||||
Message: commit.message,
|
||||
Fingerprint: `${commit.id}:${commitDiff.newPath}:${finding.RuleID}:${startLine}:${startColumn}`,
|
||||
Date: commit.timestamp,
|
||||
Link: `https://gitlab.com/${resourceName}/blob/${commit.id}/${commitDiff.newPath}#L${startLine}`
|
||||
};
|
||||
});
|
||||
|
||||
allFindings.push(...adjustedFindings);
|
||||
}
|
||||
}
|
||||
|
||||
return allFindings.map(
|
||||
({
|
||||
// discard match and secret as we don't want to store
|
||||
Match,
|
||||
Secret,
|
||||
...finding
|
||||
}) => ({
|
||||
details: titleCaseToCamelCase(finding),
|
||||
fingerprint: finding.Fingerprint,
|
||||
severity: SecretScanningFindingSeverity.High,
|
||||
rule: finding.RuleID
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
|
||||
TGitLabDataSourceInput["config"],
|
||||
TGitLabDataSourceWithConnection
|
||||
> = async ({ config, dataSource }) => {
|
||||
if (dataSource.config.scope !== config.scope) {
|
||||
throw new BadRequestError({ message: "Cannot change Data Source scope after creation." });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
listRawResources,
|
||||
getFullScanPath,
|
||||
initialize,
|
||||
postInitialization,
|
||||
teardown,
|
||||
getDiffScanResourcePayload,
|
||||
getDiffScanFindingsPayload,
|
||||
validateConfigUpdate
|
||||
};
|
||||
};
|
@@ -0,0 +1,101 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningResource
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
BaseCreateSecretScanningDataSourceSchema,
|
||||
BaseSecretScanningDataSourceSchema,
|
||||
BaseSecretScanningFindingSchema,
|
||||
BaseUpdateSecretScanningDataSourceSchema,
|
||||
GitRepositoryScanFindingDetailsSchema
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
|
||||
import { SecretScanningDataSources } from "@app/lib/api-docs";
|
||||
import { GitLabProjectRegex } from "@app/lib/regex";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
export const GitLabDataSourceConfigSchema = z.discriminatedUnion("scope", [
|
||||
z.object({
|
||||
scope: z.literal(GitLabDataSourceScope.Group).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
|
||||
groupId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.groupId),
|
||||
groupName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.groupName),
|
||||
includeProjects: z
|
||||
.array(
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(256)
|
||||
.refine((value) => value === "*" || GitLabProjectRegex.test(value), "Invalid project name format")
|
||||
)
|
||||
.nonempty("One or more projects required")
|
||||
.max(100, "Cannot configure more than 100 projects")
|
||||
.default(["*"])
|
||||
.describe(SecretScanningDataSources.CONFIG.GITLAB.includeProjects)
|
||||
}),
|
||||
z.object({
|
||||
scope: z.literal(GitLabDataSourceScope.Project).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
|
||||
projectName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.projectName),
|
||||
projectId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.projectId)
|
||||
})
|
||||
]);
|
||||
|
||||
export const GitLabDataSourceSchema = BaseSecretScanningDataSourceSchema({
|
||||
type: SecretScanningDataSource.GitLab,
|
||||
isConnectionRequired: true
|
||||
})
|
||||
.extend({
|
||||
config: GitLabDataSourceConfigSchema
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitLab"
|
||||
})
|
||||
);
|
||||
|
||||
export const CreateGitLabDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
|
||||
type: SecretScanningDataSource.GitLab,
|
||||
isConnectionRequired: true
|
||||
})
|
||||
.extend({
|
||||
config: GitLabDataSourceConfigSchema
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitLab"
|
||||
})
|
||||
);
|
||||
|
||||
export const UpdateGitLabDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitLab)
|
||||
.extend({
|
||||
config: GitLabDataSourceConfigSchema.optional()
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitLab"
|
||||
})
|
||||
);
|
||||
|
||||
export const GitLabDataSourceListItemSchema = z
|
||||
.object({
|
||||
name: z.literal("GitLab"),
|
||||
connection: z.literal(AppConnection.GitLab),
|
||||
type: z.literal(SecretScanningDataSource.GitLab)
|
||||
})
|
||||
.describe(
|
||||
JSON.stringify({
|
||||
title: "GitLab"
|
||||
})
|
||||
);
|
||||
|
||||
export const GitLabFindingSchema = BaseSecretScanningFindingSchema.extend({
|
||||
resourceType: z.literal(SecretScanningResource.Project),
|
||||
dataSourceType: z.literal(SecretScanningDataSource.GitLab),
|
||||
details: GitRepositoryScanFindingDetailsSchema
|
||||
});
|
||||
|
||||
export const GitLabDataSourceCredentialsSchema = z.object({
|
||||
token: z.string(),
|
||||
hookId: z.number()
|
||||
});
|
@@ -0,0 +1,94 @@
|
||||
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
|
||||
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
|
||||
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import {
|
||||
TGitLabDataSource,
|
||||
TGitLabDataSourceCredentials,
|
||||
THandleGitLabPushEvent
|
||||
} from "./gitlab-secret-scanning-types";
|
||||
|
||||
export const gitlabSecretScanningService = (
|
||||
secretScanningV2DAL: TSecretScanningV2DALFactory,
|
||||
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
const handlePushEvent = async ({ payload, token, dataSourceId }: THandleGitLabPushEvent) => {
|
||||
if (!payload.total_commits_count || !payload.project) {
|
||||
logger.warn(
|
||||
`secretScanningV2PushEvent: GitLab - Insufficient data [changes=${
|
||||
payload.total_commits_count ?? 0
|
||||
}] [projectName=${payload.project?.path_with_namespace ?? "unknown"}] [projectId=${payload.project?.id ?? "unknown"}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const dataSource = (await secretScanningV2DAL.dataSources.findOne({
|
||||
id: dataSourceId,
|
||||
type: SecretScanningDataSource.GitLab
|
||||
})) as TGitLabDataSource | undefined;
|
||||
|
||||
if (!dataSource) {
|
||||
logger.error(
|
||||
`secretScanningV2PushEvent: GitLab - Could not find data source [dataSourceId=${dataSourceId}] [projectId=${payload.project.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const { isAutoScanEnabled, config, encryptedCredentials, projectId } = dataSource;
|
||||
|
||||
if (!encryptedCredentials) {
|
||||
logger.info(
|
||||
`secretScanningV2PushEvent: GitLab - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
|
||||
const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });
|
||||
|
||||
const credentials = JSON.parse(decryptedCredentials.toString()) as TGitLabDataSourceCredentials;
|
||||
|
||||
if (token !== credentials.token) {
|
||||
logger.error(
|
||||
`secretScanningV2PushEvent: GitLab - Invalid webhook token [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isAutoScanEnabled) {
|
||||
logger.info(
|
||||
`secretScanningV2PushEvent: GitLab - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
config.scope === GitLabDataSourceScope.Project
|
||||
? config.projectId.toString() === payload.project_id.toString()
|
||||
: config.includeProjects.includes("*") || config.includeProjects.includes(payload.project.path_with_namespace)
|
||||
) {
|
||||
await secretScanningV2Queue.queueResourceDiffScan({
|
||||
dataSourceType: SecretScanningDataSource.GitLab,
|
||||
payload,
|
||||
dataSourceId: dataSource.id
|
||||
});
|
||||
} else {
|
||||
logger.info(
|
||||
`secretScanningV2PushEvent: GitLab - ignoring due to repository not being present in config [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
handlePushEvent
|
||||
};
|
||||
};
|
@@ -0,0 +1,97 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { TGitLabConnection } from "@app/services/app-connection/gitlab";
|
||||
|
||||
import {
|
||||
CreateGitLabDataSourceSchema,
|
||||
GitLabDataSourceCredentialsSchema,
|
||||
GitLabDataSourceListItemSchema,
|
||||
GitLabDataSourceSchema,
|
||||
GitLabFindingSchema
|
||||
} from "./gitlab-secret-scanning-schemas";
|
||||
|
||||
export type TGitLabDataSource = z.infer<typeof GitLabDataSourceSchema>;
|
||||
|
||||
export type TGitLabDataSourceInput = z.infer<typeof CreateGitLabDataSourceSchema>;
|
||||
|
||||
export type TGitLabDataSourceListItem = z.infer<typeof GitLabDataSourceListItemSchema>;
|
||||
|
||||
export type TGitLabFinding = z.infer<typeof GitLabFindingSchema>;
|
||||
|
||||
export type TGitLabDataSourceWithConnection = TGitLabDataSource & {
|
||||
connection: TGitLabConnection;
|
||||
};
|
||||
|
||||
export type TGitLabDataSourceCredentials = z.infer<typeof GitLabDataSourceCredentialsSchema>;
|
||||
|
||||
export type TGitLabDataSourcePushEventPayload = {
|
||||
object_kind: "push";
|
||||
event_name: "push";
|
||||
before: string;
|
||||
after: string;
|
||||
ref: string;
|
||||
ref_protected: boolean;
|
||||
checkout_sha: string;
|
||||
user_id: number;
|
||||
user_name: string;
|
||||
user_username: string;
|
||||
user_email: string;
|
||||
user_avatar: string;
|
||||
project_id: number;
|
||||
project: {
|
||||
id: number;
|
||||
name: string;
|
||||
description: string;
|
||||
web_url: string;
|
||||
avatar_url: string | null;
|
||||
git_ssh_url: string;
|
||||
git_http_url: string;
|
||||
namespace: string;
|
||||
visibility_level: number;
|
||||
path_with_namespace: string;
|
||||
default_branch: string;
|
||||
homepage: string;
|
||||
url: string;
|
||||
ssh_url: string;
|
||||
http_url: string;
|
||||
};
|
||||
repository: {
|
||||
name: string;
|
||||
url: string;
|
||||
description: string;
|
||||
homepage: string;
|
||||
git_http_url: string;
|
||||
git_ssh_url: string;
|
||||
visibility_level: number;
|
||||
};
|
||||
commits: {
|
||||
id: string;
|
||||
message: string;
|
||||
title: string;
|
||||
timestamp: string;
|
||||
url: string;
|
||||
author: {
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
added: string[];
|
||||
modified: string[];
|
||||
removed: string[];
|
||||
}[];
|
||||
total_commits_count: number;
|
||||
};
|
||||
|
||||
export type THandleGitLabPushEvent = {
|
||||
payload: TGitLabDataSourcePushEventPayload;
|
||||
dataSourceId: string;
|
||||
token: string;
|
||||
};
|
||||
|
||||
export type TQueueGitLabResourceDiffScan = {
|
||||
dataSourceType: SecretScanningDataSource.GitLab;
|
||||
payload: TGitLabDataSourcePushEventPayload;
|
||||
dataSourceId: string;
|
||||
resourceId: string;
|
||||
scanId: string;
|
||||
};
|
@@ -0,0 +1,3 @@
export * from "./gitlab-secret-scanning-constants";
export * from "./gitlab-secret-scanning-schemas";
export * from "./gitlab-secret-scanning-types";
@@ -1,6 +1,7 @@
export enum SecretScanningDataSource {
  GitHub = "github",
  Bitbucket = "bitbucket"
  Bitbucket = "bitbucket",
  GitLab = "gitlab"
}

export enum SecretScanningScanStatus {
@@ -1,5 +1,6 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { GitLabSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
@@ -19,5 +20,6 @@ type TSecretScanningFactoryImplementation = TSecretScanningFactory<

export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
  [SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
  [SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation,
  [SecretScanningDataSource.GitLab]: GitLabSecretScanningFactory as TSecretScanningFactoryImplementation
};
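With the factory map in place, every implementation shares the parameterized signature introduced in this changeset; a caller resolves a factory by data-source type and injects the shared dependencies, roughly as follows (the factory-map module path is an assumption):

```ts
// Sketch of resolving a factory under the new parameterized signature; the
// factory-map module path below is an assumption, the rest mirrors the call
// sites updated later in this changeset (queue and service layers).
import { TGitLabDataSourceWithConnection } from "@app/ee/services/secret-scanning-v2/gitlab";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
import { TSecretScanningFactoryParams } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";

export const listGitLabResources = async (
  deps: TSecretScanningFactoryParams, // { appConnectionDAL, kmsService } injected by the caller
  dataSource: TGitLabDataSourceWithConnection
) => {
  const factory = SECRET_SCANNING_FACTORY_MAP[SecretScanningDataSource.GitLab](deps);
  return factory.listRawResources(dataSource);
};
```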
@@ -1,11 +1,22 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import { join } from "path";
import picomatch from "picomatch";
import RE2 from "re2";

import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
  createTempFolder,
  deleteTempFolder,
  readFindingsFile,
  writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/gitlab";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";

import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
@@ -13,7 +24,8 @@ import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListIte

const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
  [SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
  [SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
  [SecretScanningDataSource.GitLab]: GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
@@ -46,6 +58,19 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
  });
}

export function scanFile(inputPath: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
    exec(command, (error) => {
      if (error && error.code === 77) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}

export const scanGitRepositoryAndGetFindings = async (
  scanPath: string,
  findingsPath: string,
@@ -140,3 +165,47 @@ export const parseScanErrorMessage = (err: unknown): string => {
    ? errorMessage
    : `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

export const scanSecretPolicyViolations = async (
  projectId: string,
  secretPath: string,
  secrets: { secretKey: string; secretValue: string }[],
  ignoreValues: string[]
) => {
  const appCfg = getConfig();

  if (!appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED) {
    return;
  }

  const match = appCfg.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.find(
    (el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
  );

  if (!match) {
    return;
  }

  const tempFolder = await createTempFolder();
  try {
    const scanPromises = secrets
      .filter((secret) => !ignoreValues.includes(secret.secretValue))
      .map(async (secret) => {
        const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
        await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);

        try {
          await scanFile(secretFilePath);
        } catch (error) {
          throw new BadRequestError({
            message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
            name: "SecretPolicyViolation"
          });
        }
      });

    await Promise.all(scanPromises);
  } finally {
    await deleteTempFolder(tempFolder);
  }
};
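A hypothetical caller of the new scanSecretPolicyViolations guard, assuming it is exported from the secret-scanning-v2-fns module that the surrounding imports reference; the sample path and values are placeholders:

```ts
// Hypothetical caller of the new guard; the import path, sample path and secret
// values are illustrative only.
import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";

export const assertNoSecretLikeValues = async (projectId: string): Promise<void> =>
  // Throws a BadRequestError named "SecretPolicyViolation" when the CLI scan
  // (`infisical scan --exit-code=77 ... --no-git`) flags a value as a likely secret;
  // resolves silently when detection is disabled, the folder is not configured for
  // this project, or every value passes the scan or appears in ignoreValues.
  scanSecretPolicyViolations(
    projectId,
    "/params/app", // secretPath being written to
    [{ secretKey: "API_URL", secretValue: "https://api.example.com" }],
    [] // ignoreValues
  );
```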
@@ -3,15 +3,18 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums

export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub",
  [SecretScanningDataSource.Bitbucket]: "Bitbucket"
  [SecretScanningDataSource.Bitbucket]: "Bitbucket",
  [SecretScanningDataSource.GitLab]: "GitLab"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
  [SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
  [SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket,
  [SecretScanningDataSource.GitLab]: AppConnection.GitLab
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
  [SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
  [SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" },
  [SecretScanningDataSource.GitLab]: { verb: "push", noun: "projects" }
};
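A minimal lookup sketch combining the new GitLab entries (the maps module path is an assumption):

```ts
// Minimal lookup sketch; the maps module path is an assumption for illustration.
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  AUTO_SYNC_DESCRIPTION_HELPER,
  SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";

const source = SecretScanningDataSource.GitLab;
const { verb, noun } = AUTO_SYNC_DESCRIPTION_HELPER[source];

// e.g. "GitLab: scans on push across projects"
export const autoScanSummary = `${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[source]}: scans on ${verb} across ${noun}`;
```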
@@ -16,6 +16,7 @@ import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
|
||||
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
@@ -48,6 +49,7 @@ type TSecretRotationV2QueueServiceFactoryDep = {
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findById">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
|
||||
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
|
||||
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
|
||||
};
|
||||
@@ -62,7 +64,8 @@ export const secretScanningV2QueueServiceFactory = async ({
|
||||
smtpService,
|
||||
kmsService,
|
||||
auditLogService,
|
||||
keyStore
|
||||
keyStore,
|
||||
appConnectionDAL
|
||||
}: TSecretRotationV2QueueServiceFactoryDep) => {
|
||||
const queueDataSourceFullScan = async (
|
||||
dataSource: TSecretScanningDataSourceWithConnection,
|
||||
@@ -71,7 +74,10 @@ export const secretScanningV2QueueServiceFactory = async ({
|
||||
try {
|
||||
const { type } = dataSource;
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[type]({
|
||||
kmsService,
|
||||
appConnectionDAL
|
||||
});
|
||||
|
||||
const rawResources = await factory.listRawResources(dataSource);
|
||||
|
||||
@@ -171,7 +177,10 @@ export const secretScanningV2QueueServiceFactory = async ({
|
||||
let connection: TAppConnection | null = null;
|
||||
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
|
||||
kmsService,
|
||||
appConnectionDAL
|
||||
});
|
||||
|
||||
const findingsPath = join(tempFolder, "findings.json");
|
||||
|
||||
@@ -329,7 +338,10 @@ export const secretScanningV2QueueServiceFactory = async ({
|
||||
dataSourceId,
|
||||
dataSourceType
|
||||
}: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]({
|
||||
kmsService,
|
||||
appConnectionDAL
|
||||
});
|
||||
|
||||
const resourcePayload = factory.getDiffScanResourcePayload(payload);
|
||||
|
||||
@@ -391,7 +403,10 @@ export const secretScanningV2QueueServiceFactory = async ({
|
||||
|
||||
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
|
||||
kmsService,
|
||||
appConnectionDAL
|
||||
});
|
||||
|
||||
const tempFolder = await createTempFolder();
|
||||
|
||||
|
@@ -46,6 +46,7 @@ import {
|
||||
import { DatabaseErrorCode } from "@app/lib/error-codes";
|
||||
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
|
||||
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
|
||||
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
|
||||
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
|
||||
@@ -53,12 +54,14 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
|
||||
import { gitlabSecretScanningService } from "./gitlab/gitlab-secret-scanning-service";
|
||||
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
|
||||
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
|
||||
|
||||
export type TSecretScanningV2ServiceFactoryDep = {
|
||||
secretScanningV2DAL: TSecretScanningV2DALFactory;
|
||||
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
secretScanningV2Queue: Pick<
|
||||
@@ -76,6 +79,7 @@ export const secretScanningV2ServiceFactory = ({
|
||||
appConnectionService,
|
||||
licenseService,
|
||||
secretScanningV2Queue,
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
}: TSecretScanningV2ServiceFactoryDep) => {
|
||||
const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
|
||||
@@ -255,7 +259,10 @@ export const secretScanningV2ServiceFactory = ({
|
||||
);
|
||||
}
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
try {
|
||||
const createdDataSource = await factory.initialize(
|
||||
@@ -363,6 +370,31 @@ export const secretScanningV2ServiceFactory = ({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
let connection: TAppConnection | null = null;
|
||||
if (dataSource.connectionId) {
|
||||
// validates permission to connect and app is valid for data source
|
||||
connection = await appConnectionService.connectAppConnectionById(
|
||||
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[dataSource.type],
|
||||
dataSource.connectionId,
|
||||
actor
|
||||
);
|
||||
}
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
if (payload.config) {
|
||||
await factory.validateConfigUpdate({
|
||||
dataSource: {
|
||||
...dataSource,
|
||||
connection
|
||||
} as TSecretScanningDataSourceWithConnection,
|
||||
config: payload.config as TSecretScanningDataSourceWithConnection["config"]
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
|
||||
|
||||
@@ -416,7 +448,10 @@ export const secretScanningV2ServiceFactory = ({
|
||||
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
|
||||
});
|
||||
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
|
||||
const factory = SECRET_SCANNING_FACTORY_MAP[type]({
|
||||
appConnectionDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
let connection: TAppConnection | null = null;
|
||||
if (dataSource.connection) {
|
||||
@@ -903,6 +938,7 @@ export const secretScanningV2ServiceFactory = ({
|
||||
findSecretScanningConfigByProjectId,
|
||||
upsertSecretScanningConfig,
|
||||
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
|
||||
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
|
||||
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService),
|
||||
gitlab: gitlabSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
|
||||
};
|
||||
};
|
||||
|
@@ -21,14 +21,25 @@ import {
|
||||
TGitHubFinding,
|
||||
TQueueGitHubResourceDiffScan
|
||||
} from "@app/ee/services/secret-scanning-v2/github";
|
||||
import {
|
||||
TGitLabDataSource,
|
||||
TGitLabDataSourceCredentials,
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabDataSourceListItem,
|
||||
TGitLabDataSourceWithConnection,
|
||||
TGitLabFinding,
|
||||
TQueueGitLabResourceDiffScan
|
||||
} from "@app/ee/services/secret-scanning-v2/gitlab";
|
||||
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
|
||||
import {
|
||||
SecretScanningDataSource,
|
||||
SecretScanningFindingStatus,
|
||||
SecretScanningScanStatus
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
|
||||
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
|
||||
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource | TGitLabDataSource;
|
||||
|
||||
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
|
||||
lastScannedAt?: Date | null;
|
||||
@@ -52,15 +63,25 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
|
||||
|
||||
export type TSecretScanningDataSourceWithConnection =
|
||||
| TGitHubDataSourceWithConnection
|
||||
| TBitbucketDataSourceWithConnection;
|
||||
| TBitbucketDataSourceWithConnection
|
||||
| TGitLabDataSourceWithConnection;
|
||||
|
||||
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
|
||||
export type TSecretScanningDataSourceInput =
|
||||
| TGitHubDataSourceInput
|
||||
| TBitbucketDataSourceInput
|
||||
| TGitLabDataSourceInput;
|
||||
|
||||
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
|
||||
export type TSecretScanningDataSourceListItem =
|
||||
| TGitHubDataSourceListItem
|
||||
| TBitbucketDataSourceListItem
|
||||
| TGitLabDataSourceListItem;
|
||||
|
||||
export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
|
||||
export type TSecretScanningDataSourceCredentials =
|
||||
| TBitbucketDataSourceCredentials
|
||||
| TGitLabDataSourceCredentials
|
||||
| undefined;
|
||||
|
||||
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
|
||||
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding | TGitLabFinding;
|
||||
|
||||
export type TListSecretScanningDataSourcesByProjectId = {
|
||||
projectId: string;
|
||||
@@ -112,7 +133,10 @@ export type TQueueSecretScanningDataSourceFullScan = {
|
||||
scanId: string;
|
||||
};
|
||||
|
||||
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
|
||||
export type TQueueSecretScanningResourceDiffScan =
|
||||
| TQueueGitHubResourceDiffScan
|
||||
| TQueueBitbucketResourceDiffScan
|
||||
| TQueueGitLabResourceDiffScan;
|
||||
|
||||
export type TQueueSecretScanningSendNotification = {
|
||||
dataSource: TSecretScanningDataSources;
|
||||
@@ -170,6 +194,11 @@ export type TSecretScanningFactoryInitialize<
|
||||
callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
|
||||
) => Promise<TSecretScanningDataSourceRaw>;
|
||||
|
||||
export type TSecretScanningFactoryValidateConfigUpdate<
|
||||
C extends TSecretScanningDataSourceInput["config"],
|
||||
T extends TSecretScanningDataSourceWithConnection
|
||||
> = (params: { config: C; dataSource: T }) => Promise<void>;
|
||||
|
||||
export type TSecretScanningFactoryPostInitialization<
|
||||
P extends TSecretScanningDataSourceInput,
|
||||
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
|
||||
@@ -181,17 +210,23 @@ export type TSecretScanningFactoryTeardown<
|
||||
C extends TSecretScanningDataSourceCredentials = undefined
|
||||
> = (params: { dataSource: T; credentials: C }) => Promise<void>;
|
||||
|
||||
export type TSecretScanningFactoryParams = {
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
};
|
||||
|
||||
export type TSecretScanningFactory<
|
||||
T extends TSecretScanningDataSourceWithConnection,
|
||||
P extends TQueueSecretScanningResourceDiffScan["payload"],
|
||||
I extends TSecretScanningDataSourceInput,
|
||||
C extends TSecretScanningDataSourceCredentials | undefined = undefined
|
||||
> = () => {
|
||||
> = (params: TSecretScanningFactoryParams) => {
|
||||
listRawResources: TSecretScanningFactoryListRawResources<T>;
|
||||
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
|
||||
initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
|
||||
postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
|
||||
teardown: TSecretScanningFactoryTeardown<T, C>;
|
||||
validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<I["config"], T>;
|
||||
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
|
||||
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
|
||||
};
|
||||
|
@@ -2,10 +2,12 @@ import { z } from "zod";

import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
import { GitLabDataSourceSchema, GitLabFindingSchema } from "@app/ee/services/secret-scanning-v2/gitlab";

export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
  GitHubDataSourceSchema,
  BitbucketDataSourceSchema
  BitbucketDataSourceSchema,
  GitLabDataSourceSchema
]);

export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
@@ -18,5 +20,10 @@ export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType"
    JSON.stringify({
      title: "Bitbucket"
    })
  ),
  GitLabFindingSchema.describe(
    JSON.stringify({
      title: "GitLab"
    })
  )
]);
@@ -664,6 +664,10 @@ export const ORGANIZATIONS = {
|
||||
organizationId: "The ID of the organization to delete the membership from.",
|
||||
membershipId: "The ID of the membership to delete."
|
||||
},
|
||||
BULK_DELETE_USER_MEMBERSHIPS: {
|
||||
organizationId: "The ID of the organization to delete the memberships from.",
|
||||
membershipIds: "The IDs of the memberships to delete."
|
||||
},
|
||||
LIST_IDENTITY_MEMBERSHIPS: {
|
||||
orgId: "The ID of the organization to get identity memberships from.",
|
||||
offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
|
||||
@@ -704,7 +708,8 @@ export const PROJECTS = {
|
||||
hasDeleteProtection: "Enable or disable delete protection for the project.",
|
||||
secretSharing: "Enable or disable secret sharing for the project.",
|
||||
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
|
||||
defaultProduct: "The default product in which the project will open"
|
||||
defaultProduct: "The default product in which the project will open",
|
||||
secretDetectionIgnoreValues: "The list of secret values to ignore for secret detection."
|
||||
},
|
||||
GET_KEY: {
|
||||
workspaceId: "The ID of the project to get the key from."
|
||||
@@ -2252,7 +2257,9 @@ export const AppConnections = {
|
||||
AZURE_DEVOPS: {
|
||||
code: "The OAuth code to use to connect with Azure DevOps.",
|
||||
tenantId: "The Tenant ID to use to connect with Azure DevOps.",
|
||||
orgName: "The Organization name to use to connect with Azure DevOps."
|
||||
orgName: "The Organization name to use to connect with Azure DevOps.",
|
||||
clientId: "The Client ID to use to connect with Azure Client Secrets.",
|
||||
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
|
||||
},
|
||||
OCI: {
|
||||
userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
|
||||
@@ -2295,6 +2302,9 @@ export const AppConnections = {
|
||||
DIGITAL_OCEAN_APP_PLATFORM: {
|
||||
apiToken: "The API token used to authenticate with Digital Ocean App Platform."
|
||||
},
|
||||
NETLIFY: {
|
||||
accessToken: "The Access token used to authenticate with Netlify."
|
||||
},
|
||||
OKTA: {
|
||||
instanceUrl: "The URL used to access your Okta organization.",
|
||||
apiToken: "The API token used to authenticate with Okta."
|
||||
@@ -2399,12 +2409,18 @@ export const SecretSyncs = {
|
||||
env: "The name of the GitHub environment."
|
||||
},
|
||||
AZURE_KEY_VAULT: {
|
||||
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
|
||||
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/",
|
||||
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
|
||||
clientId: "The Client ID to use to connect with Azure Client Secrets.",
|
||||
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
|
||||
},
|
||||
AZURE_APP_CONFIGURATION: {
|
||||
configurationUrl:
|
||||
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
|
||||
label: "An optional label to assign to secrets created in Azure App Configuration."
|
||||
label: "An optional label to assign to secrets created in Azure App Configuration.",
|
||||
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
|
||||
clientId: "The Client ID to use to connect with Azure Client Secrets.",
|
||||
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
|
||||
},
|
||||
AZURE_DEVOPS: {
|
||||
devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
|
||||
@@ -2520,6 +2536,13 @@ export const SecretSyncs = {
|
||||
workspaceSlug: "The Bitbucket Workspace slug to sync secrets to.",
|
||||
repositorySlug: "The Bitbucket Repository slug to sync secrets to.",
|
||||
environmentId: "The Bitbucket Deployment Environment uuid to sync secrets to."
|
||||
},
|
||||
NETLIFY: {
|
||||
accountId: "The ID of the Netlify account to sync secrets to.",
|
||||
accountName: "The name of the Netlify account to sync secrets to.",
|
||||
siteName: "The name of the Netlify site to sync secrets to.",
|
||||
siteId: "The ID of the Netlify site to sync secrets to.",
|
||||
context: "The Netlify context to sync secrets to."
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -2701,6 +2724,14 @@ export const SecretScanningDataSources = {
|
||||
GITHUB: {
|
||||
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
|
||||
},
|
||||
GITLAB: {
|
||||
includeProjects: 'The projects to include when scanning. Defaults to all projects (["*"]).',
|
||||
scope: "The GitLab scope scanning should occur at (project or group level).",
|
||||
projectId: "The ID of the project to scan.",
|
||||
projectName: "The name of the project to scan.",
|
||||
groupId: "The ID of the group to scan projects from.",
|
||||
groupName: "The name of the group to scan projects from."
|
||||
},
|
||||
BITBUCKET: {
|
||||
workspaceSlug: "The workspace to scan.",
|
||||
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
|
||||
|
@@ -204,6 +204,17 @@ const envSchema = z
    WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
    ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),

    // Special Detection Feature
    PARAMS_FOLDER_SECRET_DETECTION_PATHS: zpStr(
      z
        .string()
        .optional()
        .transform((val) => {
          if (!val) return undefined;
          return JSON.parse(val) as { secretPath: string; projectId: string }[];
        })
    ),

    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
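Because the raw value is passed through JSON.parse, the variable is expected to hold a JSON array of { secretPath, projectId } entries; a hypothetical value looks like this:

```ts
// Hypothetical value for PARAMS_FOLDER_SECRET_DETECTION_PATHS; the project ID and
// glob are placeholders. Paths are matched with picomatch elsewhere in this changeset.
process.env.PARAMS_FOLDER_SECRET_DETECTION_PATHS = JSON.stringify([
  { projectId: "00000000-0000-0000-0000-000000000000", secretPath: "/params/**" }
]);
```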
@@ -261,10 +272,26 @@
    // gcp app
    INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),

    // azure app
    // Legacy Single Multi Purpose Azure App Connection
    INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),

    // Azure App Configuration App Connection
    INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET: zpStr(z.string().optional()),

    // Azure Key Vault App Connection
    INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET: zpStr(z.string().optional()),

    // Azure Client Secrets App Connection
    INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET: zpStr(z.string().optional()),

    // Azure DevOps App Connection
    INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID: zpStr(z.string().optional()),
    INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET: zpStr(z.string().optional()),

    // datadog
    SHOULD_USE_DATADOG_TRACER: zodStrBool.default("false"),
    DATADOG_PROFILING_ENABLED: zodStrBool.default("false"),
@@ -341,7 +368,24 @@ const envSchema = z
|
||||
isHsmConfigured:
|
||||
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
|
||||
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
|
||||
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
|
||||
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(","),
|
||||
PARAMS_FOLDER_SECRET_DETECTION_ENABLED: (data.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.length ?? 0) > 0,
|
||||
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID:
|
||||
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
|
||||
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET:
|
||||
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
|
||||
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID:
|
||||
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
|
||||
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET:
|
||||
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
|
||||
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID:
|
||||
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
|
||||
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET:
|
||||
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
|
||||
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID:
|
||||
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
|
||||
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET:
|
||||
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET
|
||||
}));
|
||||
|
||||
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
|
||||
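The transform above resolves each product-specific Azure credential and falls back to the legacy single-purpose variables when the dedicated ones are unset. A minimal sketch of that "specific-or-legacy" resolution pattern; resolveAzureCredential is a hypothetical helper used only for illustration, not an Infisical export:

// Sketch only: mirrors the fallback used in the env transform above.
const resolveAzureCredential = (specific: string | undefined, legacy: string | undefined): string | undefined =>
  specific || legacy;

// Azure DevOps uses its own client ID when configured, otherwise the legacy multi-purpose one.
const devOpsClientId = resolveAzureCredential(
  process.env.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
  process.env.INF_APP_CONNECTION_AZURE_CLIENT_ID
);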
@@ -451,15 +495,54 @@ export const overwriteSchema: {
}
]
},
azure: {
name: "Azure",
azureAppConfiguration: {
name: "Azure App Connection: App Configuration",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureKeyVault: {
name: "Azure App Connection: Key Vault",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureClientSecrets: {
name: "Azure App Connection: Client Secrets",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureDevOps: {
name: "Azure App Connection: DevOps",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
@@ -11,3 +11,5 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);

export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

export const GitLabProjectRegex = new RE2(/^[a-zA-Z0-9._-]+(?:\/[a-zA-Z0-9._-]+)+$/);
@@ -14,6 +14,11 @@ export const blockLocalAndPrivateIpAddresses = async (url: string) => {
if (appCfg.isDevelopmentMode) return;

const validUrl = new URL(url);

if (validUrl.username || validUrl.password) {
throw new BadRequestError({ message: "URLs with user credentials (e.g., user:pass@) are not allowed" });
}

const inputHostIps: string[] = [];
if (isIPv4(validUrl.hostname)) {
inputHostIps.push(validUrl.hostname);
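For context, the new guard above rejects URLs that embed userinfo before any hostname or IP checks run; the WHATWG URL parser exposes those parts as username and password. A hedged sketch of the same check in isolation (the example hostname is a placeholder):

// Sketch: reject URLs carrying embedded credentials, mirroring the guard above.
const assertNoUserInfo = (rawUrl: string): void => {
  const parsed = new URL(rawUrl);
  if (parsed.username || parsed.password) {
    throw new Error("URLs with user credentials (e.g., user:pass@) are not allowed");
  }
};

assertNoUserInfo("https://user:pass@internal.example"); // throws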
@@ -4,6 +4,8 @@ import { Probot } from "probot";
import { z } from "zod";

import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
import { TGitLabDataSourcePushEventPayload } from "@app/ee/services/secret-scanning-v2/gitlab";
import { GitLabWebHookEvent } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";

@@ -113,4 +115,36 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
return res.send("ok");
}
});

// gitlab push event webhook
server.route({
method: "POST",
url: "/gitlab",
config: {
rateLimit: writeLimit
},
handler: async (req, res) => {
const event = req.headers["x-gitlab-event"] as GitLabWebHookEvent;
const token = req.headers["x-gitlab-token"] as string;
const dataSourceId = req.headers["x-data-source-id"] as string;

if (event !== GitLabWebHookEvent.Push) {
return res.status(400).send({ message: `Event type not supported: ${event as string}` });
}

if (!token) {
return res.status(401).send({ message: "Unauthorized: Missing token" });
}

if (!dataSourceId) return res.status(400).send({ message: "Data Source ID header is required" });

await server.services.secretScanningV2.gitlab.handlePushEvent({
dataSourceId,
payload: req.body as TGitLabDataSourcePushEventPayload,
token
});

return res.send("ok");
}
});
};
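The GitLab route above expects the event type, the webhook token, and the data source ID as request headers. A hedged example of how a caller might exercise it; the host, path prefix, token, ID, and the "Push Hook" event value are illustrative assumptions, not confirmed by the diff:

// Sketch: invoking the GitLab push webhook with the headers the handler reads.
await fetch("https://infisical.example.com/secret-scanning/webhooks/gitlab", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "X-Gitlab-Event": "Push Hook", // assumed to match GitLabWebHookEvent.Push
    "X-Gitlab-Token": "<webhook-token>",
    "X-Data-Source-Id": "<data-source-id>"
  },
  body: JSON.stringify({ ref: "refs/heads/main", commits: [] })
});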
@@ -11,6 +11,7 @@ import {
accessApprovalPolicyBypasserDALFactory
} from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
import { accessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { accessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { accessApprovalRequestDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-dal";
import { accessApprovalRequestReviewerDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-reviewer-dal";

@@ -76,6 +77,7 @@ import {
secretApprovalPolicyBypasserDALFactory
} from "@app/ee/services/secret-approval-policy/secret-approval-policy-approver-dal";
import { secretApprovalPolicyDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-dal";
import { secretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { secretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";

@@ -425,9 +427,11 @@ export const registerRoutes = async (
const accessApprovalPolicyApproverDAL = accessApprovalPolicyApproverDALFactory(db);
const accessApprovalPolicyBypasserDAL = accessApprovalPolicyBypasserDALFactory(db);
const accessApprovalRequestReviewerDAL = accessApprovalRequestReviewerDALFactory(db);
const accessApprovalPolicyEnvironmentDAL = accessApprovalPolicyEnvironmentDALFactory(db);

const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const sapBypasserDAL = secretApprovalPolicyBypasserDALFactory(db);
const sapEnvironmentDAL = secretApprovalPolicyEnvironmentDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);

@@ -561,6 +565,7 @@ export const registerRoutes = async (
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
secretApprovalPolicyBypasserDAL: sapBypasserDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL,
permissionService,
secretApprovalPolicyDAL,
licenseService,

@@ -1039,6 +1044,15 @@ export const registerRoutes = async (
kmsService
});

const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
kmsService,
licenseService,
orgGatewayConfigDAL,
keyStore
});

const secretSyncQueue = secretSyncQueueFactory({
queueService,
secretSyncDAL,

@@ -1062,7 +1076,8 @@ export const registerRoutes = async (
secretVersionTagV2BridgeDAL,
resourceMetadataDAL,
appConnectionDAL,
licenseService
licenseService,
gatewayService
});

const secretQueueService = secretQueueFactory({

@@ -1156,7 +1171,9 @@ export const registerRoutes = async (
keyStore,
licenseService,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL
});

const projectRoleService = projectRoleServiceFactory({

@@ -1231,6 +1248,7 @@ export const registerRoutes = async (

const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
projectDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,

@@ -1317,6 +1335,7 @@ export const registerRoutes = async (
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,

@@ -1481,15 +1500,6 @@ export const registerRoutes = async (
licenseService
});

const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
kmsService,
licenseService,
orgGatewayConfigDAL,
keyStore
});

const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
identityKubernetesAuthDAL,
identityOrgMembershipDAL,

@@ -1923,7 +1933,8 @@ export const registerRoutes = async (
projectMembershipDAL,
smtpService,
kmsService,
keyStore
keyStore,
appConnectionDAL
});

const secretScanningV2Service = secretScanningV2ServiceFactory({

@@ -1932,7 +1943,8 @@ export const registerRoutes = async (
licenseService,
secretScanningV2DAL,
secretScanningV2Queue,
kmsService
kmsService,
appConnectionDAL
});

// setup the communication with license key server
@@ -93,6 +93,13 @@ export const sapPubSchema = SecretApprovalPoliciesSchema.merge(
name: z.string(),
slug: z.string()
}),
environments: z.array(
z.object({
id: z.string(),
name: z.string(),
slug: z.string()
})
),
projectId: z.string()
})
);

@@ -264,7 +271,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true,
showSnapshotsLegacy: true
showSnapshotsLegacy: true,
secretDetectionIgnoreValues: true
});

export const SanitizedTagSchema = SecretTagsSchema.pick({
@@ -52,7 +52,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
kubernetesAutoFetchServiceAccountToken: z.boolean(),
paramsFolderSecretDetectionEnabled: z.boolean()
})
})
}

@@ -67,7 +68,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
fipsEnabled: crypto.isFipsModeEnabled(),
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
}
};
}

@@ -685,6 +687,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: false,
body: z.object({
email: z.string().email().trim().min(1),
password: z.string().trim().min(1),
@@ -75,6 +75,10 @@ import {
import { LdapConnectionListItemSchema, SanitizedLdapConnectionSchema } from "@app/services/app-connection/ldap";
import { MsSqlConnectionListItemSchema, SanitizedMsSqlConnectionSchema } from "@app/services/app-connection/mssql";
import { MySqlConnectionListItemSchema, SanitizedMySqlConnectionSchema } from "@app/services/app-connection/mysql";
import {
NetlifyConnectionListItemSchema,
SanitizedNetlifyConnectionSchema
} from "@app/services/app-connection/netlify";
import { OktaConnectionListItemSchema, SanitizedOktaConnectionSchema } from "@app/services/app-connection/okta";
import {
PostgresConnectionListItemSchema,

@@ -145,6 +149,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedChecklyConnectionSchema.options,
...SanitizedSupabaseConnectionSchema.options,
...SanitizedDigitalOceanConnectionSchema.options,
...SanitizedNetlifyConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
]);

@@ -184,6 +189,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
ChecklyConnectionListItemSchema,
SupabaseConnectionListItemSchema,
DigitalOceanConnectionListItemSchema,
NetlifyConnectionListItemSchema,
OktaConnectionListItemSchema
]);
@@ -26,6 +26,7 @@ import { registerHumanitecConnectionRouter } from "./humanitec-connection-router
import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
import { registerNetlifyConnectionRouter } from "./netlify-connection-router";
import { registerOktaConnectionRouter } from "./okta-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerRailwayConnectionRouter } from "./railway-connection-router";

@@ -76,5 +77,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Checkly]: registerChecklyConnectionRouter,
[AppConnection.Supabase]: registerSupabaseConnectionRouter,
[AppConnection.DigitalOcean]: registerDigitalOceanConnectionRouter,
[AppConnection.Netlify]: registerNetlifyConnectionRouter,
[AppConnection.Okta]: registerOktaConnectionRouter
};
@@ -0,0 +1,87 @@
import { z } from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateNetlifyConnectionSchema,
SanitizedNetlifyConnectionSchema,
UpdateNetlifyConnectionSchema
} from "@app/services/app-connection/netlify";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerNetlifyConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Netlify,
server,
sanitizedResponseSchema: SanitizedNetlifyConnectionSchema,
createSchema: CreateNetlifyConnectionSchema,
updateSchema: UpdateNetlifyConnectionSchema
});

// The below endpoints are not exposed and for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/accounts`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
accounts: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;

const accounts = await server.services.appConnection.netlify.listAccounts(connectionId, req.permission);

return { accounts };
}
});

server.route({
method: "GET",
url: `/:connectionId/accounts/:accountId/sites`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid(),
accountId: z.string()
}),
response: {
200: z.object({
sites: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId, accountId } = req.params;

const sites = await server.services.appConnection.netlify.listSites(connectionId, req.permission, accountId);

return { sites };
}
});
};
|
||||
.describe(PROJECTS.UPDATE.slug),
|
||||
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
|
||||
showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy),
|
||||
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct)
|
||||
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct),
|
||||
secretDetectionIgnoreValues: z
|
||||
.array(z.string())
|
||||
.optional()
|
||||
.describe(PROJECTS.UPDATE.secretDetectionIgnoreValues)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -392,7 +396,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
hasDeleteProtection: req.body.hasDeleteProtection,
|
||||
slug: req.body.slug,
|
||||
secretSharing: req.body.secretSharing,
|
||||
showSnapshotsLegacy: req.body.showSnapshotsLegacy
|
||||
showSnapshotsLegacy: req.body.showSnapshotsLegacy,
|
||||
secretDetectionIgnoreValues: req.body.secretDetectionIgnoreValues
|
||||
},
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorId: req.permission.id,
|
||||
|
@@ -21,6 +21,7 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
|
||||
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
|
||||
import { registerHerokuSyncRouter } from "./heroku-sync-router";
|
||||
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
|
||||
import { registerNetlifySyncRouter } from "./netlify-sync-router";
|
||||
import { registerRailwaySyncRouter } from "./railway-sync-router";
|
||||
import { registerRenderSyncRouter } from "./render-sync-router";
|
||||
import { registerSupabaseSyncRouter } from "./supabase-sync-router";
|
||||
@@ -61,5 +62,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
|
||||
[SecretSync.Railway]: registerRailwaySyncRouter,
|
||||
[SecretSync.Checkly]: registerChecklySyncRouter,
|
||||
[SecretSync.DigitalOceanAppPlatform]: registerDigitalOceanAppPlatformSyncRouter,
|
||||
[SecretSync.Netlify]: registerNetlifySyncRouter,
|
||||
[SecretSync.Bitbucket]: registerBitbucketSyncRouter
|
||||
};
|
||||
|
@@ -0,0 +1,17 @@
|
||||
import {
|
||||
CreateNetlifySyncSchema,
|
||||
NetlifySyncSchema,
|
||||
UpdateNetlifySyncSchema
|
||||
} from "@app/services/secret-sync/netlify/netlify-sync-schemas";
|
||||
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||
|
||||
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
|
||||
|
||||
export const registerNetlifySyncRouter = async (server: FastifyZodProvider) =>
|
||||
registerSyncSecretsEndpoints({
|
||||
destination: SecretSync.Netlify,
|
||||
server,
|
||||
responseSchema: NetlifySyncSchema,
|
||||
createSchema: CreateNetlifySyncSchema,
|
||||
updateSchema: UpdateNetlifySyncSchema
|
||||
});
|
@@ -44,6 +44,7 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
|
||||
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
|
||||
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
|
||||
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
|
||||
import { NetlifySyncListItemSchema, NetlifySyncSchema } from "@app/services/secret-sync/netlify";
|
||||
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
|
||||
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
|
||||
import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase";
|
||||
@@ -82,6 +83,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
|
||||
RailwaySyncSchema,
|
||||
ChecklySyncSchema,
|
||||
DigitalOceanAppPlatformSyncSchema,
|
||||
NetlifySyncSchema,
|
||||
BitbucketSyncSchema
|
||||
]);
|
||||
|
||||
@@ -114,6 +116,7 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
|
||||
RailwaySyncListItemSchema,
|
||||
ChecklySyncListItemSchema,
|
||||
SupabaseSyncListItemSchema,
|
||||
NetlifySyncListItemSchema,
|
||||
BitbucketSyncListItemSchema
|
||||
]);
|
||||
|
||||
|
@@ -264,6 +264,48 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:organizationId/memberships",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
hide: false,
|
||||
tags: [ApiDocsTags.Organizations],
|
||||
description: "Bulk delete organization user memberships",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
organizationId: z.string().trim().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.organizationId)
|
||||
}),
|
||||
body: z.object({
|
||||
membershipIds: z.string().trim().array().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.membershipIds)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
memberships: OrgMembershipsSchema.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
if (req.auth.actor !== ActorType.USER) return;
|
||||
|
||||
const memberships = await server.services.org.bulkDeleteOrgMemberships({
|
||||
userId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
orgId: req.params.organizationId,
|
||||
membershipIds: req.body.membershipIds,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
return { memberships };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
// TODO: re-think endpoint structure in future so users only need to pass in membershipId bc organizationId is redundant
|
||||
method: "GET",
|
||||
|
@@ -34,6 +34,7 @@ export enum AppConnection {
|
||||
Checkly = "checkly",
|
||||
Supabase = "supabase",
|
||||
DigitalOcean = "digital-ocean",
|
||||
Netlify = "netlify",
|
||||
Okta = "okta"
|
||||
}
|
||||
|
||||
|
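Returning to the organization router change above: the new bulk-delete route accepts an array of membership IDs in the request body and returns the removed memberships. A hedged usage sketch; the base URL, API path prefix, token, and IDs are placeholders and the mount path is an assumption:

// Sketch: bulk-removing organization user memberships via the new DELETE endpoint.
await fetch("https://infisical.example.com/api/v1/organizations/<organization-id>/memberships", {
  method: "DELETE",
  headers: {
    Authorization: "Bearer <access-token>",
    "Content-Type": "application/json"
  },
  body: JSON.stringify({ membershipIds: ["<membership-id-1>", "<membership-id-2>"] })
});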
@@ -97,6 +97,7 @@ import { getLdapConnectionListItem, LdapConnectionMethod, validateLdapConnection
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
import { getNetlifyConnectionListItem, validateNetlifyConnectionCredentials } from "./netlify";
import { getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";

@@ -163,6 +164,7 @@ export const listAppConnectionOptions = () => {
getChecklyConnectionListItem(),
getSupabaseConnectionListItem(),
getDigitalOceanConnectionListItem(),
getNetlifyConnectionListItem(),
getOktaConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};

@@ -251,7 +253,8 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Checkly]: validateChecklyConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Supabase]: validateSupabaseConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.DigitalOcean]: validateDigitalOceanConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Netlify]: validateNetlifyConnectionCredentials as TAppConnectionCredentialsValidator
};

return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService);

@@ -381,6 +384,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Checkly]: platformManagedCredentialsNotSupported,
[AppConnection.Supabase]: platformManagedCredentialsNotSupported,
[AppConnection.DigitalOcean]: platformManagedCredentialsNotSupported,
[AppConnection.Netlify]: platformManagedCredentialsNotSupported,
[AppConnection.Okta]: platformManagedCredentialsNotSupported
};

@@ -36,6 +36,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Checkly]: "Checkly",
[AppConnection.Supabase]: "Supabase",
[AppConnection.DigitalOcean]: "DigitalOcean App Platform",
[AppConnection.Netlify]: "Netlify",
[AppConnection.Okta]: "Okta"
};

@@ -75,5 +76,6 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Checkly]: AppConnectionPlanType.Regular,
[AppConnection.Supabase]: AppConnectionPlanType.Regular,
[AppConnection.DigitalOcean]: AppConnectionPlanType.Regular,
[AppConnection.Netlify]: AppConnectionPlanType.Regular,
[AppConnection.Okta]: AppConnectionPlanType.Regular
};
@@ -81,6 +81,8 @@ import { humanitecConnectionService } from "./humanitec/humanitec-connection-ser
import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
import { ValidateNetlifyConnectionCredentialsSchema } from "./netlify";
import { netlifyConnectionService } from "./netlify/netlify-connection-service";
import { ValidateOktaConnectionCredentialsSchema } from "./okta";
import { oktaConnectionService } from "./okta/okta-connection-service";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";

@@ -148,6 +150,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Checkly]: ValidateChecklyConnectionCredentialsSchema,
[AppConnection.Supabase]: ValidateSupabaseConnectionCredentialsSchema,
[AppConnection.DigitalOcean]: ValidateDigitalOceanConnectionCredentialsSchema,
[AppConnection.Netlify]: ValidateNetlifyConnectionCredentialsSchema,
[AppConnection.Okta]: ValidateOktaConnectionCredentialsSchema
};

@@ -583,7 +586,7 @@ export const appConnectionServiceFactory = ({
deleteAppConnection,
connectAppConnectionById,
listAvailableAppConnectionsForUser,
github: githubConnectionService(connectAppConnectionById),
github: githubConnectionService(connectAppConnectionById, gatewayService),
githubRadar: githubRadarConnectionService(connectAppConnectionById),
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),

@@ -611,6 +614,7 @@ export const appConnectionServiceFactory = ({
checkly: checklyConnectionService(connectAppConnectionById),
supabase: supabaseConnectionService(connectAppConnectionById),
digitalOcean: digitalOceanAppPlatformConnectionService(connectAppConnectionById),
netlify: netlifyConnectionService(connectAppConnectionById),
okta: oktaConnectionService(connectAppConnectionById)
};
};

@@ -149,6 +149,12 @@ import {
} from "./ldap";
import { TMsSqlConnection, TMsSqlConnectionInput, TValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { TMySqlConnection, TMySqlConnectionInput, TValidateMySqlConnectionCredentialsSchema } from "./mysql";
import {
TNetlifyConnection,
TNetlifyConnectionConfig,
TNetlifyConnectionInput,
TValidateNetlifyConnectionCredentialsSchema
} from "./netlify";
import {
TOktaConnection,
TOktaConnectionConfig,

@@ -245,6 +251,7 @@ export type TAppConnection = { id: string } & (
| TChecklyConnection
| TSupabaseConnection
| TDigitalOceanConnection
| TNetlifyConnection
| TOktaConnection
);

@@ -288,6 +295,7 @@ export type TAppConnectionInput = { id: string } & (
| TChecklyConnectionInput
| TSupabaseConnectionInput
| TDigitalOceanConnectionInput
| TNetlifyConnectionInput
| TOktaConnectionInput
);

@@ -339,6 +347,7 @@ export type TAppConnectionConfig =
| TChecklyConnectionConfig
| TSupabaseConnectionConfig
| TDigitalOceanConnectionConfig
| TNetlifyConnectionConfig
| TOktaConnectionConfig;

export type TValidateAppConnectionCredentialsSchema =

@@ -377,6 +386,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateChecklyConnectionCredentialsSchema
| TValidateSupabaseConnectionCredentialsSchema
| TValidateDigitalOceanCredentialsSchema
| TValidateNetlifyConnectionCredentialsSchema
| TValidateOktaConnectionCredentialsSchema;

export type TListAwsConnectionKmsKeys = {
@@ -1,3 +1,4 @@
export enum AzureAppConfigurationConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}
@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";

import { getConfig } from "@app/lib/config/env";

@@ -14,13 +15,16 @@ import {
} from "./azure-app-configuration-connection-types";

export const getAzureAppConfigurationConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID } = getConfig();

return {
name: "Azure App Configuration" as const,
app: AppConnection.AzureAppConfiguration as const,
methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
methods: Object.values(AzureAppConfigurationConnectionMethod) as [
AzureAppConfigurationConnectionMethod.OAuth,
AzureAppConfigurationConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
};
};

@@ -29,70 +33,117 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
) => {
const { credentials: inputCredentials, method } = config;

const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();

if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}

let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;

try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}

if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}

if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
const {
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
SITE_URL
} = getConfig();

switch (method) {
case AzureAppConfigurationConnectionMethod.OAuth:
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}

let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}

if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}

if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}

return {
tenantId: inputCredentials.tenantId,
tenantId: oauthCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};

case AzureAppConfigurationConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
};

try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://azconfig.io/.default`,
client_id: clientId,
client_secret: clientSecret
})
);

return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}

default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureAppConfigurationConnectionMethod}`
message: `Unhandled Azure App Configuration connection method: ${method as AzureAppConfigurationConnectionMethod}`
});
}
};
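The ClientSecret branch above trades the app registration's client ID and secret for an App Configuration-scoped token via the client_credentials grant. A hedged sketch of that exchange in isolation, assuming AZURE_TOKEN_URL resolves to the standard Microsoft identity platform v2.0 token endpoint; tenant and credential values are placeholders:

// Sketch: Azure AD client_credentials exchange scoped to Azure App Configuration.
const exchangeClientSecret = async (tenantId: string, clientId: string, clientSecret: string) => {
  const res = await fetch(`https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`, {
    method: "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body: new URLSearchParams({
      grant_type: "client_credentials",
      scope: "https://azconfig.io/.default",
      client_id: clientId,
      client_secret: clientSecret
    })
  });
  // The response carries access_token and expires_in (seconds), which the code above
  // converts to an absolute expiresAt timestamp before persisting.
  return (await res.json()) as { access_token: string; expires_in: number };
};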
@@ -22,6 +22,29 @@ export const AzureAppConfigurationConnectionOAuthOutputCredentialsSchema = z.obj
expiresAt: z.number()
});

export const AzureAppConfigurationConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long"),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long"),
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
});

export const AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});

export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z

@@ -30,6 +53,14 @@ export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discri
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
)
}),
z.object({
method: z
.literal(AzureAppConfigurationConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureAppConfiguration).method),
credentials: AzureAppConfigurationConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
)
})
]);

@@ -39,9 +70,13 @@ export const CreateAzureAppConfigurationConnectionSchema = ValidateAzureAppConfi

export const UpdateAzureAppConfigurationConnectionSchema = z
.object({
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials
)
credentials: z
.union([
AzureAppConfigurationConnectionOAuthInputCredentialsSchema,
AzureAppConfigurationConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureAppConfiguration));

@@ -55,6 +90,10 @@ export const AzureAppConfigurationConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureAppConfigurationConnectionMethod.OAuth),
credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
})
])
);

@@ -65,6 +104,13 @@ export const SanitizedAzureAppConfigurationConnectionSchema = z.discriminatedUni
credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureAppConfigurationConnectionSchema.extend({
method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema,
AzureAppConfigurationConnectionOAuthOutputCredentialsSchema,
AzureAppConfigurationConnectionSchema,
CreateAzureAppConfigurationConnectionSchema,

@@ -39,3 +40,7 @@ export type ExchangeCodeAzureResponse = {
export type TAzureAppConfigurationConnectionCredentials = z.infer<
typeof AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
>;

export type TAzureAppConfigurationConnectionClientSecretCredentials = z.infer<
typeof AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
>;
@@ -23,7 +23,7 @@ import {
} from "./azure-client-secrets-connection-types";

export const getAzureClientSecretsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID } = getConfig();

return {
name: "Azure Client Secrets" as const,

@@ -32,7 +32,7 @@ export const getAzureClientSecretsConnectionListItem = () => {
AzureClientSecretsConnectionMethod.OAuth,
AzureClientSecretsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID
};
};

@@ -64,7 +64,10 @@ export const getAzureConnectionAccessToken = async (
const currentTime = Date.now();
switch (appConnection.method) {
case AzureClientSecretsConnectionMethod.OAuth:
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure OAuth environment variables have not been configured`
});

@@ -74,8 +77,8 @@ export const getAzureConnectionAccessToken = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
refresh_token: refreshToken
})
);

@@ -142,7 +145,11 @@ export const getAzureConnectionAccessToken = async (
export const validateAzureClientSecretsConnectionCredentials = async (config: TAzureClientSecretsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;

const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const {
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
SITE_URL
} = getConfig();

switch (method) {
case AzureClientSecretsConnectionMethod.OAuth:

@@ -150,7 +157,10 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}

if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});

@@ -166,8 +176,8 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
@@ -1,4 +1,5 @@
export enum AzureDevOpsConnectionMethod {
OAuth = "oauth",
AccessToken = "access-token"
AccessToken = "access-token",
ClientSecret = "client-secret"
}
@@ -18,21 +18,23 @@ import { AppConnection } from "../app-connection-enums";
import { AzureDevOpsConnectionMethod } from "./azure-devops-enums";
import {
ExchangeCodeAzureResponse,
TAzureDevOpsConnectionClientSecretCredentials,
TAzureDevOpsConnectionConfig,
TAzureDevOpsConnectionCredentials
} from "./azure-devops-types";

export const getAzureDevopsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID } = getConfig();

return {
name: "Azure DevOps" as const,
app: AppConnection.AzureDevOps as const,
methods: Object.values(AzureDevOpsConnectionMethod) as [
AzureDevOpsConnectionMethod.OAuth,
AzureDevOpsConnectionMethod.AccessToken
AzureDevOpsConnectionMethod.AccessToken,
AzureDevOpsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
};
};

@@ -53,63 +55,110 @@ export const getAzureDevopsConnection = async (
});
}

const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionCredentials;
const currentTime = Date.now();

// Handle different connection methods
switch (appConnection.method) {
case AzureDevOpsConnectionMethod.OAuth:
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}

if (!("refreshToken" in credentials)) {
const oauthCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionCredentials;

if (!("refreshToken" in oauthCredentials)) {
throw new BadRequestError({ message: "Invalid OAuth credentials" });
}

const { refreshToken, tenantId } = credentials;
const currentTime = Date.now();
const { refreshToken, tenantId } = oauthCredentials;

const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
refresh_token: refreshToken
})
);

const updatedCredentials = {
...credentials,
const updatedOAuthCredentials = {
...oauthCredentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};

const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
credentials: updatedOAuthCredentials,
orgId: appConnection.orgId,
kmsService
});

await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });

return data.access_token;

case AzureDevOpsConnectionMethod.AccessToken:
if (!("accessToken" in credentials)) {
const accessTokenCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as { accessToken: string };

if (!("accessToken" in accessTokenCredentials)) {
throw new BadRequestError({ message: "Invalid API token credentials" });
}
// For access token, return the basic auth token directly
return credentials.accessToken;
return accessTokenCredentials.accessToken;

case AzureDevOpsConnectionMethod.ClientSecret:
const clientSecretCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionClientSecretCredentials;

const { accessToken, expiresAt, clientId, clientSecret, tenantId: clientTenantId } = clientSecretCredentials;

// Check if token is still valid (with 5 minute buffer)
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return accessToken;
}

const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", clientTenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);

const updatedClientCredentials = {
...clientSecretCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};

const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});

await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });

return clientData.access_token;

default:
throw new BadRequestError({ message: `Unsupported connection method` });

@@ -119,7 +168,8 @@ export const getAzureDevopsConnection = async (
export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDevOpsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;

const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID, INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET, SITE_URL } =
getConfig();

switch (method) {
case AzureDevOpsConnectionMethod.OAuth:

@@ -127,7 +177,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}

if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});

@@ -137,15 +187,15 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
let tokenError: AxiosError | null = null;

try {
const oauthCredentials = inputCredentials as { code: string; tenantId: string };
const oauthCredentials = inputCredentials as { code: string; tenantId: string; orgName: string };
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

@@ -261,9 +311,67 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
});
}

case AzureDevOpsConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret, orgName } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
orgName: string;
};

try {
// First, get the access token using client credentials flow
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);

// Validate access to the specific organization
const response = await request.get(
`${IntegrationUrls.AZURE_DEVOPS_API_URL}/${encodeURIComponent(orgName)}/_apis/projects?api-version=7.2-preview.2&$top=1`,
{
headers: {
Authorization: `Bearer ${clientData.access_token}`
}
}
);

if (response.status !== 200) {
throw new BadRequestError({
message: `Failed to validate connection to organization '${orgName}': ${response.status}`
});
}

return {
tenantId,
clientId,
clientSecret,
orgName,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to authenticate with Azure DevOps using client credentials: ${
(e?.response?.data as { error_description?: string })?.error_description || e.message
}`
});
} else {
throw new InternalServerError({
message: "Failed to validate Azure DevOps client credentials"
});
}
}

default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureDevOpsConnectionMethod}`
message: `Unhandled Azure DevOps connection method: ${method as AzureDevOpsConnectionMethod}`
});
}
};
|
@@ -38,6 +38,42 @@ export const AzureDevOpsConnectionAccessTokenOutputCredentialsSchema = z.object(
|
||||
orgName: z.string()
|
||||
});
|
||||
|
||||
export const AzureDevOpsConnectionClientSecretInputCredentialsSchema = z.object({
|
||||
clientId: z
|
||||
.string()
|
||||
.uuid()
|
||||
.trim()
|
||||
.min(1, "Client ID required")
|
||||
.max(50, "Client ID must be at most 50 characters long")
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientId),
|
||||
clientSecret: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Client Secret required")
|
||||
.max(50, "Client Secret must be at most 50 characters long")
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientSecret),
|
||||
tenantId: z
|
||||
.string()
|
||||
.uuid()
|
||||
.trim()
|
||||
.min(1, "Tenant ID required")
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.tenantId),
|
||||
orgName: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Organization name required")
|
||||
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.orgName)
|
||||
});
|
||||
|
||||
export const AzureDevOpsConnectionClientSecretOutputCredentialsSchema = z.object({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string(),
|
||||
tenantId: z.string(),
|
||||
orgName: z.string(),
|
||||
accessToken: z.string(),
|
||||
expiresAt: z.number()
|
||||
});
|
||||
|
||||
export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: z
|
||||
@@ -54,6 +90,14 @@ export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUni
|
||||
credentials: AzureDevOpsConnectionAccessTokenInputCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
method: z
|
||||
.literal(AzureDevOpsConnectionMethod.ClientSecret)
|
||||
.describe(AppConnections.CREATE(AppConnection.AzureDevOps).method),
|
||||
credentials: AzureDevOpsConnectionClientSecretInputCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
@@ -64,7 +108,11 @@ export const CreateAzureDevOpsConnectionSchema = ValidateAzureDevOpsConnectionCr
|
||||
export const UpdateAzureDevOpsConnectionSchema = z
|
||||
.object({
|
||||
credentials: z
|
||||
.union([AzureDevOpsConnectionOAuthInputCredentialsSchema, AzureDevOpsConnectionAccessTokenInputCredentialsSchema])
|
||||
.union([
|
||||
AzureDevOpsConnectionOAuthInputCredentialsSchema,
|
||||
AzureDevOpsConnectionAccessTokenInputCredentialsSchema,
|
||||
AzureDevOpsConnectionClientSecretInputCredentialsSchema
|
||||
])
|
||||
.optional()
|
||||
.describe(AppConnections.UPDATE(AppConnection.AzureDevOps).credentials)
|
||||
})
|
||||
@@ -84,6 +132,10 @@ export const AzureDevOpsConnectionSchema = z.intersection(
|
||||
z.object({
|
||||
method: z.literal(AzureDevOpsConnectionMethod.AccessToken),
|
||||
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema
|
||||
}),
|
||||
z.object({
|
||||
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
|
||||
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema
|
||||
})
|
||||
])
|
||||
);
|
||||
@@ -101,6 +153,14 @@ export const SanitizedAzureDevOpsConnectionSchema = z.discriminatedUnion("method
|
||||
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema.pick({
|
||||
orgName: true
|
||||
})
|
||||
}),
|
||||
BaseAzureDevOpsConnectionSchema.extend({
|
||||
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
|
||||
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema.pick({
|
||||
clientId: true,
|
||||
tenantId: true,
|
||||
orgName: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
|
@@ -52,6 +52,11 @@ const getAuthHeaders = (appConnection: TAzureDevOpsConnection, accessToken: stri
|
||||
Authorization: `Basic ${basicAuthToken}`,
|
||||
Accept: "application/json"
|
||||
};
|
||||
case AzureDevOpsConnectionMethod.ClientSecret:
|
||||
return {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: "application/json"
|
||||
};
|
||||
default:
|
||||
throw new BadRequestError({ message: "Unsupported connection method" });
|
||||
}
|
||||
|
@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import {
|
||||
AzureDevOpsConnectionClientSecretOutputCredentialsSchema,
|
||||
AzureDevOpsConnectionOAuthOutputCredentialsSchema,
|
||||
AzureDevOpsConnectionSchema,
|
||||
CreateAzureDevOpsConnectionSchema,
|
||||
@@ -27,6 +28,10 @@ export type TAzureDevOpsConnectionConfig = DiscriminativePick<
|
||||
|
||||
export type TAzureDevOpsConnectionCredentials = z.infer<typeof AzureDevOpsConnectionOAuthOutputCredentialsSchema>;
|
||||
|
||||
export type TAzureDevOpsConnectionClientSecretCredentials = z.infer<
|
||||
typeof AzureDevOpsConnectionClientSecretOutputCredentialsSchema
|
||||
>;
|
||||
|
||||
export interface ExchangeCodeAzureResponse {
|
||||
token_type: string;
|
||||
scope: string;
|
||||
|
@@ -1,3 +1,4 @@
|
||||
export enum AzureKeyVaultConnectionMethod {
|
||||
OAuth = "oauth"
|
||||
OAuth = "oauth",
|
||||
ClientSecret = "client-secret"
|
||||
}
|
||||
|
@@ -1,3 +1,4 @@
|
||||
/* eslint-disable no-case-declarations */
|
||||
import { AxiosError, AxiosResponse } from "axios";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
@@ -16,22 +17,16 @@ import { AppConnection } from "../app-connection-enums";
|
||||
import { AzureKeyVaultConnectionMethod } from "./azure-key-vault-connection-enums";
|
||||
import {
|
||||
ExchangeCodeAzureResponse,
|
||||
TAzureKeyVaultConnectionClientSecretCredentials,
|
||||
TAzureKeyVaultConnectionConfig,
|
||||
TAzureKeyVaultConnectionCredentials
|
||||
} from "./azure-key-vault-connection-types";
|
||||
|
||||
export const getAzureConnectionAccessToken = async (
|
||||
connectionId: string,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
const appCfg = getConfig();
|
||||
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
|
||||
throw new BadRequestError({
|
||||
message: `Azure environment variables have not been configured`
|
||||
});
|
||||
}
|
||||
|
||||
const appConnection = await appConnectionDAL.findById(connectionId);
|
||||
|
||||
if (!appConnection) {
|
||||
@@ -46,129 +41,225 @@ export const getAzureConnectionAccessToken = async (
|
||||
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not a valid Azure connection` });
|
||||
}
|
||||
|
||||
const credentials = (await decryptAppConnectionCredentials({
|
||||
orgId: appConnection.orgId,
|
||||
kmsService,
|
||||
encryptedCredentials: appConnection.encryptedCredentials
|
||||
})) as TAzureKeyVaultConnectionCredentials;
|
||||
const currentTime = Date.now();
|
||||
|
||||
const { data } = await request.post<ExchangeCodeAzureResponse>(
|
||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
scope: `openid offline_access`,
|
||||
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
|
||||
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
|
||||
refresh_token: credentials.refreshToken
|
||||
})
|
||||
);
|
||||
switch (appConnection.method) {
|
||||
case AzureKeyVaultConnectionMethod.OAuth:
|
||||
const appCfg = getConfig();
|
||||
if (
|
||||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
|
||||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
|
||||
) {
|
||||
throw new BadRequestError({
|
||||
message: `Azure environment variables have not been configured`
|
||||
});
|
||||
}
|
||||
|
||||
const accessExpiresAt = new Date();
|
||||
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
|
||||
const oauthCredentials = (await decryptAppConnectionCredentials({
|
||||
orgId: appConnection.orgId,
|
||||
kmsService,
|
||||
encryptedCredentials: appConnection.encryptedCredentials
|
||||
})) as TAzureKeyVaultConnectionCredentials;
|
||||
|
||||
const updatedCredentials = {
|
||||
...credentials,
|
||||
accessToken: data.access_token,
|
||||
expiresAt: accessExpiresAt.getTime(),
|
||||
refreshToken: data.refresh_token
|
||||
};
|
||||
const { data } = await request.post<ExchangeCodeAzureResponse>(
|
||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
|
||||
new URLSearchParams({
|
||||
grant_type: "refresh_token",
|
||||
scope: `openid offline_access https://vault.azure.net/.default`,
|
||||
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
|
||||
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
|
||||
refresh_token: oauthCredentials.refreshToken
|
||||
})
|
||||
);
|
||||
|
||||
const encryptedCredentials = await encryptAppConnectionCredentials({
|
||||
credentials: updatedCredentials,
|
||||
orgId: appConnection.orgId,
|
||||
kmsService
|
||||
});
|
||||
const updatedOAuthCredentials = {
|
||||
...oauthCredentials,
|
||||
accessToken: data.access_token,
|
||||
expiresAt: currentTime + data.expires_in * 1000,
|
||||
refreshToken: data.refresh_token
|
||||
};
|
||||
|
||||
await appConnectionDAL.update(
|
||||
{ id: connectionId },
|
||||
{
|
||||
encryptedCredentials
|
||||
}
|
||||
);
|
||||
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
|
||||
credentials: updatedOAuthCredentials,
|
||||
orgId: appConnection.orgId,
|
||||
kmsService
|
||||
});
|
||||
|
||||
return {
|
||||
accessToken: data.access_token
|
||||
};
|
||||
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
|
||||
|
||||
return {
|
||||
accessToken: data.access_token
|
||||
};
|
||||
|
||||
case AzureKeyVaultConnectionMethod.ClientSecret:
|
||||
const clientSecretCredentials = (await decryptAppConnectionCredentials({
|
||||
orgId: appConnection.orgId,
|
||||
kmsService,
|
||||
encryptedCredentials: appConnection.encryptedCredentials
|
||||
})) as TAzureKeyVaultConnectionClientSecretCredentials;
|
||||
|
||||
const { accessToken, expiresAt, clientId, clientSecret, tenantId } = clientSecretCredentials;
|
||||
|
||||
// Check if token is still valid (with 5 minute buffer)
|
||||
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
|
||||
return { accessToken };
|
||||
}
|
||||
|
||||
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
|
||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
|
||||
new URLSearchParams({
|
||||
grant_type: "client_credentials",
|
||||
scope: `https://vault.azure.net/.default`,
|
||||
client_id: clientId,
|
||||
client_secret: clientSecret
|
||||
})
|
||||
);
|
||||
|
||||
const updatedClientCredentials = {
|
||||
...clientSecretCredentials,
|
||||
accessToken: clientData.access_token,
|
||||
expiresAt: currentTime + clientData.expires_in * 1000
|
||||
};
|
||||
|
||||
const encryptedClientCredentials = await encryptAppConnectionCredentials({
|
||||
credentials: updatedClientCredentials,
|
||||
orgId: appConnection.orgId,
|
||||
kmsService
|
||||
});
|
||||
|
||||
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
|
||||
|
||||
return { accessToken: clientData.access_token };
|
||||
|
||||
default:
|
||||
throw new InternalServerError({
|
||||
message: `Unhandled Azure Key Vault connection method: ${appConnection.method as AzureKeyVaultConnectionMethod}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const getAzureKeyVaultConnectionListItem = () => {
|
||||
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
|
||||
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID } = getConfig();
|
||||
|
||||
return {
|
||||
name: "Azure Key Vault" as const,
|
||||
app: AppConnection.AzureKeyVault as const,
|
||||
methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
|
||||
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
|
||||
methods: Object.values(AzureKeyVaultConnectionMethod) as [
|
||||
AzureKeyVaultConnectionMethod.OAuth,
|
||||
AzureKeyVaultConnectionMethod.ClientSecret
|
||||
],
|
||||
oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
|
||||
};
|
||||
};
|
||||
|
||||
export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureKeyVaultConnectionConfig) => {
|
||||
const { credentials: inputCredentials, method } = config;
|
||||
|
||||
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
|
||||
|
||||
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
|
||||
throw new InternalServerError({
|
||||
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
|
||||
});
|
||||
}
|
||||
|
||||
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
|
||||
let tokenError: AxiosError | null = null;
|
||||
|
||||
try {
|
||||
tokenResp = await request.post<ExchangeCodeAzureResponse>(
|
||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
|
||||
new URLSearchParams({
|
||||
grant_type: "authorization_code",
|
||||
code: inputCredentials.code,
|
||||
scope: `openid offline_access https://vault.azure.net/.default`,
|
||||
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
|
||||
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
|
||||
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
|
||||
})
|
||||
);
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof AxiosError) {
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenError) {
|
||||
if (tokenError instanceof AxiosError) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to get access token: ${
|
||||
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
|
||||
}`
|
||||
});
|
||||
} else {
|
||||
throw new InternalServerError({
|
||||
message: "Failed to get access token"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!tokenResp) {
|
||||
throw new InternalServerError({
|
||||
message: `Failed to get access token: Token was empty with no error`
|
||||
});
|
||||
}
|
||||
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
|
||||
getConfig();
|
||||
|
||||
switch (method) {
|
||||
case AzureKeyVaultConnectionMethod.OAuth:
|
||||
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
|
||||
throw new InternalServerError({
|
||||
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
|
||||
});
|
||||
}
|
||||
|
||||
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
|
||||
let tokenError: AxiosError | null = null;
|
||||
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
|
||||
try {
|
||||
tokenResp = await request.post<ExchangeCodeAzureResponse>(
|
||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
|
||||
new URLSearchParams({
|
||||
grant_type: "authorization_code",
|
||||
code: oauthCredentials.code,
|
||||
scope: `openid offline_access https://vault.azure.net/.default`,
|
||||
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
|
||||
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
|
||||
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
|
||||
})
|
||||
);
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof AxiosError) {
|
||||
tokenError = e;
|
||||
} else {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenError) {
|
||||
if (tokenError instanceof AxiosError) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to get access token: ${
|
||||
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
|
||||
}`
|
||||
});
|
||||
} else {
|
||||
throw new InternalServerError({
|
||||
message: "Failed to get access token"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!tokenResp) {
|
||||
throw new InternalServerError({
|
||||
message: `Failed to get access token: Token was empty with no error`
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
tenantId: inputCredentials.tenantId,
|
||||
tenantId: oauthCredentials.tenantId,
|
||||
accessToken: tokenResp.data.access_token,
|
||||
refreshToken: tokenResp.data.refresh_token,
|
||||
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
|
||||
};
|
||||
|
||||
case AzureKeyVaultConnectionMethod.ClientSecret:
|
||||
const { tenantId, clientId, clientSecret } = inputCredentials as {
|
||||
tenantId: string;
|
||||
clientId: string;
|
||||
clientSecret: string;
|
||||
};
|
||||
|
||||
try {
|
||||
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
|
||||
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
|
||||
new URLSearchParams({
|
||||
grant_type: "client_credentials",
|
||||
scope: `https://vault.azure.net/.default`,
|
||||
client_id: clientId,
|
||||
client_secret: clientSecret
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
tenantId,
|
||||
accessToken: clientData.access_token,
|
||||
expiresAt: Date.now() + clientData.expires_in * 1000,
|
||||
clientId,
|
||||
clientSecret
|
||||
};
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof AxiosError) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to get access token: ${
|
||||
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
|
||||
}`
|
||||
});
|
||||
} else {
|
||||
throw new InternalServerError({
|
||||
message: "Failed to get access token"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
throw new InternalServerError({
|
||||
message: `Unhandled Azure connection method: ${method as AzureKeyVaultConnectionMethod}`
|
||||
message: `Unhandled Azure Key Vault connection method: ${method as AzureKeyVaultConnectionMethod}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
@@ -22,6 +22,29 @@ export const AzureKeyVaultConnectionOAuthOutputCredentialsSchema = z.object({
|
||||
expiresAt: z.number()
|
||||
});
|
||||
|
||||
export const AzureKeyVaultConnectionClientSecretInputCredentialsSchema = z.object({
|
||||
clientId: z
|
||||
.string()
|
||||
.uuid()
|
||||
.trim()
|
||||
.min(1, "Client ID required")
|
||||
.max(50, "Client ID must be at most 50 characters long"),
|
||||
clientSecret: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Client Secret required")
|
||||
.max(50, "Client Secret must be at most 50 characters long"),
|
||||
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
|
||||
});
|
||||
|
||||
export const AzureKeyVaultConnectionClientSecretOutputCredentialsSchema = z.object({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string(),
|
||||
tenantId: z.string(),
|
||||
accessToken: z.string(),
|
||||
expiresAt: z.number()
|
||||
});
|
||||
|
||||
export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: z
|
||||
@@ -30,6 +53,14 @@ export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedU
|
||||
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
method: z
|
||||
.literal(AzureKeyVaultConnectionMethod.ClientSecret)
|
||||
.describe(AppConnections.CREATE(AppConnection.AzureKeyVault).method),
|
||||
credentials: AzureKeyVaultConnectionClientSecretInputCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
@@ -39,9 +70,13 @@ export const CreateAzureKeyVaultConnectionSchema = ValidateAzureKeyVaultConnecti
|
||||
|
||||
export const UpdateAzureKeyVaultConnectionSchema = z
|
||||
.object({
|
||||
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.optional().describe(
|
||||
AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials
|
||||
)
|
||||
credentials: z
|
||||
.union([
|
||||
AzureKeyVaultConnectionOAuthInputCredentialsSchema,
|
||||
AzureKeyVaultConnectionClientSecretInputCredentialsSchema
|
||||
])
|
||||
.optional()
|
||||
.describe(AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureKeyVault));
|
||||
|
||||
@@ -55,6 +90,10 @@ export const AzureKeyVaultConnectionSchema = z.intersection(
|
||||
z.object({
|
||||
method: z.literal(AzureKeyVaultConnectionMethod.OAuth),
|
||||
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema
|
||||
}),
|
||||
z.object({
|
||||
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
|
||||
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
|
||||
})
|
||||
])
|
||||
);
|
||||
@@ -65,6 +104,13 @@ export const SanitizedAzureKeyVaultConnectionSchema = z.discriminatedUnion("meth
|
||||
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema.pick({
|
||||
tenantId: true
|
||||
})
|
||||
}),
|
||||
BaseAzureKeyVaultConnectionSchema.extend({
|
||||
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
|
||||
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema.pick({
|
||||
clientId: true,
|
||||
tenantId: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
|
@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import {
|
||||
AzureKeyVaultConnectionClientSecretOutputCredentialsSchema,
|
||||
AzureKeyVaultConnectionOAuthOutputCredentialsSchema,
|
||||
AzureKeyVaultConnectionSchema,
|
||||
CreateAzureKeyVaultConnectionSchema,
|
||||
@@ -36,3 +37,7 @@ export type ExchangeCodeAzureResponse = {
|
||||
};
|
||||
|
||||
export type TAzureKeyVaultConnectionCredentials = z.infer<typeof AzureKeyVaultConnectionOAuthOutputCredentialsSchema>;
|
||||
|
||||
export type TAzureKeyVaultConnectionClientSecretCredentials = z.infer<
|
||||
typeof AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
|
||||
>;
|
||||
|
@@ -1,10 +1,16 @@
|
||||
import { createAppAuth } from "@octokit/auth-app";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { AxiosResponse } from "axios";
|
||||
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
|
||||
import https from "https";
|
||||
import RE2 from "re2";
|
||||
|
||||
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { request as httpRequest } from "@app/lib/config/request";
|
||||
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
|
||||
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||
|
||||
@@ -24,123 +30,224 @@ export const getGitHubConnectionListItem = () => {
|
||||
};
|
||||
};
|
||||
|
||||
export const getGitHubClient = (appConnection: TGitHubConnection) => {
|
||||
export const requestWithGitHubGateway = async <T>(
|
||||
appConnection: { gatewayId?: string | null },
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
|
||||
requestConfig: AxiosRequestConfig
|
||||
): Promise<AxiosResponse<T>> => {
|
||||
const { gatewayId } = appConnection;
|
||||
|
||||
// If gateway isn't set up, don't proxy request
|
||||
if (!gatewayId) {
|
||||
return httpRequest.request(requestConfig);
|
||||
}
|
||||
|
||||
const url = new URL(requestConfig.url as string);
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(url.toString());
|
||||
|
||||
const [targetHost] = await verifyHostInputValidity(url.host, true);
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
|
||||
return withGatewayProxy(
|
||||
async (proxyPort) => {
|
||||
const httpsAgent = new https.Agent({
|
||||
servername: targetHost
|
||||
});
|
||||
|
||||
url.protocol = "https:";
|
||||
url.host = `localhost:${proxyPort}`;
|
||||
|
||||
const finalRequestConfig: AxiosRequestConfig = {
|
||||
...requestConfig,
|
||||
url: url.toString(),
|
||||
httpsAgent,
|
||||
headers: {
|
||||
...requestConfig.headers,
|
||||
Host: targetHost
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
return await httpRequest.request(finalRequestConfig);
|
||||
} catch (error) {
|
||||
const axiosError = error as AxiosError;
|
||||
logger.error("Error during GitHub gateway request:", axiosError.message, axiosError.response?.data);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
{
|
||||
protocol: GatewayProxyProtocol.Tcp,
|
||||
targetHost,
|
||||
targetPort: 443,
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
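For context, the gateway path above relies on a common trick: the TCP bytes are tunnelled to a local proxy port while TLS SNI and the Host header stay pointed at the real GitHub host. A condensed, hedged sketch of that request shape follows; the host, port, and path are placeholders, and axios is used directly here rather than the project's wrapped client.

// Hedged sketch only — not part of this diff.
import https from "https";
import axios, { AxiosRequestConfig } from "axios";

const buildProxiedConfig = (targetHost: string, proxyPort: number, path: string): AxiosRequestConfig => ({
  url: `https://localhost:${proxyPort}${path}`, // bytes travel through the local gateway tunnel
  method: "GET",
  httpsAgent: new https.Agent({ servername: targetHost }), // SNI still names the real host
  headers: { Host: targetHost } // virtual-host routing on the far end stays correct
});

// e.g. axios.request(buildProxiedConfig("api.github.com", 8443, "/user/repos"))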
export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const { method, credentials } = appConnection;
|
||||
|
||||
let client: Octokit;
|
||||
|
||||
const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
|
||||
const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
|
||||
|
||||
switch (method) {
|
||||
case GitHubConnectionMethod.App:
|
||||
if (!appId || !appPrivateKey) {
|
||||
throw new InternalServerError({
|
||||
message: `GitHub ${getAppConnectionMethodName(method).replace("GitHub", "")} has not been configured`
|
||||
});
|
||||
}
|
||||
|
||||
client = new Octokit({
|
||||
authStrategy: createAppAuth,
|
||||
auth: {
|
||||
appId,
|
||||
privateKey: appPrivateKey,
|
||||
installationId: credentials.installationId
|
||||
}
|
||||
});
|
||||
break;
|
||||
case GitHubConnectionMethod.OAuth:
|
||||
client = new Octokit({
|
||||
auth: credentials.accessToken
|
||||
});
|
||||
break;
|
||||
default:
|
||||
throw new InternalServerError({
|
||||
message: `Unhandled GitHub connection method: ${method as GitHubConnectionMethod}`
|
||||
});
|
||||
if (!appId || !appPrivateKey) {
|
||||
throw new InternalServerError({
|
||||
message: `GitHub App keys are not configured.`
|
||||
});
|
||||
}
|
||||
|
||||
return client;
|
||||
if (appConnection.method !== GitHubConnectionMethod.App) {
|
||||
throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
|
||||
}
|
||||
|
||||
const appAuth = createAppAuth({
|
||||
appId,
|
||||
privateKey: appPrivateKey,
|
||||
installationId: appConnection.credentials.installationId
|
||||
});
|
||||
|
||||
const { token } = await appAuth({ type: "installation" });
|
||||
return token;
|
||||
};
|
||||
|
||||
function extractNextPageUrl(linkHeader: string | undefined): string | null {
|
||||
if (!linkHeader) return null;
|
||||
|
||||
const links = linkHeader.split(",");
|
||||
const nextLink = links.find((link) => link.includes('rel="next"'));
|
||||
|
||||
if (!nextLink) return null;
|
||||
|
||||
const match = new RE2(/<([^>]+)>/).exec(nextLink);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
export const makePaginatedGitHubRequest = async <T, R = T[]>(
|
||||
appConnection: TGitHubConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
|
||||
path: string,
|
||||
dataMapper?: (data: R) => T[]
|
||||
): Promise<T[]> => {
|
||||
const { credentials, method } = appConnection;
|
||||
|
||||
const token =
|
||||
method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
|
||||
let url: string | null = `https://api.${credentials.host || "github.com"}${path}`;
|
||||
let results: T[] = [];
|
||||
let i = 0;
|
||||
|
||||
while (url && i < 1000) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
|
||||
url,
|
||||
method: "GET",
|
||||
headers: {
|
||||
Accept: "application/vnd.github+json",
|
||||
Authorization: `Bearer ${token}`,
|
||||
"X-GitHub-Api-Version": "2022-11-28"
|
||||
}
|
||||
});
|
||||
|
||||
const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
|
||||
results = results.concat(items);
|
||||
|
||||
url = extractNextPageUrl(response.headers.link as string | undefined);
|
||||
i += 1;
|
||||
}
|
||||
|
||||
return results;
|
||||
};
|
||||
|
||||
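For reference, a hedged usage sketch of the paginated helper above; "appConnection" and "gatewayService" are placeholders for values already in scope. GitHub paginates via the RFC 8288 Link header (for example: <https://api.github.com/user/repos?page=2>; rel="next"), which the loop follows until no next link remains or the 1000-page safety cap is reached.

// Hedged usage sketch only — not part of this diff.
const listAdminRepos = async () => {
  const repos = await makePaginatedGitHubRequest<GitHubRepository>(appConnection, gatewayService, "/user/repos");
  return repos.filter((repo) => repo.permissions?.admin);
};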
type GitHubOrganization = {
|
||||
login: string;
|
||||
id: number;
|
||||
type: string;
|
||||
};
|
||||
|
||||
type GitHubRepository = {
|
||||
id: number;
|
||||
name: string;
|
||||
owner: GitHubOrganization;
|
||||
permissions?: {
|
||||
admin: boolean;
|
||||
maintain: boolean;
|
||||
push: boolean;
|
||||
triage: boolean;
|
||||
pull: boolean;
|
||||
};
|
||||
};
|
||||
|
||||
export const getGitHubRepositories = async (appConnection: TGitHubConnection) => {
|
||||
const client = getGitHubClient(appConnection);
|
||||
type GitHubEnvironment = {
|
||||
id: number;
|
||||
name: string;
|
||||
};
|
||||
|
||||
let repositories: GitHubRepository[];
|
||||
|
||||
switch (appConnection.method) {
|
||||
case GitHubConnectionMethod.App:
|
||||
repositories = await client.paginate("GET /installation/repositories");
|
||||
break;
|
||||
case GitHubConnectionMethod.OAuth:
|
||||
default:
|
||||
repositories = (await client.paginate("GET /user/repos")).filter((repo) => repo.permissions?.admin);
|
||||
break;
|
||||
export const getGitHubRepositories = async (
|
||||
appConnection: TGitHubConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
if (appConnection.method === GitHubConnectionMethod.App) {
|
||||
return makePaginatedGitHubRequest<GitHubRepository, { repositories: GitHubRepository[] }>(
|
||||
appConnection,
|
||||
gatewayService,
|
||||
"/installation/repositories",
|
||||
(data) => data.repositories
|
||||
);
|
||||
}
|
||||
|
||||
return repositories;
|
||||
const repos = await makePaginatedGitHubRequest<GitHubRepository>(appConnection, gatewayService, "/user/repos");
|
||||
return repos.filter((repo) => repo.permissions?.admin);
|
||||
};
|
||||
|
||||
export const getGitHubOrganizations = async (appConnection: TGitHubConnection) => {
|
||||
const client = getGitHubClient(appConnection);
|
||||
export const getGitHubOrganizations = async (
|
||||
appConnection: TGitHubConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
if (appConnection.method === GitHubConnectionMethod.App) {
|
||||
const installationRepositories = await makePaginatedGitHubRequest<
|
||||
GitHubRepository,
|
||||
{ repositories: GitHubRepository[] }
|
||||
>(appConnection, gatewayService, "/installation/repositories", (data) => data.repositories);
|
||||
|
||||
let organizations: GitHubOrganization[];
|
||||
|
||||
switch (appConnection.method) {
|
||||
case GitHubConnectionMethod.App: {
|
||||
const installationRepositories = await client.paginate("GET /installation/repositories");
|
||||
|
||||
const organizationMap: Record<string, GitHubOrganization> = {};
|
||||
|
||||
installationRepositories.forEach((repo) => {
|
||||
if (repo.owner.type === "Organization") {
|
||||
organizationMap[repo.owner.id] = repo.owner;
|
||||
}
|
||||
});
|
||||
|
||||
organizations = Object.values(organizationMap);
|
||||
|
||||
break;
|
||||
}
|
||||
case GitHubConnectionMethod.OAuth:
|
||||
default:
|
||||
organizations = await client.paginate("GET /user/orgs");
|
||||
break;
|
||||
}
|
||||
|
||||
return organizations;
|
||||
};
|
||||
|
||||
export const getGitHubEnvironments = async (appConnection: TGitHubConnection, owner: string, repo: string) => {
|
||||
const client = getGitHubClient(appConnection);
|
||||
|
||||
try {
|
||||
const environments = await client.paginate("GET /repos/{owner}/{repo}/environments", {
|
||||
owner,
|
||||
repo
|
||||
const organizationMap: Record<string, GitHubOrganization> = {};
|
||||
installationRepositories.forEach((repo) => {
|
||||
if (repo.owner.type === "Organization") {
|
||||
organizationMap[repo.owner.id] = repo.owner;
|
||||
}
|
||||
});
|
||||
|
||||
return environments;
|
||||
} catch (e) {
|
||||
// repo doesn't have envs
|
||||
if ((e as { status: number }).status === 404) {
|
||||
return [];
|
||||
}
|
||||
return Object.values(organizationMap);
|
||||
}
|
||||
|
||||
throw e;
|
||||
return makePaginatedGitHubRequest<GitHubOrganization>(appConnection, gatewayService, "/user/orgs");
|
||||
};
|
||||
|
||||
export const getGitHubEnvironments = async (
|
||||
appConnection: TGitHubConnection,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
|
||||
owner: string,
|
||||
repo: string
|
||||
) => {
|
||||
try {
|
||||
return await makePaginatedGitHubRequest<GitHubEnvironment, { environments: GitHubEnvironment[] }>(
|
||||
appConnection,
|
||||
gatewayService,
|
||||
`/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/environments`,
|
||||
(data) => data.environments
|
||||
);
|
||||
} catch (error) {
|
||||
const axiosError = error as AxiosError;
|
||||
if (axiosError.response?.status === 404) return [];
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -159,9 +266,11 @@ export function isGithubErrorResponse(data: GithubTokenRespData): data is Github
|
||||
return "error" in data;
|
||||
}
|
||||
|
||||
export const validateGitHubConnectionCredentials = async (config: TGitHubConnectionConfig) => {
|
||||
export const validateGitHubConnectionCredentials = async (
|
||||
config: TGitHubConnectionConfig,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const { credentials, method } = config;
|
||||
|
||||
const {
|
||||
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID,
|
||||
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET,
|
||||
@@ -192,10 +301,13 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
}
|
||||
|
||||
let tokenResp: AxiosResponse<GithubTokenRespData>;
|
||||
const host = credentials.host || "github.com";
|
||||
|
||||
try {
|
||||
tokenResp = await request.get<GithubTokenRespData>("https://github.com/login/oauth/access_token", {
|
||||
params: {
|
||||
tokenResp = await requestWithGitHubGateway<GithubTokenRespData>(config, gatewayService, {
|
||||
url: `https://${host}/login/oauth/access_token`,
|
||||
method: "POST",
|
||||
data: {
|
||||
client_id: clientId,
|
||||
client_secret: clientSecret,
|
||||
code: credentials.code,
|
||||
@@ -203,7 +315,7 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
},
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
"Accept-Encoding": "application/json"
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
|
||||
@@ -233,7 +345,7 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
|
||||
}
|
||||
|
||||
const installationsResp = await request.get<{
|
||||
const installationsResp = await requestWithGitHubGateway<{
|
||||
installations: {
|
||||
id: number;
|
||||
account: {
|
||||
@@ -242,7 +354,8 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
id: number;
|
||||
};
|
||||
}[];
|
||||
}>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, {
|
||||
}>(config, gatewayService, {
|
||||
url: IntegrationUrls.GITHUB_USER_INSTALLATIONS.replace("api.github.com", `api.${host}`),
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: `Bearer ${tokenResp.data.access_token}`,
|
||||
|
@@ -11,20 +11,24 @@ import {
|
||||
import { GitHubConnectionMethod } from "./github-connection-enums";
|
||||
|
||||
export const GitHubConnectionOAuthInputCredentialsSchema = z.object({
|
||||
code: z.string().trim().min(1, "OAuth code required")
|
||||
code: z.string().trim().min(1, "OAuth code required"),
|
||||
host: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const GitHubConnectionAppInputCredentialsSchema = z.object({
|
||||
code: z.string().trim().min(1, "GitHub App code required"),
|
||||
installationId: z.string().min(1, "GitHub App Installation ID required")
|
||||
installationId: z.string().min(1, "GitHub App Installation ID required"),
|
||||
host: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const GitHubConnectionOAuthOutputCredentialsSchema = z.object({
|
||||
accessToken: z.string()
|
||||
accessToken: z.string(),
|
||||
host: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const GitHubConnectionAppOutputCredentialsSchema = z.object({
|
||||
installationId: z.string()
|
||||
installationId: z.string(),
|
||||
host: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
@@ -43,7 +47,9 @@ export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("m
|
||||
]);
|
||||
|
||||
export const CreateGitHubConnectionSchema = ValidateGitHubConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.GitHub)
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.GitHub, {
|
||||
supportsGateways: true
|
||||
})
|
||||
);
|
||||
|
||||
export const UpdateGitHubConnectionSchema = z
|
||||
@@ -53,7 +59,11 @@ export const UpdateGitHubConnectionSchema = z
|
||||
.optional()
|
||||
.describe(AppConnections.UPDATE(AppConnection.GitHub).credentials)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHub));
|
||||
.and(
|
||||
GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHub, {
|
||||
supportsGateways: true
|
||||
})
|
||||
);
|
||||
|
||||
const BaseGitHubConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.GitHub) });
|
||||
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
@@ -19,11 +20,14 @@ type TListGitHubEnvironmentsDTO = {
|
||||
owner: string;
|
||||
};
|
||||
|
||||
export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
|
||||
export const githubConnectionService = (
|
||||
getAppConnection: TGetAppConnectionFunc,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const listRepositories = async (connectionId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);
|
||||
|
||||
const repositories = await getGitHubRepositories(appConnection);
|
||||
const repositories = await getGitHubRepositories(appConnection, gatewayService);
|
||||
|
||||
return repositories;
|
||||
};
|
||||
@@ -31,7 +35,7 @@ export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc)
|
||||
const listOrganizations = async (connectionId: string, actor: OrgServiceActor) => {
|
||||
const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);
|
||||
|
||||
const organizations = await getGitHubOrganizations(appConnection);
|
||||
const organizations = await getGitHubOrganizations(appConnection, gatewayService);
|
||||
|
||||
return organizations;
|
||||
};
|
||||
@@ -42,7 +46,7 @@ export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc)
|
||||
) => {
|
||||
const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);
|
||||
|
||||
const environments = await getGitHubEnvironments(appConnection, owner, repo);
|
||||
const environments = await getGitHubEnvironments(appConnection, gatewayService, owner, repo);
|
||||
|
||||
return environments;
|
||||
};
|
||||
|
@@ -17,4 +17,7 @@ export type TGitHubConnectionInput = z.infer<typeof CreateGitHubConnectionSchema
|
||||
|
||||
export type TValidateGitHubConnectionCredentialsSchema = typeof ValidateGitHubConnectionCredentialsSchema;
|
||||
|
||||
export type TGitHubConnectionConfig = DiscriminativePick<TGitHubConnectionInput, "method" | "app" | "credentials">;
|
||||
export type TGitHubConnectionConfig = DiscriminativePick<
|
||||
TGitHubConnectionInput,
|
||||
"method" | "app" | "credentials" | "gatewayId"
|
||||
>;
|
||||
|
@@ -222,6 +222,37 @@ export const validateGitLabConnectionCredentials = async (config: TGitLabConnect
|
||||
return inputCredentials;
|
||||
};
|
||||
|
||||
export const getGitLabConnectionClient = async (
|
||||
appConnection: TGitLabConnection,
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
|
||||
) => {
|
||||
let { accessToken } = appConnection.credentials;
|
||||
|
||||
if (
|
||||
appConnection.method === GitLabConnectionMethod.OAuth &&
|
||||
appConnection.credentials.refreshToken &&
|
||||
new Date(appConnection.credentials.expiresAt) < new Date()
|
||||
) {
|
||||
accessToken = await refreshGitLabToken(
|
||||
appConnection.credentials.refreshToken,
|
||||
appConnection.id,
|
||||
appConnection.orgId,
|
||||
appConnectionDAL,
|
||||
kmsService,
|
||||
appConnection.credentials.instanceUrl
|
||||
);
|
||||
}
|
||||
|
||||
const client = await getGitLabClient(
|
||||
accessToken,
|
||||
appConnection.credentials.instanceUrl,
|
||||
appConnection.method === GitLabConnectionMethod.OAuth
|
||||
);
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
export const listGitLabProjects = async ({
|
||||
appConnection,
|
||||
appConnectionDAL,
|
||||
|
4
backend/src/services/app-connection/netlify/index.ts
Normal file
4
backend/src/services/app-connection/netlify/index.ts
Normal file
@@ -0,0 +1,4 @@
|
export * from "./netlify-connection-constants";
export * from "./netlify-connection-fns";
export * from "./netlify-connection-schemas";
export * from "./netlify-connection-types";
@@ -0,0 +1,3 @@
export enum NetlifyConnectionMethod {
  AccessToken = "access-token"
}
@@ -0,0 +1,35 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";

import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import { NetlifyConnectionMethod } from "./netlify-connection-constants";
import { NetlifyPublicAPI } from "./netlify-connection-public-client";
import { TNetlifyConnectionConfig } from "./netlify-connection-types";

export const getNetlifyConnectionListItem = () => {
  return {
    name: "Netlify" as const,
    app: AppConnection.Netlify as const,
    methods: Object.values(NetlifyConnectionMethod)
  };
};

export const validateNetlifyConnectionCredentials = async (config: TNetlifyConnectionConfig) => {
  try {
    await NetlifyPublicAPI.healthcheck(config);
  } catch (error: unknown) {
    if (error instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
      });
    }

    throw new BadRequestError({
      message: "Unable to validate connection - verify credentials"
    });
  }

  return config.credentials;
};
@@ -0,0 +1,261 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable class-methods-use-this */
|
||||
import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxiosError } from "axios";
|
||||
|
||||
import { createRequestClient } from "@app/lib/config/request";
|
||||
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||
|
||||
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
|
||||
import { TNetlifyAccount, TNetlifyConnectionConfig, TNetlifySite, TNetlifyVariable } from "./netlify-connection-types";
|
||||
|
||||
export function getNetlifyAuthHeaders(connection: TNetlifyConnectionConfig): Record<string, string> {
|
||||
switch (connection.method) {
|
||||
case NetlifyConnectionMethod.AccessToken:
|
||||
return {
|
||||
Authorization: `Bearer ${connection.credentials.accessToken}`
|
||||
};
|
||||
default:
|
||||
throw new Error(`Unsupported Netlify connection method`);
|
||||
}
|
||||
}
|
||||
|
||||
export function getNetlifyRatelimiter(response: AxiosResponse): {
|
||||
maxAttempts: number;
|
||||
isRatelimited: boolean;
|
||||
wait: () => Promise<void>;
|
||||
} {
|
||||
const wait = (seconds: number = 60) => {
|
||||
return new Promise<void>((res) => {
|
||||
setTimeout(res, seconds * 1000);
|
||||
});
|
||||
};
|
||||
|
||||
let remaining = parseInt(response.headers["X-RateLimit-Remaining"] as string, 10);
|
||||
let isRatelimited = response.status === HttpStatusCode.TooManyRequests;
|
||||
|
||||
if (isRatelimited) {
|
||||
if (Math.round(remaining) > 0) {
|
||||
isRatelimited = true;
|
||||
remaining += 1; // Jitter to ensure we wait at least 1 second
|
||||
} else {
|
||||
remaining = 60;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
isRatelimited,
|
||||
wait: () => wait(remaining),
|
||||
maxAttempts: 3
|
||||
};
|
||||
}
|
||||
|
||||
type NetlifyParams = {
|
||||
account_id: string;
|
||||
context_name?: string;
|
||||
site_id?: string;
|
||||
};
|
||||
|
||||
class NetlifyPublicClient {
|
||||
private client: AxiosInstance;
|
||||
|
||||
constructor() {
|
||||
this.client = createRequestClient({
|
||||
baseURL: `${IntegrationUrls.NETLIFY_API_URL}/api/v1`,
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async send<T>(connection: TNetlifyConnectionConfig, config: AxiosRequestConfig, retryAttempt = 0): Promise<T> {
|
||||
const response = await this.client.request<T>({
|
||||
...config,
|
||||
timeout: 1000 * 60, // 60 seconds timeout
|
||||
validateStatus: (status) => (status >= 200 && status < 300) || status === HttpStatusCode.TooManyRequests,
|
||||
headers: getNetlifyAuthHeaders(connection)
|
||||
});
|
||||
const limiter = getNetlifyRatelimiter(response);
|
||||
|
||||
if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
|
||||
await limiter.wait();
|
||||
return this.send(connection, config, retryAttempt + 1);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
healthcheck(connection: TNetlifyConnectionConfig) {
|
||||
switch (connection.method) {
|
||||
case NetlifyConnectionMethod.AccessToken:
|
||||
return this.getNetlifyAccounts(connection);
|
||||
default:
|
||||
throw new Error(`Unsupported Netlify connection method`);
|
||||
}
|
||||
}
|
||||
|
||||
async getVariables(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, ...params }: NetlifyParams,
|
||||
limit: number = 50,
|
||||
page: number = 1
|
||||
) {
|
||||
const res = await this.send<TNetlifyVariable[]>(connection, {
|
||||
method: "GET",
|
||||
url: `/accounts/${account_id}/env`,
|
||||
params: {
|
||||
...params,
|
||||
limit,
|
||||
page
|
||||
}
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async createVariable(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, ...params }: NetlifyParams,
|
||||
...variables: TNetlifyVariable[]
|
||||
) {
|
||||
const res = await this.send<TNetlifyVariable>(connection, {
|
||||
method: "POST",
|
||||
url: `/accounts/${account_id}/env`,
|
||||
data: variables,
|
||||
params
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async updateVariableValue(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, ...params }: NetlifyParams,
|
||||
variable: TNetlifyVariable
|
||||
) {
|
||||
const res = await this.send<TNetlifyVariable>(connection, {
|
||||
method: "PATCH",
|
||||
url: `/accounts/${account_id}/env/${variable.key}`,
|
||||
data: variable,
|
||||
params
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async updateVariable(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, ...params }: NetlifyParams,
|
||||
variable: TNetlifyVariable
|
||||
) {
|
||||
const res = await this.send<TNetlifyVariable>(connection, {
|
||||
method: "PUT",
|
||||
url: `/accounts/${account_id}/env/${variable.key}`,
|
||||
data: variable,
|
||||
params
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async getVariable(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, ...params }: NetlifyParams,
|
||||
variable: Pick<TNetlifyVariable, "key">
|
||||
) {
|
||||
try {
|
||||
const res = await this.send<TNetlifyVariable>(connection, {
|
||||
method: "GET",
|
||||
url: `/accounts/${account_id}/env/${variable.key}`,
|
||||
params
|
||||
});
|
||||
|
||||
return res;
|
||||
} catch (error) {
|
||||
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async upsertVariable(connection: TNetlifyConnectionConfig, params: NetlifyParams, variable: TNetlifyVariable) {
|
||||
const res = await this.getVariable(connection, params, variable);
|
||||
|
||||
if (!res) {
|
||||
return this.createVariable(connection, params, variable);
|
||||
}
|
||||
|
||||
if (res.is_secret) {
|
||||
await this.deleteVariable(connection, params, variable);
|
||||
return this.createVariable(connection, params, variable);
|
||||
}
|
||||
|
||||
return this.updateVariable(connection, params, variable);
|
||||
}
|
||||
|
||||
async deleteVariable(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, ...params }: NetlifyParams,
|
||||
variable: Pick<TNetlifyVariable, "key">
|
||||
) {
|
||||
try {
|
||||
const res = await this.send<TNetlifyVariable>(connection, {
|
||||
method: "DELETE",
|
||||
url: `/accounts/${account_id}/env/${variable.key}`,
|
||||
params
|
||||
});
|
||||
|
||||
return res;
|
||||
} catch (error) {
|
||||
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async deleteVariableValue(
|
||||
connection: TNetlifyConnectionConfig,
|
||||
{ account_id, value_id, ...params }: NetlifyParams & { value_id: string },
|
||||
variable: Pick<TNetlifyVariable, "key" | "id">
|
||||
) {
|
||||
try {
|
||||
const res = await this.send<TNetlifyVariable>(connection, {
|
||||
method: "DELETE",
|
||||
url: `/accounts/${account_id}/${variable.key}/value/${value_id}`,
|
||||
params
|
||||
});
|
||||
|
||||
return res;
|
||||
} catch (error) {
|
||||
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getSites(connection: TNetlifyConnectionConfig, accountId: string) {
|
||||
const res = await this.send<TNetlifySite[]>(connection, {
|
||||
method: "GET",
|
||||
url: `/${accountId}/sites`
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async getNetlifyAccounts(connection: TNetlifyConnectionConfig) {
|
||||
const res = await this.send<TNetlifyAccount[]>(connection, {
|
||||
method: "GET",
|
||||
url: `/accounts`
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
export const NetlifyPublicAPI = new NetlifyPublicClient();
|
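A hedged usage sketch of the client above; the connection value and the account ID are placeholders. getVariable() returns null on a 404 rather than throwing, which is what lets upsertVariable() decide between create, secret re-create, and plain update.

// Hedged usage sketch only — not part of this diff.
const readApiUrlVariable = async (connection: TNetlifyConnectionConfig) => {
  const existing = await NetlifyPublicAPI.getVariable(connection, { account_id: "my-account-id" }, { key: "API_URL" });
  return existing; // null when the variable does not exist in that account
};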
@@ -0,0 +1,67 @@
|
||||
import z from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
GenericCreateAppConnectionFieldsSchema,
|
||||
GenericUpdateAppConnectionFieldsSchema
|
||||
} from "@app/services/app-connection/app-connection-schemas";
|
||||
|
||||
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
|
||||
|
||||
export const NetlifyConnectionMethodSchema = z
|
||||
.nativeEnum(NetlifyConnectionMethod)
|
||||
.describe(AppConnections.CREATE(AppConnection.Netlify).method);
|
||||
|
||||
export const NetlifyConnectionAccessTokenCredentialsSchema = z.object({
|
||||
accessToken: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Access Token required")
|
||||
.max(255)
|
||||
.describe(AppConnections.CREDENTIALS.NETLIFY.accessToken)
|
||||
});
|
||||
|
||||
const BaseNetlifyConnectionSchema = BaseAppConnectionSchema.extend({
|
||||
app: z.literal(AppConnection.Netlify)
|
||||
});
|
||||
|
||||
export const NetlifyConnectionSchema = BaseNetlifyConnectionSchema.extend({
|
||||
method: NetlifyConnectionMethodSchema,
|
||||
credentials: NetlifyConnectionAccessTokenCredentialsSchema
|
||||
});
|
||||
|
||||
export const SanitizedNetlifyConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseNetlifyConnectionSchema.extend({
|
||||
method: NetlifyConnectionMethodSchema,
|
||||
credentials: NetlifyConnectionAccessTokenCredentialsSchema.pick({})
|
||||
})
|
||||
]);
|
||||
|
||||
export const ValidateNetlifyConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: NetlifyConnectionMethodSchema,
|
||||
credentials: NetlifyConnectionAccessTokenCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.Netlify).credentials
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
export const CreateNetlifyConnectionSchema = ValidateNetlifyConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.Netlify)
|
||||
);
|
||||
|
||||
export const UpdateNetlifyConnectionSchema = z
|
||||
.object({
|
||||
credentials: NetlifyConnectionAccessTokenCredentialsSchema.optional().describe(
|
||||
AppConnections.UPDATE(AppConnection.Netlify).credentials
|
||||
)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Netlify));
|
||||
|
||||
export const NetlifyConnectionListItemSchema = z.object({
|
||||
name: z.literal("Netlify"),
|
||||
app: z.literal(AppConnection.Netlify),
|
||||
methods: z.nativeEnum(NetlifyConnectionMethod).array()
|
||||
});
|