Mirror of https://github.com/Infisical/infisical.git, synced 2025-08-05 07:30:33 +00:00

Comparing docs/terra ... daniel/inj (72 commits)
Commits (SHA1):

899d01237c
7357d377e1
573b990aa3
e15086edc0
13ef3809bd
fb49c9250a
5ced7fa923
5ffd42378a
f995708e44
c266d68993
c7c8107f85
b906fe34a1
bec1fefee8
cd03107a60
07965de1db
b20ff0f029
691cbe0a4f
0787128803
837158e344
03bd1471b2
f53c39f65b
092695089d
2d80681597
cf23f98170
c4c8e121f0
0701c996e5
4ca6f165b7
b9dd565926
136b0bdcb5
7266d1f310
9c6ec807cb
756b46428a
5fcae35fae
359e19f804
2aa548c7dc
9d3a382b48
4f00fc6777
1f6a63fa71
9e76fa8230
e2d4816465
37c8fc80f7
5ca521ea6b
40de8331a3
9374ee3c2e
561dbb8835
dece214073
992df5c7d0
00e382d774
f63c434c0e
9f0250caf2
d47f6f7ec9
1126c6b0fa
7949142ea7
da28f9224b
122de99606
82b765553c
8972521716
81b45b24ec
f2b0e4ae37
57fcfdaf21
e430abfc9e
0b7b32bdc3
585cb1b30c
7fdee073d8
c368178cb1
52ef0e6b81
0f06c4c27a
e34deb7bd0
4b6f9fdec2
5df7539f65
2ff211d235
b4ed1fa96a
.github/workflows/release_build_infisical_cli.yml (vendored, 153 lines removed)
@@ -1,153 +0,0 @@
name: Build and release CLI

on:
  workflow_dispatch:

  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"

permissions:
  contents: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
      CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-latest
    env:
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Print version
        run: echo ${{ env.CLI_VERSION }}

      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-latest-8-cores
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: Setup for libssl1.0-dev
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
          sudo apt update
          sudo apt-get install -y libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: v1.26.2-pro
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
        with:
          ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
      - name: Install deb-s3
        run: gem install deb-s3
      - name: Configure GPG Key
        run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
        env:
          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
          GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
          INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
          INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
          AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
      - name: Invalidate Cloudfront cache
        run: aws cloudfront create-invalidation --distribution-id $CLOUDFRONT_DISTRIBUTION_ID --paths '/deb/dists/stable/*'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
          CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}
.github/workflows/run-cli-tests.yml (vendored, 55 lines removed)
@@ -1,55 +0,0 @@
name: Go CLI Tests

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "cli/**"

  workflow_dispatch:

  workflow_call:
    secrets:
      CLI_TESTS_UA_CLIENT_ID:
        required: true
      CLI_TESTS_UA_CLIENT_SECRET:
        required: true
      CLI_TESTS_SERVICE_TOKEN:
        required: true
      CLI_TESTS_PROJECT_ID:
        required: true
      CLI_TESTS_ENV_SLUG:
        required: true
      CLI_TESTS_USER_EMAIL:
        required: true
      CLI_TESTS_USER_PASSWORD:
        required: true
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
        required: true
jobs:
  test:
    defaults:
      run:
        working-directory: ./cli
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.x"
      - name: Install dependencies
        run: go get .
      - name: Test with the Go CLI
        env:
          CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
          CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
          CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
          CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
          CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
          CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
          CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
          # INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

        run: go test -v -count=1 ./test
.goreleaser.yaml (241 lines removed)
@@ -1,241 +0,0 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# before:
#   hooks:
#     # You may remove this if you don't use go modules.
#     - cd cli && go mod tidy
#     # you may remove this if you don't need go generate
#     - cd cli && go generate ./...
before:
  hooks:
    - ./cli/scripts/completions.sh
    - ./cli/scripts/manpages.sh

monorepo:
  tag_prefix: infisical-cli/
  dir: cli

builds:
  - id: darwin-build
    binary: infisical
    ldflags:
      - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
      - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
    flags:
      - -trimpath
    env:
      - CGO_ENABLED=1
      - CC=/home/runner/work/osxcross/target/bin/o64-clang
      - CXX=/home/runner/work/osxcross/target/bin/o64-clang++
    goos:
      - darwin
    ignore:
      - goos: darwin
        goarch: "386"
    dir: ./cli

  - id: all-other-builds
    env:
      - CGO_ENABLED=0
    binary: infisical
    ldflags:
      - -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
      - -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
    flags:
      - -trimpath
    goos:
      - freebsd
      - linux
      - netbsd
      - openbsd
      - windows
    goarch:
      - "386"
      - amd64
      - arm
      - arm64
    goarm:
      - "6"
      - "7"
    ignore:
      - goos: windows
        goarch: "386"
      - goos: freebsd
        goarch: "386"
    dir: ./cli

archives:
  - format_overrides:
      - goos: windows
        format: zip
    files:
      - ../README*
      - ../LICENSE*
      - ../manpages/*
      - ../completions/*

release:
  replace_existing_draft: true
  mode: "replace"

checksum:
  name_template: "checksums.txt"

snapshot:
  name_template: "{{ .Version }}-devel"

# publishers:
#   - name: fury.io
#     ids:
#       - infisical
#     dir: "{{ dir .ArtifactPath }}"
#     cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/

brews:
  - name: infisical
    tap:
      owner: Infisical
      name: homebrew-get-cli
    commit_author:
      name: "Infisical"
      email: ai@infisical.com
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"
  - name: "infisical@{{.Version}}"
    tap:
      owner: Infisical
      name: homebrew-get-cli
    commit_author:
      name: "Infisical"
      email: ai@infisical.com
    folder: Formula
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    install: |-
      bin.install "infisical"
      bash_completion.install "completions/infisical.bash" => "infisical"
      zsh_completion.install "completions/infisical.zsh" => "_infisical"
      fish_completion.install "completions/infisical.fish"
      man1.install "manpages/infisical.1.gz"

nfpms:
  - id: infisical
    package_name: infisical
    builds:
      - all-other-builds
    vendor: Infisical, Inc
    homepage: https://infisical.com/
    maintainer: Infisical, Inc
    description: The official Infisical CLI
    license: MIT
    formats:
      - rpm
      - deb
      - apk
      - archlinux
    bindir: /usr/bin
    contents:
      - src: ./completions/infisical.bash
        dst: /etc/bash_completion.d/infisical
      - src: ./completions/infisical.fish
        dst: /usr/share/fish/vendor_completions.d/infisical.fish
      - src: ./completions/infisical.zsh
        dst: /usr/share/zsh/site-functions/_infisical
      - src: ./manpages/infisical.1.gz
        dst: /usr/share/man/man1/infisical.1.gz

scoop:
  bucket:
    owner: Infisical
    name: scoop-infisical
  commit_author:
    name: "Infisical"
    email: ai@infisical.com
  homepage: "https://infisical.com"
  description: "The official Infisical CLI"
  license: MIT

winget:
  - name: infisical
    publisher: infisical
    license: MIT
    homepage: https://infisical.com
    short_description: "The official Infisical CLI"
    repository:
      owner: infisical
      name: winget-pkgs
      branch: "infisical-{{.Version}}"
      pull_request:
        enabled: true
        draft: false
        base:
          owner: microsoft
          name: winget-pkgs
          branch: master

aurs:
  - name: infisical-bin
    homepage: "https://infisical.com"
    description: "The official Infisical CLI"
    maintainers:
      - Infisical, Inc <support@infisical.com>
    license: MIT
    private_key: "{{ .Env.AUR_KEY }}"
    git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
    package: |-
      # bin
      install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
      # license
      install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
      # completions
      mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
      mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
      mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
      install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
      install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
      install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
      # man pages
      install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"

dockers:
  - dockerfile: docker/alpine
    goos: linux
    goarch: amd64
    use: buildx
    ids:
      - all-other-builds
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
      - "infisical/cli:latest-amd64"
    build_flag_templates:
      - "--pull"
      - "--platform=linux/amd64"
  - dockerfile: docker/alpine
    goos: linux
    goarch: amd64
    use: buildx
    ids:
      - all-other-builds
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
      - "infisical/cli:latest-arm64"
    build_flag_templates:
      - "--pull"
      - "--platform=linux/arm64"

docker_manifests:
  - name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
    image_templates:
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
      - "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
  - name_template: "infisical/cli:latest"
    image_templates:
      - "infisical/cli:latest-amd64"
      - "infisical/cli:latest-arm64"
@@ -34,6 +34,8 @@ ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

+ENV NODE_OPTIONS="--max-old-space-size=8192"
+
# Build
RUN npm run build

@@ -209,6 +211,11 @@ EXPOSE 443
RUN grep -v 'import "./lib/telemetry/instrumentation.mjs";' dist/main.mjs > dist/main.mjs.tmp && \
    mv dist/main.mjs.tmp dist/main.mjs

+# The OpenSSL library is installed in different locations in different architectures (x86_64 and arm64).
+# This is a workaround to avoid errors when the library is not found.
+RUN ln -sf /usr/local/lib64/ossl-modules /usr/local/lib/ossl-modules || \
+    ln -sf /usr/local/lib/ossl-modules /usr/local/lib64/ossl-modules
+
USER non-root-user

CMD ["./standalone-entrypoint.sh"]
@@ -55,6 +55,8 @@ USER non-root-user
##
FROM base AS backend-build

+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
WORKDIR /app

# Install all required dependencies for build

@@ -84,6 +86,8 @@ RUN npm run build
# Production stage
FROM base AS backend-runner

+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
WORKDIR /app

# Install all required dependencies for runtime

@@ -112,6 +116,11 @@ RUN mkdir frontend-build
FROM base AS production

RUN apt-get update && apt-get install -y \
+    build-essential \
+    autoconf \
+    automake \
+    libtool \
+    libssl-dev \
    ca-certificates \
    bash \
    curl \

@@ -171,6 +180,7 @@ ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024"
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend
backend/package-lock.json (generated, 55 lines changed)
@@ -7,7 +7,6 @@
    "": {
      "name": "backend",
      "version": "1.0.0",
-     "hasInstallScript": true,
      "license": "ISC",
      "dependencies": {
        "@aws-sdk/client-elasticache": "^3.637.0",
@@ -34,7 +33,7 @@
        "@gitbeaker/rest": "^42.5.0",
        "@google-cloud/kms": "^4.5.0",
        "@infisical/quic": "^1.0.8",
-       "@node-saml/passport-saml": "^5.0.1",
+       "@node-saml/passport-saml": "^5.1.0",
        "@octokit/auth-app": "^7.1.1",
        "@octokit/core": "^5.2.1",
        "@octokit/plugin-paginate-graphql": "^4.0.1",
@@ -9574,20 +9573,20 @@
      }
    },
    "node_modules/@node-saml/node-saml": {
-     "version": "5.0.1",
-     "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
-     "integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
+     "version": "5.1.0",
+     "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz",
+     "integrity": "sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw==",
      "license": "MIT",
      "dependencies": {
        "@types/debug": "^4.1.12",
-       "@types/qs": "^6.9.11",
+       "@types/qs": "^6.9.18",
        "@types/xml-encryption": "^1.2.4",
        "@types/xml2js": "^0.4.14",
        "@xmldom/is-dom-node": "^1.0.1",
        "@xmldom/xmldom": "^0.8.10",
-       "debug": "^4.3.4",
-       "xml-crypto": "^6.0.1",
-       "xml-encryption": "^3.0.2",
+       "debug": "^4.4.0",
+       "xml-crypto": "^6.1.2",
+       "xml-encryption": "^3.1.0",
        "xml2js": "^0.6.2",
        "xmlbuilder": "^15.1.1",
        "xpath": "^0.0.34"
@@ -9597,9 +9596,9 @@
      }
    },
    "node_modules/@node-saml/node-saml/node_modules/debug": {
-     "version": "4.4.0",
-     "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
-     "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
+     "version": "4.4.1",
+     "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
+     "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
      "license": "MIT",
      "dependencies": {
        "ms": "^2.1.3"
@@ -9636,14 +9635,14 @@
      }
    },
    "node_modules/@node-saml/passport-saml": {
-     "version": "5.0.1",
-     "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
-     "integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
+     "version": "5.1.0",
+     "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.1.0.tgz",
+     "integrity": "sha512-pBm+iFjv9eihcgeJuSUs4c0AuX1QEFdHwP8w1iaWCfDzXdeWZxUBU5HT2bY2S4dvNutcy+A9hYsH7ZLBGtgwDg==",
      "license": "MIT",
      "dependencies": {
-       "@node-saml/node-saml": "^5.0.1",
-       "@types/express": "^4.17.21",
-       "@types/passport": "^1.0.16",
+       "@node-saml/node-saml": "^5.1.0",
+       "@types/express": "^4.17.23",
+       "@types/passport": "^1.0.17",
        "@types/passport-strategy": "^0.2.38",
        "passport": "^0.7.0",
        "passport-strategy": "^1.0.0"
@@ -13351,9 +13350,10 @@
      "license": "MIT"
    },
    "node_modules/@types/express": {
-     "version": "4.17.21",
-     "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
-     "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
+     "version": "4.17.23",
+     "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz",
+     "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==",
+     "license": "MIT",
      "dependencies": {
        "@types/body-parser": "*",
        "@types/express-serve-static-core": "^4.17.33",
@@ -13523,9 +13523,10 @@
      }
    },
    "node_modules/@types/passport": {
-     "version": "1.0.16",
-     "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.16.tgz",
-     "integrity": "sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A==",
+     "version": "1.0.17",
+     "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz",
+     "integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==",
+     "license": "MIT",
      "dependencies": {
        "@types/express": "*"
      }
@@ -31953,9 +31954,9 @@
      "license": "MIT"
    },
    "node_modules/xml-crypto": {
-     "version": "6.0.1",
-     "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
-     "integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
+     "version": "6.1.2",
+     "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.1.2.tgz",
+     "integrity": "sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w==",
      "license": "MIT",
      "dependencies": {
        "@xmldom/is-dom-node": "^1.0.1",
@@ -153,7 +153,7 @@
    "@gitbeaker/rest": "^42.5.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
-   "@node-saml/passport-saml": "^5.0.1",
+   "@node-saml/passport-saml": "^5.1.0",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/core": "^5.2.1",
    "@octokit/plugin-paginate-graphql": "^4.0.1",
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues"))) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.specificType("secretDetectionIgnoreValues", "text[]");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues")) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("secretDetectionIgnoreValues");
    });
  }
}
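The migration above guards both directions with hasColumn checks, so re-running it is safe. As a rough illustration of what the new text[] column is for (the actual consumer in this diff is scanSecretPolicyViolations, further below), a minimal hypothetical filter could look like this; Finding and filterIgnoredFindings are illustrative names, not part of the codebase:

// Hypothetical sketch: applying a per-project ignore list such as
// secretDetectionIgnoreValues to secret-scanning results.
type Finding = { secretKey: string; secretValue: string };

const filterIgnoredFindings = (findings: Finding[], ignoreValues: string[]): Finding[] =>
  // drop findings whose exact secret value the project has chosen to ignore
  findings.filter((finding) => !ignoreValues.includes(finding.secretValue));

// usage sketch: filterIgnoredFindings(results, project.secretDetectionIgnoreValues ?? [])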
@@ -30,7 +30,8 @@ export const ProjectsSchema = z.object({
  hasDeleteProtection: z.boolean().default(false).nullable().optional(),
  secretSharing: z.boolean().default(true),
  showSnapshotsLegacy: z.boolean().default(false),
- defaultProduct: z.string().nullable().optional()
+ defaultProduct: z.string().nullable().optional(),
+ secretDetectionIgnoreValues: z.string().array().nullable().optional()
});

export type TProjects = z.infer<typeof ProjectsSchema>;
@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
  CreateGitLabDataSourceSchema,
  GitLabDataSourceSchema,
  UpdateGitLabDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/gitlab";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

export const registerGitLabSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.GitLab,
    server,
    responseSchema: GitLabDataSourceSchema,
    createSchema: CreateGitLabDataSourceSchema,
    updateSchema: UpdateGitLabDataSourceSchema
  });
@@ -1,3 +1,4 @@
+import { registerGitLabSecretScanningRouter } from "@app/ee/routes/v2/secret-scanning-v2-routers/gitlab-secret-scanning-router";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";

@@ -10,5 +11,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
- [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
+ [SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter,
+ [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
};
@@ -4,6 +4,7 @@ import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
+import { GitLabDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/gitlab";
import {
  SecretScanningFindingStatus,
  SecretScanningScanStatus

@@ -24,7 +25,8 @@ import { AuthMode } from "@app/services/auth/auth-type";

const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
  GitHubDataSourceListItemSchema,
- BitbucketDataSourceListItemSchema
+ BitbucketDataSourceListItemSchema,
+ GitLabDataSourceListItemSchema
]);

export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
@@ -5,13 +5,14 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here

import { ForbiddenError } from "@casl/ability";
+import { AxiosError } from "axios";
import { CronJob } from "cron";
import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
-import { NotFoundError } from "@app/lib/errors";
+import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";

@@ -603,10 +604,22 @@ export const licenseServiceFactory = ({
      });
    }

-   const { data } = await licenseServerCloudApi.request.delete(
-     `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
-   );
-   return data;
+   try {
+     const { data } = await licenseServerCloudApi.request.delete(
+       `/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
+     );
+     return data;
+   } catch (error) {
+     if (error instanceof AxiosError) {
+       throw new BadRequestError({
+         // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
+         message: `Failed to remove payment method: ${error.response?.data?.message}`
+       });
+     }
+     throw new BadRequestError({
+       message: "Unable to remove payment method"
+     });
+   }
  };

  const getOrgTaxIds = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgTaxIdDTO) => {
@@ -579,6 +579,9 @@ export const scimServiceFactory = ({
    });

    const serverCfg = await getServerCfg();
+   const hasEmailChanged = email?.toLowerCase() !== membership.email;
+   const defaultEmailVerified =
+     org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
    await userDAL.transaction(async (tx) => {
      await userAliasDAL.update(
        {

@@ -605,8 +608,7 @@ export const scimServiceFactory = ({
          firstName,
          email: email?.toLowerCase(),
          lastName,
-         isEmailVerified:
-           org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
+         isEmailVerified: hasEmailChanged ? defaultEmailVerified : undefined
        },
        tx
      );
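A small illustrative sketch of the guard introduced above (names and values below are hypothetical; the real update runs inside userDAL.transaction): isEmailVerified is only recomputed when the incoming SCIM email actually differs from the stored membership email, rather than being reset on every SCIM update.

// Illustrative only: the semantics of the hasEmailChanged guard.
const decideEmailVerified = (
  incomingEmail: string | undefined,
  storedEmail: string,
  trustIdpEmails: boolean // trustOidcEmails or trustSamlEmails, per org auth method
): boolean | undefined => {
  const hasEmailChanged = incomingEmail?.toLowerCase() !== storedEmail;
  // undefined signals "leave the stored verification flag unchanged"
  return hasEmailChanged ? trustIdpEmails : undefined;
};

decideEmailVerified("user@example.com", "user@example.com", true); // undefined: email unchanged
decideEmailVerified("new@example.com", "user@example.com", true); // true: re-derived from IdP trust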
@@ -65,10 +65,14 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";

import { TLicenseServiceFactory } from "../license/license-service";
-import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
+import {
+  hasSecretReadValueOrDescribePermission,
+  throwIfMissingSecretReadValueOrDescribePermission
+} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
+import { scanSecretPolicyViolations } from "../secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
import { sendApprovalEmailsFn } from "./secret-approval-request-fns";

@@ -276,13 +280,19 @@ export const secretApprovalRequestServiceFactory = ({
    ) {
      throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
    }

-   const hasSecretReadAccess = permission.can(
-     ProjectPermissionSecretActions.DescribeAndReadValue,
-     ProjectPermissionSub.Secrets
-   );
+   const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
+     const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
+       environment,
+       secretPath: secretPath || "/",
+       secretTags: tags.map((i) => i.slug)
+     });
+     return canRead;
+   };

    let secrets;
+   const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
+     secretApprovalRequest.folderId
+   ]);
    if (shouldUseSecretV2Bridge) {
      const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
        type: KmsDataKey.SecretManager,

@@ -298,8 +308,8 @@ export const secretApprovalRequestServiceFactory = ({
        version: el.version,
        secretMetadata: el.secretMetadata as ResourceMetadataDTO,
        isRotatedSecret: el.secret?.isRotatedSecret ?? false,
-       secretValueHidden: !hasSecretReadAccess,
-       secretValue: !hasSecretReadAccess
+       secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
+       secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
          ? INFISICAL_SECRET_VALUE_HIDDEN_MASK
          : el.secret && el.secret.isRotatedSecret
            ? undefined

@@ -314,8 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
          secretKey: el.secret.key,
          id: el.secret.id,
          version: el.secret.version,
-         secretValueHidden: !hasSecretReadAccess,
-         secretValue: !hasSecretReadAccess
+         secretValueHidden: !getHasSecretReadAccess(
+           secretApprovalRequest.environment,
+           el.tags,
+           secretPath?.[0]?.path
+         ),
+         secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
            ? INFISICAL_SECRET_VALUE_HIDDEN_MASK
            : el.secret.encryptedValue
              ? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()

@@ -330,8 +344,12 @@ export const secretApprovalRequestServiceFactory = ({
          secretKey: el.secretVersion.key,
          id: el.secretVersion.id,
          version: el.secretVersion.version,
-         secretValueHidden: !hasSecretReadAccess,
-         secretValue: !hasSecretReadAccess
+         secretValueHidden: !getHasSecretReadAccess(
+           secretApprovalRequest.environment,
+           el.tags,
+           secretPath?.[0]?.path
+         ),
+         secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
            ? INFISICAL_SECRET_VALUE_HIDDEN_MASK
            : el.secretVersion.encryptedValue
              ? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()

@@ -349,7 +367,7 @@ export const secretApprovalRequestServiceFactory = ({
    const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
    secrets = encryptedSecrets.map((el) => ({
      ...el,
-     secretValueHidden: !hasSecretReadAccess,
+     secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
      ...decryptSecretWithBot(el, botKey),
      secret: el.secret
        ? {

@@ -369,9 +387,6 @@ export const secretApprovalRequestServiceFactory = ({
        : undefined
    }));
  }
- const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
-   secretApprovalRequest.folderId
- ]);

  return { ...secretApprovalRequest, secretPath: secretPath?.[0]?.path || "/", commits: secrets };
};

@@ -1412,6 +1427,20 @@
      projectId
    });

+   const project = await projectDAL.findById(projectId);
+   await scanSecretPolicyViolations(
+     projectId,
+     secretPath,
+     [
+       ...(data[SecretOperations.Create] || []),
+       ...(data[SecretOperations.Update] || []).filter((el) => el.secretValue)
+     ].map((el) => ({
+       secretKey: el.secretKey,
+       secretValue: el.secretValue as string
+     })),
+     project.secretDetectionIgnoreValues || []
+   );

    // for created secret approval change
    const createdSecrets = data[SecretOperations.Create];
    if (createdSecrets && createdSecrets?.length) {
@@ -21,6 +21,8 @@ const GRAPH_API_BASE = "https://graph.microsoft.com/v1.0";

type AzureErrorResponse = { error: { message: string } };

+const EXPIRY_PADDING_IN_DAYS = 3;
+
const sleep = async () =>
  new Promise((resolve) => {
    setTimeout(resolve, 1000);

@@ -33,7 +35,8 @@ export const azureClientSecretRotationFactory: TRotationFactory<
  const {
    connection,
    parameters: { objectId, clientId: clientIdParam },
-   secretsMapping
+   secretsMapping,
+   rotationInterval
  } = secretRotation;

  /**

@@ -50,7 +53,7 @@ export const azureClientSecretRotationFactory: TRotationFactory<
    )}-${now.getFullYear()}`;

    const endDateTime = new Date();
-   endDateTime.setFullYear(now.getFullYear() + 5);
+   endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS); // give 72 hour buffer

    try {
      const { data } = await request.post<AzureAddPasswordResponse>(

@@ -195,6 +198,12 @@ export const azureClientSecretRotationFactory: TRotationFactory<
    callback
  ) => {
    const credentials = await $rotateClientSecret();

+   // 2.5 years as expiry is set to x2 interval for the inactive period of credential
+   if (rotationInterval > Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS) {
+     throw new BadRequestError({ message: "Azure does not support token duration over 5 years" });
+   }
+
    return callback(credentials);
  };
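A quick arithmetic check of the two changes above (illustrative only, assuming 365-day years as the guard itself does): the secret's expiry is now rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS days, and the guard caps the interval so the resulting lifetime stays under the five-year ceiling named in the error message.

// Worked numbers for the expiry bound.
const EXPIRY_PADDING_IN_DAYS = 3;
const maxRotationInterval = Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS; // 912 - 3 = 909 days

// Expiry for the largest permitted interval:
const maxExpiryDays = maxRotationInterval * 2 + EXPIRY_PADDING_IN_DAYS; // 909 * 2 + 3 = 1821 days
console.log(maxExpiryDays / 365); // ~4.99 years, just under five years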
@@ -51,6 +51,7 @@ const baseSecretRotationV2Query = ({
      db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
      db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
      db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
+     db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
      db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
      db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
      db

@@ -104,6 +105,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRotationV2Query>>
    connectionCreatedAt,
    connectionUpdatedAt,
    connectionVersion,
+   connectionGatewayId,
    connectionIsPlatformManagedCredentials,
    ...el
  } = secretRotation;

@@ -123,6 +125,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRotationV2Query>>
      createdAt: connectionCreatedAt,
      updatedAt: connectionUpdatedAt,
      version: connectionVersion,
+     gatewayId: connectionGatewayId,
      isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
    },
    folder: {
@@ -18,7 +18,8 @@ import {
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization,
- TSecretScanningFactoryTeardown
+ TSecretScanningFactoryTeardown,
+ TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";

@@ -302,6 +303,13 @@ export const BitbucketSecretScanningFactory = () => {
    );
  };

+ const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
+   TBitbucketDataSourceInput["config"],
+   TBitbucketDataSourceWithConnection
+ > = async () => {
+   // no validation required
+ };

  return {
    initialize,
    postInitialization,

@@ -309,6 +317,7 @@ export const BitbucketSecretScanningFactory = () => {
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
-   teardown
+   teardown,
+   validateConfigUpdate
  };
};
@@ -20,7 +20,8 @@ import {
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryListRawResources,
  TSecretScanningFactoryPostInitialization,
- TSecretScanningFactoryTeardown
+ TSecretScanningFactoryTeardown,
+ TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

@@ -64,7 +65,14 @@ export const GitHubSecretScanningFactory = () => {
  };

  const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
-   // no termination required
+   // no teardown required
  };

+ const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
+   TGitHubDataSourceInput["config"],
+   TGitHubDataSourceWithConnection
+ > = async () => {
+   // no validation required
+ };

  const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (

@@ -238,6 +246,7 @@ export const GitHubSecretScanningFactory = () => {
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
-   teardown
+   teardown,
+   validateConfigUpdate
  };
};
@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "GitLab",
  type: SecretScanningDataSource.GitLab,
  connection: AppConnection.GitLab
};
@@ -0,0 +1,8 @@
export enum GitLabDataSourceScope {
  Project = "project",
  Group = "group"
}

export enum GitLabWebHookEvent {
  Push = "Push Hook"
}
@@ -0,0 +1,409 @@
|
||||
import { Camelize, GitbeakerRequestError, GroupHookSchema, ProjectHookSchema } from "@gitbeaker/rest";
|
||||
import { join } from "path";
|
||||
|
||||
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
|
||||
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
|
||||
import {
|
||||
SecretScanningFindingSeverity,
|
||||
SecretScanningResource
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
|
||||
import {
|
||||
cloneRepository,
|
||||
convertPatchLineToFileLineNumber,
|
||||
replaceNonChangesWithNewlines
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
|
||||
import {
|
||||
TSecretScanningFactoryGetDiffScanFindingsPayload,
|
||||
TSecretScanningFactoryGetDiffScanResourcePayload,
|
||||
TSecretScanningFactoryGetFullScanPath,
|
||||
TSecretScanningFactoryInitialize,
|
||||
TSecretScanningFactoryListRawResources,
|
||||
TSecretScanningFactoryParams,
|
||||
TSecretScanningFactoryPostInitialization,
|
||||
TSecretScanningFactoryTeardown,
|
||||
TSecretScanningFactoryValidateConfigUpdate
|
||||
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { titleCaseToCamelCase } from "@app/lib/fn";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { GitLabProjectRegex } from "@app/lib/regex";
|
||||
import {
|
||||
getGitLabConnectionClient,
|
||||
getGitLabInstanceUrl,
|
||||
TGitLabConnection
|
||||
} from "@app/services/app-connection/gitlab";
|
||||
|
||||
import { GitLabDataSourceScope } from "./gitlab-secret-scanning-enums";
|
||||
import {
|
||||
TGitLabDataSourceCredentials,
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabDataSourceWithConnection,
|
||||
TQueueGitLabResourceDiffScan
|
||||
} from "./gitlab-secret-scanning-types";
|
||||
|
||||
const getMainDomain = (instanceUrl: string) => {
|
||||
const url = new URL(instanceUrl);
|
||||
const { hostname } = url;
|
||||
const parts = hostname.split(".");
|
||||
|
||||
if (parts.length >= 2) {
|
||||
return parts.slice(-2).join(".");
|
||||
}
|
||||
|
||||
return hostname;
|
||||
};
|
||||
|
||||
export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TSecretScanningFactoryParams) => {
|
||||
const initialize: TSecretScanningFactoryInitialize<
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabConnection,
|
||||
TGitLabDataSourceCredentials
|
||||
> = async ({ payload: { config, name }, connection }, callback) => {
|
||||
const token = alphaNumericNanoId(64);
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
const appCfg = getConfig();
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
const project = await client.Projects.show(projectId);
|
||||
|
||||
if (!project) {
|
||||
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
|
||||
}
|
||||
|
||||
let hook: Camelize<ProjectHookSchema>;
|
||||
try {
|
||||
hook = await client.ProjectHooks.add(projectId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
|
||||
token,
|
||||
pushEvents: true,
|
||||
enableSslVerification: true,
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${name}`
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof GitbeakerRequestError) {
|
||||
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
try {
|
||||
return await callback({
|
||||
credentials: {
|
||||
token,
|
||||
hookId: hook.id
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.ProjectHooks.remove(projectId, hook.id);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// group scope
|
||||
const { groupId } = config;
|
||||
|
||||
const group = await client.Groups.show(groupId);
|
||||
|
||||
if (!group) {
|
||||
throw new BadRequestError({ message: `Could not find group with ID ${groupId}.` });
|
||||
}
|
||||
|
||||
let hook: Camelize<GroupHookSchema>;
|
||||
try {
|
||||
hook = await client.GroupHooks.add(groupId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
|
||||
token,
|
||||
pushEvents: true,
|
||||
enableSslVerification: true,
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${name}`
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof GitbeakerRequestError) {
|
||||
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
try {
|
||||
return await callback({
|
||||
credentials: {
|
||||
token,
|
||||
hookId: hook.id
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.GroupHooks.remove(groupId, hook.id);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const postInitialization: TSecretScanningFactoryPostInitialization<
|
||||
TGitLabDataSourceInput,
|
||||
TGitLabConnection,
|
||||
TGitLabDataSourceCredentials
|
||||
> = async ({ connection, dataSourceId, credentials, payload: { config } }) => {
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
const appCfg = getConfig();
|
||||
|
||||
const hookUrl = `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`;
|
||||
const { hookId } = credentials;
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
|
||||
try {
|
||||
await client.ProjectHooks.edit(projectId, hookId, hookUrl, {
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${dataSourceId}`,
|
||||
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.ProjectHooks.remove(projectId, hookId);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// group-scope
|
||||
const { groupId } = config;
|
||||
|
||||
try {
|
||||
await client.GroupHooks.edit(groupId, hookId, hookUrl, {
|
||||
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
|
||||
name: `Infisical Secret Scanning - ${dataSourceId}`,
|
||||
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
|
||||
});
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.GroupHooks.remove(groupId, hookId);
|
||||
} catch {
|
||||
// do nothing, just try to clean up webhook
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const listRawResources: TSecretScanningFactoryListRawResources<TGitLabDataSourceWithConnection> = async (
|
||||
dataSource
|
||||
) => {
|
||||
const { connection, config } = dataSource;
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
|
||||
if (config.scope === GitLabDataSourceScope.Project) {
|
||||
const { projectId } = config;
|
||||
|
||||
const project = await client.Projects.show(projectId);
|
||||
|
||||
if (!project) {
|
||||
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
|
||||
}
|
||||
|
||||
// scott: even though we have this data we want to get potentially updated name
|
||||
return [
|
||||
{
|
||||
name: project.pathWithNamespace,
|
||||
externalId: project.id.toString(),
|
||||
type: SecretScanningResource.Project
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
// group-scope
|
||||
|
||||
const { groupId, includeProjects } = config;
|
||||
|
||||
const projects = await client.Groups.allProjects(groupId, {
|
||||
archived: false
|
||||
});
|
||||
|
||||
const filteredProjects: typeof projects = [];
|
||||
if (!includeProjects || includeProjects.includes("*")) {
|
||||
filteredProjects.push(...projects);
|
||||
} else {
|
||||
filteredProjects.push(...projects.filter((project) => includeProjects.includes(project.pathWithNamespace)));
|
||||
}
|
||||
|
||||
return filteredProjects.map(({ id, pathWithNamespace }) => ({
|
||||
name: pathWithNamespace,
|
||||
externalId: id.toString(),
|
||||
type: SecretScanningResource.Project
|
||||
}));
|
||||
};
|
||||
|
||||
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitLabDataSourceWithConnection> = async ({
|
||||
dataSource,
|
||||
resourceName,
|
||||
tempFolder
|
||||
}) => {
|
||||
const { connection } = dataSource;
|
||||
|
||||
const instanceUrl = await getGitLabInstanceUrl(connection.credentials.instanceUrl);
|
||||
|
||||
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
|
||||
|
||||
const user = await client.Users.showCurrentUser();
|
||||
|
||||
const repoPath = join(tempFolder, "repo.git");
|
||||
|
||||
if (!GitLabProjectRegex.test(resourceName)) {
|
||||
throw new Error("Invalid GitLab project name");
|
||||
}
|
||||
|
||||
await cloneRepository({
|
||||
cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${getMainDomain(instanceUrl)}/${resourceName}.git`,
|
||||
repoPath
|
||||
});
|
||||
|
||||
return repoPath;
|
||||
};
|
||||
  const teardown: TSecretScanningFactoryTeardown<
    TGitLabDataSourceWithConnection,
    TGitLabDataSourceCredentials
  > = async ({ dataSource: { connection, config }, credentials: { hookId } }) => {
    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

    if (config.scope === GitLabDataSourceScope.Project) {
      const { projectId } = config;
      try {
        await client.ProjectHooks.remove(projectId, hookId);
      } catch (error) {
        // do nothing, just try to clean up webhook
      }
      return;
    }

    const { groupId } = config;
    try {
      await client.GroupHooks.remove(groupId, hookId);
    } catch (error) {
      // do nothing, just try to clean up webhook
    }
  };

  const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
    TQueueGitLabResourceDiffScan["payload"]
  > = ({ project }) => {
    return {
      name: project.path_with_namespace,
      externalId: project.id.toString(),
      type: SecretScanningResource.Project
    };
  };

  const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
    TGitLabDataSourceWithConnection,
    TQueueGitLabResourceDiffScan["payload"]
  > = async ({ dataSource, payload, resourceName, configPath }) => {
    const { connection } = dataSource;

    const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);

    const { commits, project } = payload;

    const allFindings: SecretMatch[] = [];

    for (const commit of commits) {
      // eslint-disable-next-line no-await-in-loop
      const commitDiffs = await client.Commits.showDiff(project.id, commit.id);

      for (const commitDiff of commitDiffs) {
        // eslint-disable-next-line no-continue
        if (commitDiff.deletedFile) continue;

        // eslint-disable-next-line no-await-in-loop
        const findings = await scanContentAndGetFindings(
          replaceNonChangesWithNewlines(`\n${commitDiff.diff}`),
          configPath
        );

        const adjustedFindings = findings.map((finding) => {
          const startLine = convertPatchLineToFileLineNumber(commitDiff.diff, finding.StartLine);
          const endLine =
            finding.StartLine === finding.EndLine
              ? startLine
              : convertPatchLineToFileLineNumber(commitDiff.diff, finding.EndLine);
          const startColumn = finding.StartColumn - 1; // subtract 1 for +
          const endColumn = finding.EndColumn - 1; // subtract 1 for +
          const authorName = commit.author.name;
          const authorEmail = commit.author.email;

          return {
            ...finding,
            StartLine: startLine,
            EndLine: endLine,
            StartColumn: startColumn,
            EndColumn: endColumn,
            File: commitDiff.newPath,
            Commit: commit.id,
            Author: authorName,
            Email: authorEmail,
            Message: commit.message,
            Fingerprint: `${commit.id}:${commitDiff.newPath}:${finding.RuleID}:${startLine}:${startColumn}`,
            Date: commit.timestamp,
            Link: `https://gitlab.com/${resourceName}/blob/${commit.id}/${commitDiff.newPath}#L${startLine}`
          };
        });

        allFindings.push(...adjustedFindings);
      }
    }

    return allFindings.map(
      ({
        // discard match and secret as we don't want to store
        Match,
        Secret,
        ...finding
      }) => ({
        details: titleCaseToCamelCase(finding),
        fingerprint: finding.Fingerprint,
        severity: SecretScanningFindingSeverity.High,
        rule: finding.RuleID
      })
    );
  };

  const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
    TGitLabDataSourceInput["config"],
    TGitLabDataSourceWithConnection
  > = async ({ config, dataSource }) => {
    if (dataSource.config.scope !== config.scope) {
      throw new BadRequestError({ message: "Cannot change Data Source scope after creation." });
    }
  };

  return {
    listRawResources,
    getFullScanPath,
    initialize,
    postInitialization,
    teardown,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
    validateConfigUpdate
  };
};
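The line and column adjustments above remap scanner output, which is computed against patch text, back to real file coordinates. A minimal standalone sketch of the column fix-up (the helper name is hypothetical, not part of the changeset):

// Illustrative example — not part of the changeset. Each changed patch line
// carries a leading "+", so scanner columns sit one to the right of the
// actual file columns; subtracting one restores the file coordinate.
const patchColumnToFileColumn = (patchColumn: number): number => patchColumn - 1;

console.log(patchColumnToFileColumn(9)); // => 8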
@@ -0,0 +1,101 @@
import { z } from "zod";

import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import {
  SecretScanningDataSource,
  SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
  BaseCreateSecretScanningDataSourceSchema,
  BaseSecretScanningDataSourceSchema,
  BaseSecretScanningFindingSchema,
  BaseUpdateSecretScanningDataSourceSchema,
  GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitLabProjectRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

export const GitLabDataSourceConfigSchema = z.discriminatedUnion("scope", [
  z.object({
    scope: z.literal(GitLabDataSourceScope.Group).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
    groupId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.groupId),
    groupName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.groupName),
    includeProjects: z
      .array(
        z
          .string()
          .min(1)
          .max(256)
          .refine((value) => value === "*" || GitLabProjectRegex.test(value), "Invalid project name format")
      )
      .nonempty("One or more projects required")
      .max(100, "Cannot configure more than 100 projects")
      .default(["*"])
      .describe(SecretScanningDataSources.CONFIG.GITLAB.includeProjects)
  }),
  z.object({
    scope: z.literal(GitLabDataSourceScope.Project).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
    projectName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.projectName),
    projectId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.projectId)
  })
]);

export const GitLabDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitLab,
  isConnectionRequired: true
})
  .extend({
    config: GitLabDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const CreateGitLabDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitLab,
  isConnectionRequired: true
})
  .extend({
    config: GitLabDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const UpdateGitLabDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitLab)
  .extend({
    config: GitLabDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const GitLabDataSourceListItemSchema = z
  .object({
    name: z.literal("GitLab"),
    connection: z.literal(AppConnection.GitLab),
    type: z.literal(SecretScanningDataSource.GitLab)
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );

export const GitLabFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Project),
  dataSourceType: z.literal(SecretScanningDataSource.GitLab),
  details: GitRepositoryScanFindingDetailsSchema
});

export const GitLabDataSourceCredentialsSchema = z.object({
  token: z.string(),
  hookId: z.number()
});
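A hedged usage sketch of the discriminated union above; the relative import paths are assumptions based on the file names in this changeset:

// Illustrative example — not part of the changeset.
import { GitLabDataSourceScope } from "./gitlab-secret-scanning-enums";
import { GitLabDataSourceConfigSchema } from "./gitlab-secret-scanning-schemas";

// Group scope: includeProjects falls back to ["*"] (scan every project).
const groupConfig = GitLabDataSourceConfigSchema.parse({
  scope: GitLabDataSourceScope.Group,
  groupId: 42
});
// groupConfig.includeProjects === ["*"]

// Project scope: projectId is required; projectName is optional metadata.
const projectConfig = GitLabDataSourceConfigSchema.parse({
  scope: GitLabDataSourceScope.Project,
  projectId: 1337
});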
@@ -0,0 +1,94 @@
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import {
  TGitLabDataSource,
  TGitLabDataSourceCredentials,
  THandleGitLabPushEvent
} from "./gitlab-secret-scanning-types";

export const gitlabSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const handlePushEvent = async ({ payload, token, dataSourceId }: THandleGitLabPushEvent) => {
    if (!payload.total_commits_count || !payload.project) {
      logger.warn(
        `secretScanningV2PushEvent: GitLab - Insufficient data [changes=${
          payload.total_commits_count ?? 0
        }] [projectName=${payload.project?.path_with_namespace ?? "unknown"}] [projectId=${payload.project?.id ?? "unknown"}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      id: dataSourceId,
      type: SecretScanningDataSource.GitLab
    })) as TGitLabDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: GitLab - Could not find data source [dataSourceId=${dataSourceId}] [projectId=${payload.project.id}]`
      );
      return;
    }

    const { isAutoScanEnabled, config, encryptedCredentials, projectId } = dataSource;

    if (!encryptedCredentials) {
      logger.info(
        `secretScanningV2PushEvent: GitLab - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId
    });

    const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });

    const credentials = JSON.parse(decryptedCredentials.toString()) as TGitLabDataSourceCredentials;

    if (token !== credentials.token) {
      logger.error(
        `secretScanningV2PushEvent: GitLab - Invalid webhook token [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: GitLab - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    if (
      config.scope === GitLabDataSourceScope.Project
        ? config.projectId.toString() === payload.project_id.toString()
        : config.includeProjects.includes("*") || config.includeProjects.includes(payload.project.path_with_namespace)
    ) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.GitLab,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: GitLab - ignoring due to repository not being present in config [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
    }
  };

  return {
    handlePushEvent
  };
};
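The final conditional decides whether the push is queued for a diff scan: a project-scoped source must match the exact project ID, while a group-scoped source matches either the "*" wildcard or an explicit path. A self-contained sketch with hypothetical values (the literal scope strings are assumptions for the enum values):

// Illustrative example — not part of the changeset.
type ScanScopeConfig =
  | { scope: "project"; projectId: number }
  | { scope: "group"; includeProjects: string[] };

const shouldQueueScan = (config: ScanScopeConfig, projectId: number, pathWithNamespace: string): boolean =>
  config.scope === "project"
    ? config.projectId === projectId
    : config.includeProjects.includes("*") || config.includeProjects.includes(pathWithNamespace);

console.log(shouldQueueScan({ scope: "group", includeProjects: ["*"] }, 7, "acme/api")); // true
console.log(shouldQueueScan({ scope: "project", projectId: 7 }, 8, "acme/api")); // false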
@@ -0,0 +1,97 @@
import { z } from "zod";

import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitLabConnection } from "@app/services/app-connection/gitlab";

import {
  CreateGitLabDataSourceSchema,
  GitLabDataSourceCredentialsSchema,
  GitLabDataSourceListItemSchema,
  GitLabDataSourceSchema,
  GitLabFindingSchema
} from "./gitlab-secret-scanning-schemas";

export type TGitLabDataSource = z.infer<typeof GitLabDataSourceSchema>;

export type TGitLabDataSourceInput = z.infer<typeof CreateGitLabDataSourceSchema>;

export type TGitLabDataSourceListItem = z.infer<typeof GitLabDataSourceListItemSchema>;

export type TGitLabFinding = z.infer<typeof GitLabFindingSchema>;

export type TGitLabDataSourceWithConnection = TGitLabDataSource & {
  connection: TGitLabConnection;
};

export type TGitLabDataSourceCredentials = z.infer<typeof GitLabDataSourceCredentialsSchema>;

export type TGitLabDataSourcePushEventPayload = {
  object_kind: "push";
  event_name: "push";
  before: string;
  after: string;
  ref: string;
  ref_protected: boolean;
  checkout_sha: string;
  user_id: number;
  user_name: string;
  user_username: string;
  user_email: string;
  user_avatar: string;
  project_id: number;
  project: {
    id: number;
    name: string;
    description: string;
    web_url: string;
    avatar_url: string | null;
    git_ssh_url: string;
    git_http_url: string;
    namespace: string;
    visibility_level: number;
    path_with_namespace: string;
    default_branch: string;
    homepage: string;
    url: string;
    ssh_url: string;
    http_url: string;
  };
  repository: {
    name: string;
    url: string;
    description: string;
    homepage: string;
    git_http_url: string;
    git_ssh_url: string;
    visibility_level: number;
  };
  commits: {
    id: string;
    message: string;
    title: string;
    timestamp: string;
    url: string;
    author: {
      name: string;
      email: string;
    };
    added: string[];
    modified: string[];
    removed: string[];
  }[];
  total_commits_count: number;
};

export type THandleGitLabPushEvent = {
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  token: string;
};

export type TQueueGitLabResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.GitLab;
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};
@@ -0,0 +1,3 @@
export * from "./gitlab-secret-scanning-constants";
export * from "./gitlab-secret-scanning-schemas";
export * from "./gitlab-secret-scanning-types";
@@ -1,6 +1,7 @@
export enum SecretScanningDataSource {
  GitHub = "github",
- Bitbucket = "bitbucket"
+ Bitbucket = "bitbucket",
+ GitLab = "gitlab"
}

export enum SecretScanningScanStatus {

@@ -1,5 +1,6 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
+import { GitLabSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory";

import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
@@ -19,5 +20,6 @@ type TSecretScanningFactoryImplementation = TSecretScanningFactory<

export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
  [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
- [SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
+ [SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation,
+ [SecretScanningDataSource.GitLab]: GitLabSecretScanningFactory as TSecretScanningFactoryImplementation
};

@@ -1,11 +1,22 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import { join } from "path";
+import picomatch from "picomatch";
import RE2 from "re2";

- import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
+ import {
+   createTempFolder,
+   deleteTempFolder,
+   readFindingsFile,
+   writeTextToFile
+ } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
+import { GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/gitlab";
+import { getConfig } from "@app/lib/config/env";
+import { crypto } from "@app/lib/crypto";
+import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";

import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
@@ -13,7 +24,8 @@ import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListIte

const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
  [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
- [SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
+ [SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
+ [SecretScanningDataSource.GitLab]: GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};

export const listSecretScanningDataSourceOptions = () => {
@@ -46,6 +58,19 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
  });
}

+export function scanFile(inputPath: string): Promise<void> {
+  return new Promise((resolve, reject) => {
+    const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
+    exec(command, (error) => {
+      if (error && error.code === 77) {
+        reject(error);
+      } else {
+        resolve();
+      }
+    });
+  });
+}
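Note that scanFile inverts the usual promise convention: the CLI signals a detected leak with exit code 77, which surfaces here as a rejection, while a clean file — or any other failure mode — resolves. A hedged usage sketch (the path is a placeholder, and scanFile is assumed to be in scope from this module):

// Illustrative example — not part of the changeset.
(async () => {
  try {
    await scanFile("/tmp/candidate.txt");
    console.log("no leak detected"); // also reached on non-77 failures
  } catch {
    console.log("leak detected (CLI exited with code 77)");
  }
})();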

export const scanGitRepositoryAndGetFindings = async (
  scanPath: string,
  findingsPath: string,
@@ -140,3 +165,47 @@ export const parseScanErrorMessage = (err: unknown): string => {
    ? errorMessage
    : `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

+export const scanSecretPolicyViolations = async (
+  projectId: string,
+  secretPath: string,
+  secrets: { secretKey: string; secretValue: string }[],
+  ignoreValues: string[]
+) => {
+  const appCfg = getConfig();
+
+  if (!appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED) {
+    return;
+  }
+
+  const match = appCfg.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.find(
+    (el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
+  );
+
+  if (!match) {
+    return;
+  }
+
+  const tempFolder = await createTempFolder();
+  try {
+    const scanPromises = secrets
+      .filter((secret) => !ignoreValues.includes(secret.secretValue))
+      .map(async (secret) => {
+        const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
+        await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
+
+        try {
+          await scanFile(secretFilePath);
+        } catch (error) {
+          throw new BadRequestError({
+            message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
+            name: "SecretPolicyViolation"
+          });
+        }
+      });
+
+    await Promise.all(scanPromises);
+  } finally {
+    await deleteTempFolder(tempFolder);
+  }
+};
@@ -3,15 +3,18 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums

export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
  [SecretScanningDataSource.GitHub]: "GitHub",
- [SecretScanningDataSource.Bitbucket]: "Bitbucket"
+ [SecretScanningDataSource.Bitbucket]: "Bitbucket",
+ [SecretScanningDataSource.GitLab]: "GitLab"
};

export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
  [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
- [SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
+ [SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket,
+ [SecretScanningDataSource.GitLab]: AppConnection.GitLab
};

export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
  [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
- [SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
+ [SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" },
+ [SecretScanningDataSource.GitLab]: { verb: "push", noun: "projects" }
};
@@ -16,6 +16,7 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
@@ -48,6 +49,7 @@ type TSecretRotationV2QueueServiceFactoryDep = {
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+ appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
  auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
  keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};
@@ -62,7 +64,8 @@ export const secretScanningV2QueueServiceFactory = async ({
  smtpService,
  kmsService,
  auditLogService,
- keyStore
+ keyStore,
+ appConnectionDAL
}: TSecretRotationV2QueueServiceFactoryDep) => {
  const queueDataSourceFullScan = async (
@@ -71,7 +74,10 @@ export const secretScanningV2QueueServiceFactory = async ({
    try {
      const { type } = dataSource;

-     const factory = SECRET_SCANNING_FACTORY_MAP[type]();
+     const factory = SECRET_SCANNING_FACTORY_MAP[type]({
+       kmsService,
+       appConnectionDAL
+     });

      const rawResources = await factory.listRawResources(dataSource);

@@ -171,7 +177,10 @@ export const secretScanningV2QueueServiceFactory = async ({
    let connection: TAppConnection | null = null;
    if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);

-   const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
+   const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
+     kmsService,
+     appConnectionDAL
+   });

    const findingsPath = join(tempFolder, "findings.json");

@@ -329,7 +338,10 @@ export const secretScanningV2QueueServiceFactory = async ({
    dataSourceId,
    dataSourceType
  }: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
-   const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
+   const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]({
+     kmsService,
+     appConnectionDAL
+   });

    const resourcePayload = factory.getDiffScanResourcePayload(payload);

@@ -391,7 +403,10 @@ export const secretScanningV2QueueServiceFactory = async ({

    if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);

-   const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
+   const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
+     kmsService,
+     appConnectionDAL
+   });

    const tempFolder = await createTempFolder();
@@ -46,6 +46,7 @@ import {
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
@@ -53,12 +54,14 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
+import { gitlabSecretScanningService } from "./gitlab/gitlab-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";

export type TSecretScanningV2ServiceFactoryDep = {
  secretScanningV2DAL: TSecretScanningV2DALFactory;
  appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
+ appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  secretScanningV2Queue: Pick<
@@ -76,6 +79,7 @@ export const secretScanningV2ServiceFactory = ({
  appConnectionService,
  licenseService,
  secretScanningV2Queue,
+ appConnectionDAL,
  kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
  const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
@@ -255,7 +259,10 @@ export const secretScanningV2ServiceFactory = ({
      );
    }

-   const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
+   const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]({
+     appConnectionDAL,
+     kmsService
+   });

    try {
      const createdDataSource = await factory.initialize(
@@ -363,6 +370,31 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

+   let connection: TAppConnection | null = null;
+   if (dataSource.connectionId) {
+     // validates permission to connect and app is valid for data source
+     connection = await appConnectionService.connectAppConnectionById(
+       SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[dataSource.type],
+       dataSource.connectionId,
+       actor
+     );
+   }
+
+   const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]({
+     appConnectionDAL,
+     kmsService
+   });
+
+   if (payload.config) {
+     await factory.validateConfigUpdate({
+       dataSource: {
+         ...dataSource,
+         connection
+       } as TSecretScanningDataSourceWithConnection,
+       config: payload.config as TSecretScanningDataSourceWithConnection["config"]
+     });
+   }
+
    try {
      const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);

@@ -416,7 +448,10 @@ export const secretScanningV2ServiceFactory = ({
        message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
      });

-   const factory = SECRET_SCANNING_FACTORY_MAP[type]();
+   const factory = SECRET_SCANNING_FACTORY_MAP[type]({
+     appConnectionDAL,
+     kmsService
+   });

    let connection: TAppConnection | null = null;
    if (dataSource.connection) {
@@ -903,6 +938,7 @@ export const secretScanningV2ServiceFactory = ({
    findSecretScanningConfigByProjectId,
    upsertSecretScanningConfig,
    github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
-   bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
+   bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService),
+   gitlab: gitlabSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
  };
};
@@ -21,14 +21,25 @@ import {
  TGitHubFinding,
  TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
+import {
+  TGitLabDataSource,
+  TGitLabDataSourceCredentials,
+  TGitLabDataSourceInput,
+  TGitLabDataSourceListItem,
+  TGitLabDataSourceWithConnection,
+  TGitLabFinding,
+  TQueueGitLabResourceDiffScan
+} from "@app/ee/services/secret-scanning-v2/gitlab";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
  SecretScanningDataSource,
  SecretScanningFindingStatus,
  SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";

- export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
+ export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource | TGitLabDataSource;

export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
  lastScannedAt?: Date | null;
@@ -52,15 +63,25 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {

export type TSecretScanningDataSourceWithConnection =
  | TGitHubDataSourceWithConnection
- | TBitbucketDataSourceWithConnection;
+ | TBitbucketDataSourceWithConnection
+ | TGitLabDataSourceWithConnection;

- export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
+ export type TSecretScanningDataSourceInput =
+   | TGitHubDataSourceInput
+   | TBitbucketDataSourceInput
+   | TGitLabDataSourceInput;

- export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
+ export type TSecretScanningDataSourceListItem =
+   | TGitHubDataSourceListItem
+   | TBitbucketDataSourceListItem
+   | TGitLabDataSourceListItem;

- export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
+ export type TSecretScanningDataSourceCredentials =
+   | TBitbucketDataSourceCredentials
+   | TGitLabDataSourceCredentials
+   | undefined;

- export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
+ export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding | TGitLabFinding;

export type TListSecretScanningDataSourcesByProjectId = {
  projectId: string;
@@ -112,7 +133,10 @@ export type TQueueSecretScanningDataSourceFullScan = {
  scanId: string;
};

- export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
+ export type TQueueSecretScanningResourceDiffScan =
+   | TQueueGitHubResourceDiffScan
+   | TQueueBitbucketResourceDiffScan
+   | TQueueGitLabResourceDiffScan;

export type TQueueSecretScanningSendNotification = {
  dataSource: TSecretScanningDataSources;
@@ -170,6 +194,11 @@ export type TSecretScanningFactoryInitialize<
  callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;

+export type TSecretScanningFactoryValidateConfigUpdate<
+  C extends TSecretScanningDataSourceInput["config"],
+  T extends TSecretScanningDataSourceWithConnection
+> = (params: { config: C; dataSource: T }) => Promise<void>;
+
export type TSecretScanningFactoryPostInitialization<
  P extends TSecretScanningDataSourceInput,
  T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
@@ -181,17 +210,23 @@ export type TSecretScanningFactoryTeardown<
  C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { dataSource: T; credentials: C }) => Promise<void>;

+export type TSecretScanningFactoryParams = {
+  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
+};
+
export type TSecretScanningFactory<
  T extends TSecretScanningDataSourceWithConnection,
  P extends TQueueSecretScanningResourceDiffScan["payload"],
  I extends TSecretScanningDataSourceInput,
  C extends TSecretScanningDataSourceCredentials | undefined = undefined
- > = () => {
+ > = (params: TSecretScanningFactoryParams) => {
  listRawResources: TSecretScanningFactoryListRawResources<T>;
  getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
  initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
  postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
  teardown: TSecretScanningFactoryTeardown<T, C>;
+ validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<I["config"], T>;
  getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
  getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
@@ -2,10 +2,12 @@ import { z } from "zod";

import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
+import { GitLabDataSourceSchema, GitLabFindingSchema } from "@app/ee/services/secret-scanning-v2/gitlab";

export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
  GitHubDataSourceSchema,
- BitbucketDataSourceSchema
+ BitbucketDataSourceSchema,
+ GitLabDataSourceSchema
]);

export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
@@ -18,5 +20,10 @@ export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType"
    JSON.stringify({
      title: "Bitbucket"
    })
  ),
+ GitLabFindingSchema.describe(
+   JSON.stringify({
+     title: "GitLab"
+   })
+ )
]);
@@ -664,6 +664,10 @@ export const ORGANIZATIONS = {
    organizationId: "The ID of the organization to delete the membership from.",
    membershipId: "The ID of the membership to delete."
  },
+ BULK_DELETE_USER_MEMBERSHIPS: {
+   organizationId: "The ID of the organization to delete the memberships from.",
+   membershipIds: "The IDs of the memberships to delete."
+ },
  LIST_IDENTITY_MEMBERSHIPS: {
    orgId: "The ID of the organization to get identity memberships from.",
    offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
@@ -704,7 +708,8 @@ export const PROJECTS = {
    hasDeleteProtection: "Enable or disable delete protection for the project.",
    secretSharing: "Enable or disable secret sharing for the project.",
    showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
-   defaultProduct: "The default product in which the project will open"
+   defaultProduct: "The default product in which the project will open",
+   secretDetectionIgnoreValues: "The list of secret values to ignore for secret detection."
  },
  GET_KEY: {
    workspaceId: "The ID of the project to get the key from."
@@ -2252,7 +2257,9 @@ export const AppConnections = {
  AZURE_DEVOPS: {
    code: "The OAuth code to use to connect with Azure DevOps.",
    tenantId: "The Tenant ID to use to connect with Azure DevOps.",
-   orgName: "The Organization name to use to connect with Azure DevOps."
+   orgName: "The Organization name to use to connect with Azure DevOps.",
+   clientId: "The Client ID to use to connect with Azure Client Secrets.",
+   clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
  },
  OCI: {
    userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
@@ -2399,12 +2406,18 @@ export const SecretSyncs = {
    env: "The name of the GitHub environment."
  },
  AZURE_KEY_VAULT: {
-   vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
+   vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/",
+   tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
+   clientId: "The Client ID to use to connect with Azure Client Secrets.",
+   clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
  },
  AZURE_APP_CONFIGURATION: {
    configurationUrl:
      "The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
-   label: "An optional label to assign to secrets created in Azure App Configuration."
+   label: "An optional label to assign to secrets created in Azure App Configuration.",
+   tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
+   clientId: "The Client ID to use to connect with Azure Client Secrets.",
+   clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
  },
  AZURE_DEVOPS: {
    devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
@@ -2701,6 +2714,14 @@ export const SecretScanningDataSources = {
  GITHUB: {
    includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
  },
+ GITLAB: {
+   includeProjects: 'The projects to include when scanning. Defaults to all projects (["*"]).',
+   scope: "The GitLab scope scanning should occur at (project or group level).",
+   projectId: "The ID of the project to scan.",
+   projectName: "The name of the project to scan.",
+   groupId: "The ID of the group to scan projects from.",
+   groupName: "The name of the group to scan projects from."
+ },
  BITBUCKET: {
    workspaceSlug: "The workspace to scan.",
    includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
@@ -204,6 +204,17 @@ const envSchema = z
  WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
  ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),

+ // Special Detection Feature
+ PARAMS_FOLDER_SECRET_DETECTION_PATHS: zpStr(
+   z
+     .string()
+     .optional()
+     .transform((val) => {
+       if (!val) return undefined;
+       return JSON.parse(val) as { secretPath: string; projectId: string }[];
+     })
+ ),
+
  // HSM
  HSM_LIB_PATH: zpStr(z.string().optional()),
  HSM_PIN: zpStr(z.string().optional()),
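The transform above expects the raw environment variable to be a JSON array of project/path pairs; scanSecretPolicyViolations later matches secretPath as a picomatch glob (strictSlashes: false). A sketch of the expected shape, with placeholder values:

// Illustrative example — not part of the changeset; values are placeholders.
process.env.PARAMS_FOLDER_SECRET_DETECTION_PATHS = JSON.stringify([
  { secretPath: "/params/*", projectId: "<project-id>" }
]);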
@@ -358,6 +369,7 @@ const envSchema = z
    Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
  samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
  SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(","),
+ PARAMS_FOLDER_SECRET_DETECTION_ENABLED: (data.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.length ?? 0) > 0,
  INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID:
    data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
  INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET:
@@ -484,7 +496,7 @@ export const overwriteSchema: {
    ]
  },
  azureAppConfiguration: {
-   name: "Azure App Configuration",
+   name: "Azure App Connection: App Configuration",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
@@ -497,7 +509,7 @@ export const overwriteSchema: {
    ]
  },
  azureKeyVault: {
-   name: "Azure Key Vault",
+   name: "Azure App Connection: Key Vault",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
@@ -510,7 +522,7 @@ export const overwriteSchema: {
    ]
  },
  azureClientSecrets: {
-   name: "Azure Client Secrets",
+   name: "Azure App Connection: Client Secrets",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
@@ -523,7 +535,7 @@ export const overwriteSchema: {
    ]
  },
  azureDevOps: {
-   name: "Azure DevOps",
+   name: "Azure App Connection: DevOps",
    fields: [
      {
        key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
@@ -11,3 +11,5 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);

export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
+
+export const GitLabProjectRegex = new RE2(/^[a-zA-Z0-9._-]+(?:\/[a-zA-Z0-9._-]+)+$/);
@@ -14,6 +14,11 @@ export const blockLocalAndPrivateIpAddresses = async (url: string) => {
  if (appCfg.isDevelopmentMode) return;

  const validUrl = new URL(url);

+ if (validUrl.username || validUrl.password) {
+   throw new BadRequestError({ message: "URLs with user credentials (e.g., user:pass@) are not allowed" });
+ }
+
  const inputHostIps: string[] = [];
  if (isIPv4(validUrl.hostname)) {
    inputHostIps.push(validUrl.hostname);
@@ -4,6 +4,8 @@ import { Probot } from "probot";
import { z } from "zod";

import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
+import { TGitLabDataSourcePushEventPayload } from "@app/ee/services/secret-scanning-v2/gitlab";
+import { GitLabWebHookEvent } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";
@@ -113,4 +115,36 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
      return res.send("ok");
    }
  });

+ // gitlab push event webhook
+ server.route({
+   method: "POST",
+   url: "/gitlab",
+   config: {
+     rateLimit: writeLimit
+   },
+   handler: async (req, res) => {
+     const event = req.headers["x-gitlab-event"] as GitLabWebHookEvent;
+     const token = req.headers["x-gitlab-token"] as string;
+     const dataSourceId = req.headers["x-data-source-id"] as string;
+
+     if (event !== GitLabWebHookEvent.Push) {
+       return res.status(400).send({ message: `Event type not supported: ${event as string}` });
+     }
+
+     if (!token) {
+       return res.status(401).send({ message: "Unauthorized: Missing token" });
+     }
+
+     if (!dataSourceId) return res.status(400).send({ message: "Data Source ID header is required" });
+
+     await server.services.secretScanningV2.gitlab.handlePushEvent({
+       dataSourceId,
+       payload: req.body as TGitLabDataSourcePushEventPayload,
+       token
+     });
+
+     return res.send("ok");
+   }
+ });
};
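A hedged sketch of the delivery GitLab would make to this route; the host and route prefix are placeholders, and the X-Gitlab-Event value is assumed to be whatever GitLabWebHookEvent.Push resolves to (GitLab itself sends "Push Hook"):

// Illustrative example — not part of the changeset; all identifiers are placeholders.
(async () => {
  await fetch("https://<infisical-host>/<webhook-prefix>/gitlab", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "X-Gitlab-Event": "Push Hook", // assumed value of GitLabWebHookEvent.Push
      "X-Gitlab-Token": "<webhook-token>", // must equal the stored credentials token
      "X-Data-Source-Id": "<data-source-id>"
    },
    body: JSON.stringify({ object_kind: "push" /* ...rest of the push payload... */ })
  });
})();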
@@ -1044,6 +1044,15 @@ export const registerRoutes = async (
    kmsService
  });

+ const gatewayService = gatewayServiceFactory({
+   permissionService,
+   gatewayDAL,
+   kmsService,
+   licenseService,
+   orgGatewayConfigDAL,
+   keyStore
+ });
+
  const secretSyncQueue = secretSyncQueueFactory({
    queueService,
    secretSyncDAL,
@@ -1067,7 +1076,8 @@ export const registerRoutes = async (
    secretVersionTagV2BridgeDAL,
    resourceMetadataDAL,
    appConnectionDAL,
-   licenseService
+   licenseService,
+   gatewayService
  });

  const secretQueueService = secretQueueFactory({
@@ -1238,6 +1248,7 @@ export const registerRoutes = async (

  const secretV2BridgeService = secretV2BridgeServiceFactory({
    folderDAL,
    projectDAL,
    secretVersionDAL: secretVersionV2BridgeDAL,
    folderCommitService,
    secretQueueService,
@@ -1489,15 +1500,6 @@ export const registerRoutes = async (
    licenseService
  });

- const gatewayService = gatewayServiceFactory({
-   permissionService,
-   gatewayDAL,
-   kmsService,
-   licenseService,
-   orgGatewayConfigDAL,
-   keyStore
- });
-
  const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
    identityKubernetesAuthDAL,
    identityOrgMembershipDAL,
@@ -1931,7 +1933,8 @@ export const registerRoutes = async (
    projectMembershipDAL,
    smtpService,
    kmsService,
-   keyStore
+   keyStore,
+   appConnectionDAL
  });

  const secretScanningV2Service = secretScanningV2ServiceFactory({
@@ -1940,7 +1943,8 @@ export const registerRoutes = async (
    licenseService,
    secretScanningV2DAL,
    secretScanningV2Queue,
-   kmsService
+   kmsService,
+   appConnectionDAL
  });

  // setup the communication with license key server
@@ -271,7 +271,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
  auditLogsRetentionDays: true,
  hasDeleteProtection: true,
  secretSharing: true,
- showSnapshotsLegacy: true
+ showSnapshotsLegacy: true,
+ secretDetectionIgnoreValues: true
});

export const SanitizedTagSchema = SecretTagsSchema.pick({
@@ -52,7 +52,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
        defaultAuthOrgAuthEnforced: z.boolean().nullish(),
        defaultAuthOrgAuthMethod: z.string().nullish(),
        isSecretScanningDisabled: z.boolean(),
-       kubernetesAutoFetchServiceAccountToken: z.boolean()
+       kubernetesAutoFetchServiceAccountToken: z.boolean(),
+       paramsFolderSecretDetectionEnabled: z.boolean()
      })
    })
  }
@@ -67,7 +68,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
      fipsEnabled: crypto.isFipsModeEnabled(),
      isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
      isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
-     kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
+     kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
+     paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
    }
  };
}
@@ -685,6 +687,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
    rateLimit: writeLimit
  },
  schema: {
+   hide: false,
    body: z.object({
      email: z.string().email().trim().min(1),
      password: z.string().trim().min(1),
@@ -369,7 +369,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
          .describe(PROJECTS.UPDATE.slug),
        secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
        showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy),
-       defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct)
+       defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct),
+       secretDetectionIgnoreValues: z
+         .array(z.string())
+         .optional()
+         .describe(PROJECTS.UPDATE.secretDetectionIgnoreValues)
      }),
      response: {
        200: z.object({
@@ -392,7 +396,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        hasDeleteProtection: req.body.hasDeleteProtection,
        slug: req.body.slug,
        secretSharing: req.body.secretSharing,
-       showSnapshotsLegacy: req.body.showSnapshotsLegacy
+       showSnapshotsLegacy: req.body.showSnapshotsLegacy,
+       secretDetectionIgnoreValues: req.body.secretDetectionIgnoreValues
      },
      actorAuthMethod: req.permission.authMethod,
      actorId: req.permission.id,
@@ -264,6 +264,48 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
    }
  });

+ server.route({
+   method: "DELETE",
+   url: "/:organizationId/memberships",
+   config: {
+     rateLimit: writeLimit
+   },
+   schema: {
+     hide: false,
+     tags: [ApiDocsTags.Organizations],
+     description: "Bulk delete organization user memberships",
+     security: [
+       {
+         bearerAuth: []
+       }
+     ],
+     params: z.object({
+       organizationId: z.string().trim().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.organizationId)
+     }),
+     body: z.object({
+       membershipIds: z.string().trim().array().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.membershipIds)
+     }),
+     response: {
+       200: z.object({
+         memberships: OrgMembershipsSchema.array()
+       })
+     }
+   },
+   onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
+   handler: async (req) => {
+     if (req.auth.actor !== ActorType.USER) return;
+
+     const memberships = await server.services.org.bulkDeleteOrgMemberships({
+       userId: req.permission.id,
+       actorAuthMethod: req.permission.authMethod,
+       orgId: req.params.organizationId,
+       membershipIds: req.body.membershipIds,
+       actorOrgId: req.permission.orgId
+     });
+     return { memberships };
+   }
+ });
+
  server.route({
    // TODO: re-think endpoint structure in future so users only need to pass in membershipId bc organizationId is redundant
    method: "GET",
@@ -583,7 +583,7 @@ export const appConnectionServiceFactory = ({
  deleteAppConnection,
  connectAppConnectionById,
  listAvailableAppConnectionsForUser,
- github: githubConnectionService(connectAppConnectionById),
+ github: githubConnectionService(connectAppConnectionById, gatewayService),
  githubRadar: githubRadarConnectionService(connectAppConnectionById),
  gcp: gcpConnectionService(connectAppConnectionById),
  databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
@@ -1,3 +1,4 @@
export enum AzureAppConfigurationConnectionMethod {
- OAuth = "oauth"
+ OAuth = "oauth",
+ ClientSecret = "client-secret"
}
@@ -1,3 +1,4 @@
+/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";

import { getConfig } from "@app/lib/config/env";
@@ -19,7 +20,10 @@ export const getAzureAppConfigurationConnectionListItem = () => {
  return {
    name: "Azure App Configuration" as const,
    app: AppConnection.AzureAppConfiguration as const,
-   methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
+   methods: Object.values(AzureAppConfigurationConnectionMethod) as [
+     AzureAppConfigurationConnectionMethod.OAuth,
+     AzureAppConfigurationConnectionMethod.ClientSecret
+   ],
    oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
  };
};
@@ -35,71 +39,111 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
    SITE_URL
  } = getConfig();

- if (
-   !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
-   !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
- ) {
-   throw new InternalServerError({
-     message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
-   });
- }
-
- let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
- let tokenError: AxiosError | null = null;
-
- try {
-   tokenResp = await request.post<ExchangeCodeAzureResponse>(
-     IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
-     new URLSearchParams({
-       grant_type: "authorization_code",
-       code: inputCredentials.code,
-       scope: `openid offline_access https://azconfig.io/.default`,
-       client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
-       client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
-       redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
-     })
-   );
- } catch (e: unknown) {
-   if (e instanceof AxiosError) {
-     tokenError = e;
-   } else {
-     throw new BadRequestError({
-       message: `Unable to validate connection: verify credentials`
-     });
-   }
- }
-
- if (tokenError) {
-   if (tokenError instanceof AxiosError) {
-     throw new BadRequestError({
-       message: `Failed to get access token: ${
-         (tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
-       }`
-     });
-   } else {
-     throw new InternalServerError({
-       message: "Failed to get access token"
-     });
-   }
- }
-
- if (!tokenResp) {
-   throw new InternalServerError({
-     message: `Failed to get access token: Token was empty with no error`
-   });
- }
-
+ switch (method) {
+   case AzureAppConfigurationConnectionMethod.OAuth:
+     if (
+       !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
+       !INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
+     ) {
+       throw new InternalServerError({
+         message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
+       });
+     }
+
+     let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
+     let tokenError: AxiosError | null = null;
+     const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
+     try {
+       tokenResp = await request.post<ExchangeCodeAzureResponse>(
+         IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
+         new URLSearchParams({
+           grant_type: "authorization_code",
+           code: oauthCredentials.code,
+           scope: `openid offline_access https://azconfig.io/.default`,
+           client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
+           client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
+           redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
+         })
+       );
+     } catch (e: unknown) {
+       if (e instanceof AxiosError) {
+         tokenError = e;
+       } else {
+         throw new BadRequestError({
+           message: `Unable to validate connection: verify credentials`
+         });
+       }
+     }
+
+     if (tokenError) {
+       if (tokenError instanceof AxiosError) {
+         throw new BadRequestError({
+           message: `Failed to get access token: ${
+             (tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
+           }`
+         });
+       } else {
+         throw new InternalServerError({
+           message: "Failed to get access token"
+         });
+       }
+     }
+
+     if (!tokenResp) {
+       throw new InternalServerError({
+         message: `Failed to get access token: Token was empty with no error`
+       });
+     }
+
+     return {
-       tenantId: inputCredentials.tenantId,
+       tenantId: oauthCredentials.tenantId,
+       accessToken: tokenResp.data.access_token,
+       refreshToken: tokenResp.data.refresh_token,
+       expiresAt: Date.now() + tokenResp.data.expires_in * 1000
+     };
+
+   case AzureAppConfigurationConnectionMethod.ClientSecret:
+     const { tenantId, clientId, clientSecret } = inputCredentials as {
+       tenantId: string;
+       clientId: string;
+       clientSecret: string;
+     };
+
+     try {
+       const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
+         IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
+         new URLSearchParams({
+           grant_type: "client_credentials",
+           scope: `https://azconfig.io/.default`,
+           client_id: clientId,
+           client_secret: clientSecret
+         })
+       );
+
+       return {
+         tenantId,
+         accessToken: clientData.access_token,
+         expiresAt: Date.now() + clientData.expires_in * 1000,
+         clientId,
+         clientSecret
+       };
+     } catch (e: unknown) {
+       if (e instanceof AxiosError) {
+         throw new BadRequestError({
+           message: `Failed to get access token: ${
+             (e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
+           }`
+         });
+       } else {
+         throw new InternalServerError({
+           message: "Failed to get access token"
+         });
+       }
+     }
+
+   default:
+     throw new InternalServerError({
-       message: `Unhandled Azure connection method: ${method as AzureAppConfigurationConnectionMethod}`
+       message: `Unhandled Azure App Configuration connection method: ${method as AzureAppConfigurationConnectionMethod}`
+     });
+ }
};
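A standalone sketch of the client-credentials exchange the new ClientSecret branch performs; the token endpoint is assumed to be what IntegrationUrls.AZURE_TOKEN_URL resolves to, and the tenant, client ID, and secret are placeholders:

// Illustrative example — not part of the changeset.
(async () => {
  const res = await fetch("https://login.microsoftonline.com/<tenant-id>/oauth2/v2.0/token", {
    method: "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body: new URLSearchParams({
      grant_type: "client_credentials",
      scope: "https://azconfig.io/.default",
      client_id: "<client-id>",
      client_secret: "<client-secret>"
    })
  });
  const { access_token: accessToken, expires_in: expiresIn } = (await res.json()) as {
    access_token: string;
    expires_in: number;
  };
  console.log(accessToken.slice(0, 8), Date.now() + expiresIn * 1000); // token prefix + expiry epoch
})();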
@@ -22,6 +22,29 @@ export const AzureAppConfigurationConnectionOAuthOutputCredentialsSchema = z.obj
|
||||
expiresAt: z.number()
|
||||
});
|
||||
|
||||
export const AzureAppConfigurationConnectionClientSecretInputCredentialsSchema = z.object({
|
||||
clientId: z
|
||||
.string()
|
||||
.uuid()
|
||||
.trim()
|
||||
.min(1, "Client ID required")
|
||||
.max(50, "Client ID must be at most 50 characters long"),
|
||||
clientSecret: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Client Secret required")
|
||||
.max(50, "Client Secret must be at most 50 characters long"),
|
||||
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
|
||||
});
|
||||
|
||||
export const AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema = z.object({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string(),
|
||||
tenantId: z.string(),
|
||||
accessToken: z.string(),
|
||||
expiresAt: z.number()
|
||||
});
|
||||
|
||||
export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: z
|
||||
@@ -30,6 +53,14 @@ export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discri
|
||||
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
method: z
|
||||
.literal(AzureAppConfigurationConnectionMethod.ClientSecret)
|
||||
.describe(AppConnections.CREATE(AppConnection.AzureAppConfiguration).method),
|
||||
credentials: AzureAppConfigurationConnectionClientSecretInputCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
|
||||
)
|
||||
})
|
||||
]);
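The discriminated union above routes each payload to the right credentials schema by the "method" literal. A hypothetical, simplified illustration of the same zod pattern (schema and values invented for this sketch):

import { z } from "zod";

// The "method" literal decides which credentials shape must accompany it.
const ConnectionSchema = z.discriminatedUnion("method", [
  z.object({ method: z.literal("oauth"), credentials: z.object({ code: z.string().min(1) }) }),
  z.object({
    method: z.literal("client-secret"),
    credentials: z.object({ clientId: z.string().uuid(), clientSecret: z.string().min(1) })
  })
]);

// A matching method/credentials pair parses; a mismatched pair fails fast.
const parsed = ConnectionSchema.parse({
  method: "client-secret",
  credentials: { clientId: "4c0057b1-9f05-4c48-8c1d-6b1c3a4d9e21", clientSecret: "s3cret" }
});
console.log(parsed.method); // "client-secret"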
@@ -39,9 +70,13 @@ export const CreateAzureAppConfigurationConnectionSchema = ValidateAzureAppConfi

export const UpdateAzureAppConfigurationConnectionSchema = z
  .object({
    credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials
    )
    credentials: z
      .union([
        AzureAppConfigurationConnectionOAuthInputCredentialsSchema,
        AzureAppConfigurationConnectionClientSecretInputCredentialsSchema
      ])
      .optional()
      .describe(AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials)
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureAppConfiguration));

@@ -55,6 +90,10 @@ export const AzureAppConfigurationConnectionSchema = z.intersection(
    z.object({
      method: z.literal(AzureAppConfigurationConnectionMethod.OAuth),
      credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
    }),
    z.object({
      method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
      credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
    })
  ])
);
@@ -65,6 +104,13 @@ export const SanitizedAzureAppConfigurationConnectionSchema = z.discriminatedUni
    credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema.pick({
      tenantId: true
    })
  }),
  BaseAzureAppConfigurationConnectionSchema.extend({
    method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
    credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema.pick({
      clientId: true,
      tenantId: true
    })
  })
]);

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
  AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema,
  AzureAppConfigurationConnectionOAuthOutputCredentialsSchema,
  AzureAppConfigurationConnectionSchema,
  CreateAzureAppConfigurationConnectionSchema,
@@ -39,3 +40,7 @@ export type ExchangeCodeAzureResponse = {
export type TAzureAppConfigurationConnectionCredentials = z.infer<
  typeof AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
>;

export type TAzureAppConfigurationConnectionClientSecretCredentials = z.infer<
  typeof AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
>;

@@ -1,4 +1,5 @@
export enum AzureDevOpsConnectionMethod {
  OAuth = "oauth",
  AccessToken = "access-token"
  AccessToken = "access-token",
  ClientSecret = "client-secret"
}

@@ -18,6 +18,7 @@ import { AppConnection } from "../app-connection-enums";
import { AzureDevOpsConnectionMethod } from "./azure-devops-enums";
import {
  ExchangeCodeAzureResponse,
  TAzureDevOpsConnectionClientSecretCredentials,
  TAzureDevOpsConnectionConfig,
  TAzureDevOpsConnectionCredentials
} from "./azure-devops-types";
@@ -30,7 +31,8 @@ export const getAzureDevopsConnectionListItem = () => {
    app: AppConnection.AzureDevOps as const,
    methods: Object.values(AzureDevOpsConnectionMethod) as [
      AzureDevOpsConnectionMethod.OAuth,
      AzureDevOpsConnectionMethod.AccessToken
      AzureDevOpsConnectionMethod.AccessToken,
      AzureDevOpsConnectionMethod.ClientSecret
    ],
    oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
  };
@@ -53,11 +55,7 @@ export const getAzureDevopsConnection = async (
    });
  }

  const credentials = (await decryptAppConnectionCredentials({
    orgId: appConnection.orgId,
    kmsService,
    encryptedCredentials: appConnection.encryptedCredentials
  })) as TAzureDevOpsConnectionCredentials;
  const currentTime = Date.now();

  // Handle different connection methods
  switch (appConnection.method) {
@@ -69,12 +67,17 @@ export const getAzureDevopsConnection = async (
        });
      }

      if (!("refreshToken" in credentials)) {
      const oauthCredentials = (await decryptAppConnectionCredentials({
        orgId: appConnection.orgId,
        kmsService,
        encryptedCredentials: appConnection.encryptedCredentials
      })) as TAzureDevOpsConnectionCredentials;

      if (!("refreshToken" in oauthCredentials)) {
        throw new BadRequestError({ message: "Invalid OAuth credentials" });
      }

      const { refreshToken, tenantId } = credentials;
      const currentTime = Date.now();
      const { refreshToken, tenantId } = oauthCredentials;

      const { data } = await request.post<ExchangeCodeAzureResponse>(
        IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
@@ -87,29 +90,75 @@ export const getAzureDevopsConnection = async (
        })
      );

      const updatedCredentials = {
        ...credentials,
      const updatedOAuthCredentials = {
        ...oauthCredentials,
        accessToken: data.access_token,
        expiresAt: currentTime + data.expires_in * 1000,
        refreshToken: data.refresh_token
      };

      const encryptedCredentials = await encryptAppConnectionCredentials({
        credentials: updatedCredentials,
      const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
        credentials: updatedOAuthCredentials,
        orgId: appConnection.orgId,
        kmsService
      });

      await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
      await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });

      return data.access_token;

    case AzureDevOpsConnectionMethod.AccessToken:
      if (!("accessToken" in credentials)) {
      const accessTokenCredentials = (await decryptAppConnectionCredentials({
        orgId: appConnection.orgId,
        kmsService,
        encryptedCredentials: appConnection.encryptedCredentials
      })) as { accessToken: string };

      if (!("accessToken" in accessTokenCredentials)) {
        throw new BadRequestError({ message: "Invalid API token credentials" });
      }
      // For access token, return the basic auth token directly
      return credentials.accessToken;
      return accessTokenCredentials.accessToken;

    case AzureDevOpsConnectionMethod.ClientSecret:
      const clientSecretCredentials = (await decryptAppConnectionCredentials({
        orgId: appConnection.orgId,
        kmsService,
        encryptedCredentials: appConnection.encryptedCredentials
      })) as TAzureDevOpsConnectionClientSecretCredentials;

      const { accessToken, expiresAt, clientId, clientSecret, tenantId: clientTenantId } = clientSecretCredentials;

      // Check if token is still valid (with 5 minute buffer)
      if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
        return accessToken;
      }

      const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
        IntegrationUrls.AZURE_TOKEN_URL.replace("common", clientTenantId || "common"),
        new URLSearchParams({
          grant_type: "client_credentials",
          scope: `https://app.vssps.visualstudio.com/.default`,
          client_id: clientId,
          client_secret: clientSecret
        })
      );

      const updatedClientCredentials = {
        ...clientSecretCredentials,
        accessToken: clientData.access_token,
        expiresAt: currentTime + clientData.expires_in * 1000
      };

      const encryptedClientCredentials = await encryptAppConnectionCredentials({
        credentials: updatedClientCredentials,
        orgId: appConnection.orgId,
        kmsService
      });

      await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });

      return clientData.access_token;

    default:
      throw new BadRequestError({ message: `Unsupported connection method` });
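The ClientSecret branch above reuses a cached token while it still has at least five minutes of life left, and only then falls back to a fresh exchange. A standalone sketch of that reuse-or-refresh pattern (names are hypothetical, not from this codebase):

type CachedToken = { accessToken: string; expiresAt: number }; // epoch milliseconds

const FIVE_MINUTES_MS = 5 * 60 * 1000;

// Return the cached token while it is comfortably valid; otherwise fetch a
// new one via the supplied function and cache it for the next caller.
const getOrRefreshToken = async (
  cache: { current?: CachedToken },
  fetchToken: () => Promise<CachedToken>
): Promise<string> => {
  const now = Date.now();
  if (cache.current && cache.current.expiresAt > now + FIVE_MINUTES_MS) {
    return cache.current.accessToken;
  }
  cache.current = await fetchToken();
  return cache.current.accessToken;
};

The buffer avoids handing out a token that would expire mid-request while still amortizing the token-endpoint round trip across many calls.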
@@ -138,7 +187,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
  let tokenError: AxiosError | null = null;

  try {
    const oauthCredentials = inputCredentials as { code: string; tenantId: string };
    const oauthCredentials = inputCredentials as { code: string; tenantId: string; orgName: string };
    tokenResp = await request.post<ExchangeCodeAzureResponse>(
      IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
      new URLSearchParams({
@@ -262,9 +311,67 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
      });
    }

    case AzureDevOpsConnectionMethod.ClientSecret:
      const { tenantId, clientId, clientSecret, orgName } = inputCredentials as {
        tenantId: string;
        clientId: string;
        clientSecret: string;
        orgName: string;
      };

      try {
        // First, get the access token using client credentials flow
        const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
          IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
          new URLSearchParams({
            grant_type: "client_credentials",
            scope: `https://app.vssps.visualstudio.com/.default`,
            client_id: clientId,
            client_secret: clientSecret
          })
        );

        // Validate access to the specific organization
        const response = await request.get(
          `${IntegrationUrls.AZURE_DEVOPS_API_URL}/${encodeURIComponent(orgName)}/_apis/projects?api-version=7.2-preview.2&$top=1`,
          {
            headers: {
              Authorization: `Bearer ${clientData.access_token}`
            }
          }
        );

        if (response.status !== 200) {
          throw new BadRequestError({
            message: `Failed to validate connection to organization '${orgName}': ${response.status}`
          });
        }

        return {
          tenantId,
          clientId,
          clientSecret,
          orgName,
          accessToken: clientData.access_token,
          expiresAt: Date.now() + clientData.expires_in * 1000
        };
      } catch (e: unknown) {
        if (e instanceof AxiosError) {
          throw new BadRequestError({
            message: `Failed to authenticate with Azure DevOps using client credentials: ${
              (e?.response?.data as { error_description?: string })?.error_description || e.message
            }`
          });
        } else {
          throw new InternalServerError({
            message: "Failed to validate Azure DevOps client credentials"
          });
        }
      }

    default:
      throw new InternalServerError({
        message: `Unhandled Azure connection method: ${method as AzureDevOpsConnectionMethod}`
        message: `Unhandled Azure DevOps connection method: ${method as AzureDevOpsConnectionMethod}`
      });
  }
};
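The validation above proves org access with the cheapest possible read: listing at most one project ($top=1). A standalone sketch of that probe, assuming the public https://dev.azure.com base URL (the snippet uses IntegrationUrls.AZURE_DEVOPS_API_URL, which this sketch does not have access to):

// Probe an Azure DevOps organization with a bearer token by listing at most
// one project; an HTTP 200 means the principal can reach the org.
const canAccessAzureDevOpsOrg = async (orgName: string, accessToken: string): Promise<boolean> => {
  const res = await fetch(
    `https://dev.azure.com/${encodeURIComponent(orgName)}/_apis/projects?api-version=7.2-preview.2&$top=1`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  );
  return res.status === 200;
};

Validating against the concrete org name (rather than just acquiring a token) catches the common misconfiguration where the service principal exists but was never granted access to the organization.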
@@ -38,6 +38,42 @@ export const AzureDevOpsConnectionAccessTokenOutputCredentialsSchema = z.object(
  orgName: z.string()
});

export const AzureDevOpsConnectionClientSecretInputCredentialsSchema = z.object({
  clientId: z
    .string()
    .uuid()
    .trim()
    .min(1, "Client ID required")
    .max(50, "Client ID must be at most 50 characters long")
    .describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientId),
  clientSecret: z
    .string()
    .trim()
    .min(1, "Client Secret required")
    .max(50, "Client Secret must be at most 50 characters long")
    .describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientSecret),
  tenantId: z
    .string()
    .uuid()
    .trim()
    .min(1, "Tenant ID required")
    .describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.tenantId),
  orgName: z
    .string()
    .trim()
    .min(1, "Organization name required")
    .describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.orgName)
});

export const AzureDevOpsConnectionClientSecretOutputCredentialsSchema = z.object({
  clientId: z.string(),
  clientSecret: z.string(),
  tenantId: z.string(),
  orgName: z.string(),
  accessToken: z.string(),
  expiresAt: z.number()
});

export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
@@ -54,6 +90,14 @@ export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUni
    credentials: AzureDevOpsConnectionAccessTokenInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.AzureDevOps).credentials
    )
  }),
  z.object({
    method: z
      .literal(AzureDevOpsConnectionMethod.ClientSecret)
      .describe(AppConnections.CREATE(AppConnection.AzureDevOps).method),
    credentials: AzureDevOpsConnectionClientSecretInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.AzureDevOps).credentials
    )
  })
]);

@@ -64,7 +108,11 @@ export const CreateAzureDevOpsConnectionSchema = ValidateAzureDevOpsConnectionCr
export const UpdateAzureDevOpsConnectionSchema = z
  .object({
    credentials: z
      .union([AzureDevOpsConnectionOAuthInputCredentialsSchema, AzureDevOpsConnectionAccessTokenInputCredentialsSchema])
      .union([
        AzureDevOpsConnectionOAuthInputCredentialsSchema,
        AzureDevOpsConnectionAccessTokenInputCredentialsSchema,
        AzureDevOpsConnectionClientSecretInputCredentialsSchema
      ])
      .optional()
      .describe(AppConnections.UPDATE(AppConnection.AzureDevOps).credentials)
  })
@@ -84,6 +132,10 @@ export const AzureDevOpsConnectionSchema = z.intersection(
    z.object({
      method: z.literal(AzureDevOpsConnectionMethod.AccessToken),
      credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema
    }),
    z.object({
      method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
      credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema
    })
  ])
);
@@ -101,6 +153,14 @@ export const SanitizedAzureDevOpsConnectionSchema = z.discriminatedUnion("method
    credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema.pick({
      orgName: true
    })
  }),
  BaseAzureDevOpsConnectionSchema.extend({
    method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
    credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema.pick({
      clientId: true,
      tenantId: true,
      orgName: true
    })
  })
]);
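The Sanitized schemas above use .pick so API responses expose only identifying fields; clientSecret and accessToken are removed from the shape itself rather than filtered ad hoc. A toy illustration of the pattern (schema and values invented for this sketch):

import { z } from "zod";

const FullCredentials = z.object({
  clientId: z.string(),
  clientSecret: z.string(),
  tenantId: z.string(),
  accessToken: z.string()
});

// Only the identifying fields survive in the picked schema.
const PublicCredentials = FullCredentials.pick({ clientId: true, tenantId: true });

type Public = z.infer<typeof PublicCredentials>; // { clientId: string; tenantId: string }

const sanitized = PublicCredentials.parse({
  clientId: "app-123",
  clientSecret: "should-not-leak", // unknown keys are stripped by zod's default behavior
  tenantId: "tenant-456",
  accessToken: "also-dropped"
});
console.log(sanitized); // { clientId: "app-123", tenantId: "tenant-456" }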

@@ -52,6 +52,11 @@ const getAuthHeaders = (appConnection: TAzureDevOpsConnection, accessToken: stri
        Authorization: `Basic ${basicAuthToken}`,
        Accept: "application/json"
      };
    case AzureDevOpsConnectionMethod.ClientSecret:
      return {
        Authorization: `Bearer ${accessToken}`,
        Accept: "application/json"
      };
    default:
      throw new BadRequestError({ message: "Unsupported connection method" });
  }
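The two header shapes above differ because Azure DevOps PATs are conventionally sent as HTTP Basic auth (base64 of an empty username plus the token), while AAD-issued tokens go as plain Bearer. A sketch of both constructions; the ":pat" encoding convention is an assumption about what basicAuthToken contains, since the snippet above does not show it being built:

// PAT as Basic auth with an empty username (assumed Azure DevOps convention).
const basicAuthHeader = (pat: string) => ({
  Authorization: `Basic ${Buffer.from(`:${pat}`).toString("base64")}`,
  Accept: "application/json"
});

// AAD access token as a standard Bearer header.
const bearerAuthHeader = (accessToken: string) => ({
  Authorization: `Bearer ${accessToken}`,
  Accept: "application/json"
});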

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
  AzureDevOpsConnectionClientSecretOutputCredentialsSchema,
  AzureDevOpsConnectionOAuthOutputCredentialsSchema,
  AzureDevOpsConnectionSchema,
  CreateAzureDevOpsConnectionSchema,
@@ -27,6 +28,10 @@ export type TAzureDevOpsConnectionConfig = DiscriminativePick<

export type TAzureDevOpsConnectionCredentials = z.infer<typeof AzureDevOpsConnectionOAuthOutputCredentialsSchema>;

export type TAzureDevOpsConnectionClientSecretCredentials = z.infer<
  typeof AzureDevOpsConnectionClientSecretOutputCredentialsSchema
>;

export interface ExchangeCodeAzureResponse {
  token_type: string;
  scope: string;

@@ -1,3 +1,4 @@
export enum AzureKeyVaultConnectionMethod {
  OAuth = "oauth"
  OAuth = "oauth",
  ClientSecret = "client-secret"
}

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";

import { getConfig } from "@app/lib/config/env";
@@ -16,25 +17,16 @@ import { AppConnection } from "../app-connection-enums";
import { AzureKeyVaultConnectionMethod } from "./azure-key-vault-connection-enums";
import {
  ExchangeCodeAzureResponse,
  TAzureKeyVaultConnectionClientSecretCredentials,
  TAzureKeyVaultConnectionConfig,
  TAzureKeyVaultConnectionCredentials
} from "./azure-key-vault-connection-types";

export const getAzureConnectionAccessToken = async (
  connectionId: string,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const appCfg = getConfig();
  if (
    !appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
    !appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
  ) {
    throw new BadRequestError({
      message: `Azure environment variables have not been configured`
    });
  }

  const appConnection = await appConnectionDAL.findById(connectionId);

  if (!appConnection) {
@@ -49,49 +41,101 @@ export const getAzureConnectionAccessToken = async (
    throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not a valid Azure connection` });
  }

  const credentials = (await decryptAppConnectionCredentials({
    orgId: appConnection.orgId,
    kmsService,
    encryptedCredentials: appConnection.encryptedCredentials
  })) as TAzureKeyVaultConnectionCredentials;
  const currentTime = Date.now();

  const { data } = await request.post<ExchangeCodeAzureResponse>(
    IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
    new URLSearchParams({
      grant_type: "refresh_token",
      scope: `openid offline_access`,
      client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
      client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
      refresh_token: credentials.refreshToken
    })
  );
  switch (appConnection.method) {
    case AzureKeyVaultConnectionMethod.OAuth:
      const appCfg = getConfig();
      if (
        !appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
        !appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
      ) {
        throw new BadRequestError({
          message: `Azure environment variables have not been configured`
        });
      }

      const accessExpiresAt = new Date();
      accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
      const oauthCredentials = (await decryptAppConnectionCredentials({
        orgId: appConnection.orgId,
        kmsService,
        encryptedCredentials: appConnection.encryptedCredentials
      })) as TAzureKeyVaultConnectionCredentials;

      const updatedCredentials = {
        ...credentials,
        accessToken: data.access_token,
        expiresAt: accessExpiresAt.getTime(),
        refreshToken: data.refresh_token
      };
      const { data } = await request.post<ExchangeCodeAzureResponse>(
        IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
        new URLSearchParams({
          grant_type: "refresh_token",
          scope: `openid offline_access https://vault.azure.net/.default`,
          client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
          client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
          refresh_token: oauthCredentials.refreshToken
        })
      );

      const encryptedCredentials = await encryptAppConnectionCredentials({
        credentials: updatedCredentials,
        orgId: appConnection.orgId,
        kmsService
      });
      const updatedOAuthCredentials = {
        ...oauthCredentials,
        accessToken: data.access_token,
        expiresAt: currentTime + data.expires_in * 1000,
        refreshToken: data.refresh_token
      };

      await appConnectionDAL.update(
        { id: connectionId },
        {
          encryptedCredentials
        }
      );
      const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
        credentials: updatedOAuthCredentials,
        orgId: appConnection.orgId,
        kmsService
      });

      return {
        accessToken: data.access_token
      };
      await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });

      return {
        accessToken: data.access_token
      };

    case AzureKeyVaultConnectionMethod.ClientSecret:
      const clientSecretCredentials = (await decryptAppConnectionCredentials({
        orgId: appConnection.orgId,
        kmsService,
        encryptedCredentials: appConnection.encryptedCredentials
      })) as TAzureKeyVaultConnectionClientSecretCredentials;

      const { accessToken, expiresAt, clientId, clientSecret, tenantId } = clientSecretCredentials;

      // Check if token is still valid (with 5 minute buffer)
      if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
        return { accessToken };
      }

      const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
        IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
        new URLSearchParams({
          grant_type: "client_credentials",
          scope: `https://vault.azure.net/.default`,
          client_id: clientId,
          client_secret: clientSecret
        })
      );

      const updatedClientCredentials = {
        ...clientSecretCredentials,
        accessToken: clientData.access_token,
        expiresAt: currentTime + clientData.expires_in * 1000
      };

      const encryptedClientCredentials = await encryptAppConnectionCredentials({
        credentials: updatedClientCredentials,
        orgId: appConnection.orgId,
        kmsService
      });

      await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });

      return { accessToken: clientData.access_token };

    default:
      throw new InternalServerError({
        message: `Unhandled Azure Key Vault connection method: ${appConnection.method as AzureKeyVaultConnectionMethod}`
      });
  }
};
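The OAuth branch above rotates tokens with the refresh_token grant and persists the new refresh token, since Azure returns a fresh one on every exchange. A standalone sketch of that grant, assuming axios and hypothetical environment variable names (not the ones used in this codebase):

import axios from "axios";

type AzureTokenResponse = { access_token: string; refresh_token: string; expires_in: number };

// Exchange a refresh token for a fresh access/refresh token pair. Callers
// should persist the returned refresh token, as the code above does by
// re-encrypting the updated credentials.
const refreshAzureToken = async (tenantId: string, refreshToken: string) => {
  const { data } = await axios.post<AzureTokenResponse>(
    `https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`,
    new URLSearchParams({
      grant_type: "refresh_token",
      scope: "openid offline_access https://vault.azure.net/.default",
      client_id: process.env.AZURE_CLIENT_ID as string, // hypothetical env names
      client_secret: process.env.AZURE_CLIENT_SECRET as string,
      refresh_token: refreshToken
    })
  );
  return {
    accessToken: data.access_token,
    refreshToken: data.refresh_token,
    expiresAt: Date.now() + data.expires_in * 1000
  };
};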

export const getAzureKeyVaultConnectionListItem = () => {
@@ -100,7 +144,10 @@ export const getAzureKeyVaultConnectionListItem = () => {
  return {
    name: "Azure Key Vault" as const,
    app: AppConnection.AzureKeyVault as const,
    methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
    methods: Object.values(AzureKeyVaultConnectionMethod) as [
      AzureKeyVaultConnectionMethod.OAuth,
      AzureKeyVaultConnectionMethod.ClientSecret
    ],
    oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
  };
};
@@ -111,68 +158,108 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
  const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
    getConfig();

  if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
    throw new InternalServerError({
      message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
    });
  }

  let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
  let tokenError: AxiosError | null = null;

  try {
    tokenResp = await request.post<ExchangeCodeAzureResponse>(
      IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
      new URLSearchParams({
        grant_type: "authorization_code",
        code: inputCredentials.code,
        scope: `openid offline_access https://vault.azure.net/.default`,
        client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
        client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
        redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
      })
    );
  } catch (e: unknown) {
    if (e instanceof AxiosError) {
      tokenError = e;
    } else {
      throw new BadRequestError({
        message: `Unable to validate connection: verify credentials`
      });
    }
  }

  if (tokenError) {
    if (tokenError instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to get access token: ${
          (tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
        }`
      });
    } else {
      throw new InternalServerError({
        message: "Failed to get access token"
      });
    }
  }

  if (!tokenResp) {
    throw new InternalServerError({
      message: `Failed to get access token: Token was empty with no error`
    });
  }

  switch (method) {
    case AzureKeyVaultConnectionMethod.OAuth:
      if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
        throw new InternalServerError({
          message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
        });
      }

      let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
      let tokenError: AxiosError | null = null;
      const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
      try {
        tokenResp = await request.post<ExchangeCodeAzureResponse>(
          IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
          new URLSearchParams({
            grant_type: "authorization_code",
            code: oauthCredentials.code,
            scope: `openid offline_access https://vault.azure.net/.default`,
            client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
            client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
            redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
          })
        );
      } catch (e: unknown) {
        if (e instanceof AxiosError) {
          tokenError = e;
        } else {
          throw new BadRequestError({
            message: `Unable to validate connection: verify credentials`
          });
        }
      }

      if (tokenError) {
        if (tokenError instanceof AxiosError) {
          throw new BadRequestError({
            message: `Failed to get access token: ${
              (tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
            }`
          });
        } else {
          throw new InternalServerError({
            message: "Failed to get access token"
          });
        }
      }

      if (!tokenResp) {
        throw new InternalServerError({
          message: `Failed to get access token: Token was empty with no error`
        });
      }

      return {
        tenantId: inputCredentials.tenantId,
        tenantId: oauthCredentials.tenantId,
        accessToken: tokenResp.data.access_token,
        refreshToken: tokenResp.data.refresh_token,
        expiresAt: Date.now() + tokenResp.data.expires_in * 1000
      };

    case AzureKeyVaultConnectionMethod.ClientSecret:
      const { tenantId, clientId, clientSecret } = inputCredentials as {
        tenantId: string;
        clientId: string;
        clientSecret: string;
      };

      try {
        const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
          IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
          new URLSearchParams({
            grant_type: "client_credentials",
            scope: `https://vault.azure.net/.default`,
            client_id: clientId,
            client_secret: clientSecret
          })
        );

        return {
          tenantId,
          accessToken: clientData.access_token,
          expiresAt: Date.now() + clientData.expires_in * 1000,
          clientId,
          clientSecret
        };
      } catch (e: unknown) {
        if (e instanceof AxiosError) {
          throw new BadRequestError({
            message: `Failed to get access token: ${
              (e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
            }`
          });
        } else {
          throw new InternalServerError({
            message: "Failed to get access token"
          });
        }
      }

    default:
      throw new InternalServerError({
        message: `Unhandled Azure connection method: ${method as AzureKeyVaultConnectionMethod}`
        message: `Unhandled Azure Key Vault connection method: ${method as AzureKeyVaultConnectionMethod}`
      });
  }
};

@@ -22,6 +22,29 @@ export const AzureKeyVaultConnectionOAuthOutputCredentialsSchema = z.object({
  expiresAt: z.number()
});

export const AzureKeyVaultConnectionClientSecretInputCredentialsSchema = z.object({
  clientId: z
    .string()
    .uuid()
    .trim()
    .min(1, "Client ID required")
    .max(50, "Client ID must be at most 50 characters long"),
  clientSecret: z
    .string()
    .trim()
    .min(1, "Client Secret required")
    .max(50, "Client Secret must be at most 50 characters long"),
  tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
});

export const AzureKeyVaultConnectionClientSecretOutputCredentialsSchema = z.object({
  clientId: z.string(),
  clientSecret: z.string(),
  tenantId: z.string(),
  accessToken: z.string(),
  expiresAt: z.number()
});

export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
@@ -30,6 +53,14 @@ export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedU
    credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
    )
  }),
  z.object({
    method: z
      .literal(AzureKeyVaultConnectionMethod.ClientSecret)
      .describe(AppConnections.CREATE(AppConnection.AzureKeyVault).method),
    credentials: AzureKeyVaultConnectionClientSecretInputCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
    )
  })
]);

@@ -39,9 +70,13 @@ export const CreateAzureKeyVaultConnectionSchema = ValidateAzureKeyVaultConnecti

export const UpdateAzureKeyVaultConnectionSchema = z
  .object({
    credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials
    )
    credentials: z
      .union([
        AzureKeyVaultConnectionOAuthInputCredentialsSchema,
        AzureKeyVaultConnectionClientSecretInputCredentialsSchema
      ])
      .optional()
      .describe(AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials)
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureKeyVault));

@@ -55,6 +90,10 @@ export const AzureKeyVaultConnectionSchema = z.intersection(
    z.object({
      method: z.literal(AzureKeyVaultConnectionMethod.OAuth),
      credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema
    }),
    z.object({
      method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
      credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
    })
  ])
);
@@ -65,6 +104,13 @@ export const SanitizedAzureKeyVaultConnectionSchema = z.discriminatedUnion("meth
    credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema.pick({
      tenantId: true
    })
  }),
  BaseAzureKeyVaultConnectionSchema.extend({
    method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
    credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema.pick({
      clientId: true,
      tenantId: true
    })
  })
]);

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
  AzureKeyVaultConnectionClientSecretOutputCredentialsSchema,
  AzureKeyVaultConnectionOAuthOutputCredentialsSchema,
  AzureKeyVaultConnectionSchema,
  CreateAzureKeyVaultConnectionSchema,
@@ -36,3 +37,7 @@ export type ExchangeCodeAzureResponse = {
};

export type TAzureKeyVaultConnectionCredentials = z.infer<typeof AzureKeyVaultConnectionOAuthOutputCredentialsSchema>;

export type TAzureKeyVaultConnectionClientSecretCredentials = z.infer<
  typeof AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
>;

@@ -1,10 +1,16 @@
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { AxiosResponse } from "axios";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import RE2 from "re2";

import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { request as httpRequest } from "@app/lib/config/request";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";

@@ -24,123 +30,224 @@ export const getGitHubConnectionListItem = () => {
  };
};

export const getGitHubClient = (appConnection: TGitHubConnection) => {
export const requestWithGitHubGateway = async <T>(
  appConnection: { gatewayId?: string | null },
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  requestConfig: AxiosRequestConfig
): Promise<AxiosResponse<T>> => {
  const { gatewayId } = appConnection;

  // If gateway isn't set up, don't proxy request
  if (!gatewayId) {
    return httpRequest.request(requestConfig);
  }

  const url = new URL(requestConfig.url as string);

  await blockLocalAndPrivateIpAddresses(url.toString());

  const [targetHost] = await verifyHostInputValidity(url.host, true);
  const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
  const [relayHost, relayPort] = relayDetails.relayAddress.split(":");

  return withGatewayProxy(
    async (proxyPort) => {
      const httpsAgent = new https.Agent({
        servername: targetHost
      });

      url.protocol = "https:";
      url.host = `localhost:${proxyPort}`;

      const finalRequestConfig: AxiosRequestConfig = {
        ...requestConfig,
        url: url.toString(),
        httpsAgent,
        headers: {
          ...requestConfig.headers,
          Host: targetHost
        }
      };

      try {
        return await httpRequest.request(finalRequestConfig);
      } catch (error) {
        const axiosError = error as AxiosError;
        logger.error("Error during GitHub gateway request:", axiosError.message, axiosError.response?.data);
        throw error;
      }
    },
    {
      protocol: GatewayProxyProtocol.Tcp,
      targetHost,
      targetPort: 443,
      relayHost,
      relayPort: Number(relayPort),
      identityId: relayDetails.identityId,
      orgId: relayDetails.orgId,
      tlsOptions: {
        ca: relayDetails.certChain,
        cert: relayDetails.certificate,
        key: relayDetails.privateKey.toString()
      }
    }
  );
};
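The key trick above is that the request dials localhost (the gateway proxy's local port) while still presenting the real hostname during the TLS handshake, via the https.Agent servername option (SNI), and in the Host header for upstream virtual-host routing. A minimal sketch of that technique in isolation, with a hypothetical target and no gateway machinery:

import https from "https";
import axios from "axios";

// Send a request to a local TCP tunnel while TLS-validating and addressing
// the real upstream host. `servername` drives SNI and certificate checks;
// the Host header drives upstream routing.
const requestThroughLocalProxy = async (proxyPort: number, targetHost: string, path: string) => {
  const httpsAgent = new https.Agent({ servername: targetHost });
  return axios.get(`https://localhost:${proxyPort}${path}`, {
    httpsAgent,
    headers: { Host: targetHost }
  });
};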

export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
  const appCfg = getConfig();

  const { method, credentials } = appConnection;

  let client: Octokit;

  const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
  const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;

  switch (method) {
    case GitHubConnectionMethod.App:
      if (!appId || !appPrivateKey) {
        throw new InternalServerError({
          message: `GitHub ${getAppConnectionMethodName(method).replace("GitHub", "")} has not been configured`
        });
      }

      client = new Octokit({
        authStrategy: createAppAuth,
        auth: {
          appId,
          privateKey: appPrivateKey,
          installationId: credentials.installationId
        }
      });
      break;
    case GitHubConnectionMethod.OAuth:
      client = new Octokit({
        auth: credentials.accessToken
      });
      break;
    default:
      throw new InternalServerError({
        message: `Unhandled GitHub connection method: ${method as GitHubConnectionMethod}`
      });
  if (!appId || !appPrivateKey) {
    throw new InternalServerError({
      message: `GitHub App keys are not configured.`
    });
  }

  return client;
  if (appConnection.method !== GitHubConnectionMethod.App) {
    throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
  }

  const appAuth = createAppAuth({
    appId,
    privateKey: appPrivateKey,
    installationId: appConnection.credentials.installationId
  });

  const { token } = await appAuth({ type: "installation" });
  return token;
};
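A usage sketch of the same @octokit/auth-app installation-token flow in isolation; the appId, privateKey, and installationId values are placeholders, not real credentials:

import { createAppAuth } from "@octokit/auth-app";

// Mint a short-lived installation access token for a GitHub App.
const mintInstallationToken = async () => {
  const auth = createAppAuth({
    appId: 123456, // placeholder
    privateKey: "-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----", // placeholder
    installationId: 987654 // placeholder
  });
  const { token, expiresAt } = await auth({ type: "installation" });
  return { token, expiresAt }; // installation tokens are short-lived
};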

function extractNextPageUrl(linkHeader: string | undefined): string | null {
  if (!linkHeader) return null;

  const links = linkHeader.split(",");
  const nextLink = links.find((link) => link.includes('rel="next"'));

  if (!nextLink) return null;

  const match = new RE2(/<([^>]+)>/).exec(nextLink);
  return match ? match[1] : null;
}
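GitHub paginates with RFC 5988 Link headers, and only the rel="next" target matters here. An example of the header format this helper parses, assuming the function above is in scope:

// A typical paginated GitHub response carries a header like this:
const sampleLinkHeader =
  '<https://api.github.com/user/repos?page=3>; rel="next", <https://api.github.com/user/repos?page=7>; rel="last"';

console.log(extractNextPageUrl(sampleLinkHeader)); // "https://api.github.com/user/repos?page=3"
console.log(extractNextPageUrl('<https://api.github.com/user/repos?page=7>; rel="last"')); // null (no next page)
console.log(extractNextPageUrl(undefined)); // null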

export const makePaginatedGitHubRequest = async <T, R = T[]>(
  appConnection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  path: string,
  dataMapper?: (data: R) => T[]
): Promise<T[]> => {
  const { credentials, method } = appConnection;

  const token =
    method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
  let url: string | null = `https://api.${credentials.host || "github.com"}${path}`;
  let results: T[] = [];
  let i = 0;

  while (url && i < 1000) {
    // eslint-disable-next-line no-await-in-loop
    const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
      url,
      method: "GET",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${token}`,
        "X-GitHub-Api-Version": "2022-11-28"
      }
    });

    const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
    results = results.concat(items);

    url = extractNextPageUrl(response.headers.link as string | undefined);
    i += 1;
  }

  return results;
};
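A hypothetical call shape, assuming the types above are in scope: endpoints that return a bare array need no mapper, while envelope endpoints like /installation/repositories unwrap through dataMapper. The 1000-page cap bounds the loop against pathological Link headers.

// Illustrative usage only; connection and gateway service come from the caller.
const listNames = async (
  appConnection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  // Bare-array endpoint: no mapper needed.
  const orgs = await makePaginatedGitHubRequest<{ login: string }>(appConnection, gatewayService, "/user/orgs");

  // Envelope endpoint: the mapper unwraps the `repositories` field per page.
  const repos = await makePaginatedGitHubRequest<{ name: string }, { repositories: { name: string }[] }>(
    appConnection,
    gatewayService,
    "/installation/repositories",
    (data) => data.repositories
  );

  return { orgNames: orgs.map((o) => o.login), repoNames: repos.map((r) => r.name) };
};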

type GitHubOrganization = {
  login: string;
  id: number;
  type: string;
};

type GitHubRepository = {
  id: number;
  name: string;
  owner: GitHubOrganization;
  permissions?: {
    admin: boolean;
    maintain: boolean;
    push: boolean;
    triage: boolean;
    pull: boolean;
  };
};

export const getGitHubRepositories = async (appConnection: TGitHubConnection) => {
  const client = getGitHubClient(appConnection);
type GitHubEnvironment = {
  id: number;
  name: string;
};

  let repositories: GitHubRepository[];

  switch (appConnection.method) {
    case GitHubConnectionMethod.App:
      repositories = await client.paginate("GET /installation/repositories");
      break;
    case GitHubConnectionMethod.OAuth:
    default:
      repositories = (await client.paginate("GET /user/repos")).filter((repo) => repo.permissions?.admin);
      break;
export const getGitHubRepositories = async (
  appConnection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  if (appConnection.method === GitHubConnectionMethod.App) {
    return makePaginatedGitHubRequest<GitHubRepository, { repositories: GitHubRepository[] }>(
      appConnection,
      gatewayService,
      "/installation/repositories",
      (data) => data.repositories
    );
  }

  return repositories;
  const repos = await makePaginatedGitHubRequest<GitHubRepository>(appConnection, gatewayService, "/user/repos");
  return repos.filter((repo) => repo.permissions?.admin);
};

export const getGitHubOrganizations = async (appConnection: TGitHubConnection) => {
  const client = getGitHubClient(appConnection);
export const getGitHubOrganizations = async (
  appConnection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  if (appConnection.method === GitHubConnectionMethod.App) {
    const installationRepositories = await makePaginatedGitHubRequest<
      GitHubRepository,
      { repositories: GitHubRepository[] }
    >(appConnection, gatewayService, "/installation/repositories", (data) => data.repositories);

  let organizations: GitHubOrganization[];

  switch (appConnection.method) {
    case GitHubConnectionMethod.App: {
      const installationRepositories = await client.paginate("GET /installation/repositories");

      const organizationMap: Record<string, GitHubOrganization> = {};

      installationRepositories.forEach((repo) => {
        if (repo.owner.type === "Organization") {
          organizationMap[repo.owner.id] = repo.owner;
        }
      });

      organizations = Object.values(organizationMap);

      break;
    }
    case GitHubConnectionMethod.OAuth:
    default:
      organizations = await client.paginate("GET /user/orgs");
      break;
  }

  return organizations;
};

export const getGitHubEnvironments = async (appConnection: TGitHubConnection, owner: string, repo: string) => {
  const client = getGitHubClient(appConnection);

  try {
    const environments = await client.paginate("GET /repos/{owner}/{repo}/environments", {
      owner,
      repo
    const organizationMap: Record<string, GitHubOrganization> = {};
    installationRepositories.forEach((repo) => {
      if (repo.owner.type === "Organization") {
        organizationMap[repo.owner.id] = repo.owner;
      }
    });

    return environments;
  } catch (e) {
    // repo doesn't have envs
    if ((e as { status: number }).status === 404) {
      return [];
    }
    return Object.values(organizationMap);
  }

    throw e;
  return makePaginatedGitHubRequest<GitHubOrganization>(appConnection, gatewayService, "/user/orgs");
};

export const getGitHubEnvironments = async (
  appConnection: TGitHubConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  owner: string,
  repo: string
) => {
  try {
    return await makePaginatedGitHubRequest<GitHubEnvironment, { environments: GitHubEnvironment[] }>(
      appConnection,
      gatewayService,
      `/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/environments`,
      (data) => data.environments
    );
  } catch (error) {
    const axiosError = error as AxiosError;
    if (axiosError.response?.status === 404) return [];
    throw error;
  }
};

@@ -159,9 +266,11 @@ export function isGithubErrorResponse(data: GithubTokenRespData): data is Github
  return "error" in data;
}

export const validateGitHubConnectionCredentials = async (config: TGitHubConnectionConfig) => {
export const validateGitHubConnectionCredentials = async (
  config: TGitHubConnectionConfig,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const { credentials, method } = config;

  const {
    INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID,
    INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET,
@@ -192,10 +301,13 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
  }

  let tokenResp: AxiosResponse<GithubTokenRespData>;
  const host = credentials.host || "github.com";

  try {
    tokenResp = await request.get<GithubTokenRespData>("https://github.com/login/oauth/access_token", {
      params: {
    tokenResp = await requestWithGitHubGateway<GithubTokenRespData>(config, gatewayService, {
      url: `https://${host}/login/oauth/access_token`,
      method: "POST",
      data: {
        client_id: clientId,
        client_secret: clientSecret,
        code: credentials.code,
@@ -203,7 +315,7 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
      },
      headers: {
        Accept: "application/json",
        "Accept-Encoding": "application/json"
        "Content-Type": "application/json"
      }
    });

@@ -233,7 +345,7 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
    throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
  }

  const installationsResp = await request.get<{
  const installationsResp = await requestWithGitHubGateway<{
    installations: {
      id: number;
      account: {
@@ -242,7 +354,8 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
        id: number;
      };
    }[];
  }>(IntegrationUrls.GITHUB_USER_INSTALLATIONS, {
  }>(config, gatewayService, {
    url: IntegrationUrls.GITHUB_USER_INSTALLATIONS.replace("api.github.com", `api.${host}`),
    headers: {
      Accept: "application/json",
      Authorization: `Bearer ${tokenResp.data.access_token}`,

@@ -11,20 +11,24 @@ import {
import { GitHubConnectionMethod } from "./github-connection-enums";

export const GitHubConnectionOAuthInputCredentialsSchema = z.object({
  code: z.string().trim().min(1, "OAuth code required")
  code: z.string().trim().min(1, "OAuth code required"),
  host: z.string().trim().optional()
});

export const GitHubConnectionAppInputCredentialsSchema = z.object({
  code: z.string().trim().min(1, "GitHub App code required"),
  installationId: z.string().min(1, "GitHub App Installation ID required")
  installationId: z.string().min(1, "GitHub App Installation ID required"),
  host: z.string().trim().optional()
});

export const GitHubConnectionOAuthOutputCredentialsSchema = z.object({
  accessToken: z.string()
  accessToken: z.string(),
  host: z.string().trim().optional()
});

export const GitHubConnectionAppOutputCredentialsSchema = z.object({
  installationId: z.string()
  installationId: z.string(),
  host: z.string().trim().optional()
});

export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("method", [
@@ -43,7 +47,9 @@ export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("m
]);

export const CreateGitHubConnectionSchema = ValidateGitHubConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.GitHub)
  GenericCreateAppConnectionFieldsSchema(AppConnection.GitHub, {
    supportsGateways: true
  })
);

export const UpdateGitHubConnectionSchema = z
@@ -53,7 +59,11 @@ export const UpdateGitHubConnectionSchema = z
    .optional()
    .describe(AppConnections.UPDATE(AppConnection.GitHub).credentials)
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHub));
  .and(
    GenericUpdateAppConnectionFieldsSchema(AppConnection.GitHub, {
      supportsGateways: true
    })
  );

const BaseGitHubConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.GitHub) });

@@ -1,3 +1,4 @@
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
@@ -19,11 +20,14 @@ type TListGitHubEnvironmentsDTO = {
  owner: string;
};

export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
export const githubConnectionService = (
  getAppConnection: TGetAppConnectionFunc,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const listRepositories = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);

    const repositories = await getGitHubRepositories(appConnection);
    const repositories = await getGitHubRepositories(appConnection, gatewayService);

    return repositories;
  };
@@ -31,7 +35,7 @@ export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc)
  const listOrganizations = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);

    const organizations = await getGitHubOrganizations(appConnection);
    const organizations = await getGitHubOrganizations(appConnection, gatewayService);

    return organizations;
  };
@@ -42,7 +46,7 @@ export const githubConnectionService = (getAppConnection: TGetAppConnectionFunc)
  ) => {
    const appConnection = await getAppConnection(AppConnection.GitHub, connectionId, actor);

    const environments = await getGitHubEnvironments(appConnection, owner, repo);
    const environments = await getGitHubEnvironments(appConnection, gatewayService, owner, repo);

    return environments;
  };

@@ -17,4 +17,7 @@ export type TGitHubConnectionInput = z.infer<typeof CreateGitHubConnectionSchema

export type TValidateGitHubConnectionCredentialsSchema = typeof ValidateGitHubConnectionCredentialsSchema;

export type TGitHubConnectionConfig = DiscriminativePick<TGitHubConnectionInput, "method" | "app" | "credentials">;
export type TGitHubConnectionConfig = DiscriminativePick<
  TGitHubConnectionInput,
  "method" | "app" | "credentials" | "gatewayId"
>;
@@ -222,6 +222,37 @@ export const validateGitLabConnectionCredentials = async (config: TGitLabConnect
  return inputCredentials;
};

export const getGitLabConnectionClient = async (
  appConnection: TGitLabConnection,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  let { accessToken } = appConnection.credentials;

  if (
    appConnection.method === GitLabConnectionMethod.OAuth &&
    appConnection.credentials.refreshToken &&
    new Date(appConnection.credentials.expiresAt) < new Date()
  ) {
    accessToken = await refreshGitLabToken(
      appConnection.credentials.refreshToken,
      appConnection.id,
      appConnection.orgId,
      appConnectionDAL,
      kmsService,
      appConnection.credentials.instanceUrl
    );
  }

  const client = await getGitLabClient(
    accessToken,
    appConnection.credentials.instanceUrl,
    appConnection.method === GitLabConnectionMethod.OAuth
  );

  return client;
};

export const listGitLabProjects = async ({
  appConnection,
  appConnectionDAL,

@@ -513,6 +513,21 @@ export const orgDALFactory = (db: TDbClient) => {
    }
  };

  const deleteMembershipsById = async (ids: string[], orgId: string, tx?: Knex) => {
    try {
      const memberships = await (tx || db)(TableName.OrgMembership)
        .where({
          orgId
        })
        .whereIn("id", ids)
        .delete()
        .returning("*");
      return memberships;
    } catch (error) {
      throw new DatabaseError({ error, name: "Delete org memberships" });
    }
  };
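The DAL method above combines an orgId guard with whereIn over the membership IDs, so a bulk delete can never reach across organizations. Roughly the knex call in isolation, with the table and column names assumed for illustration (TableName.OrgMembership maps to a real table this sketch does not know):

import { Knex } from "knex";

// Approximately: DELETE FROM org_memberships
//                WHERE org_id = ? AND id IN (?, ?, ...) RETURNING *;
const deleteOrgMembershipsRaw = async (db: Knex, orgId: string, ids: string[]) =>
  db("org_memberships").where({ orgId }).whereIn("id", ids).delete().returning("*");

Returning the deleted rows lets the caller inspect which memberships were user-backed before running the follow-up cleanup inside the same transaction.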
|
||||
|
||||
const findMembership = async (
|
||||
filter: TFindFilter<TOrgMemberships>,
|
||||
{ offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
|
||||
@@ -634,6 +649,7 @@ export const orgDALFactory = (db: TDbClient) => {
|
||||
createMembership,
|
||||
updateMembershipById,
|
||||
deleteMembershipById,
|
||||
deleteMembershipsById,
|
||||
updateMembership
|
||||
});
|
||||
};
|
||||
|
@@ -1,5 +1,6 @@
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
import { BadRequestError } from "@app/lib/errors";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";

@@ -14,6 +15,19 @@ type TDeleteOrgMembership = {
  userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
  licenseService: Pick<TLicenseServiceFactory, "updateSubscriptionOrgMemberCount">;
  projectUserAdditionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
  userId?: string;
};

type TDeleteOrgMemberships = {
  orgMembershipIds: string[];
  orgId: string;
  orgDAL: Pick<TOrgDALFactory, "findMembership" | "deleteMembershipsById" | "transaction">;
  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "delete" | "findProjectMembershipsByUserIds">;
  projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete">;
  userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
  licenseService: Pick<TLicenseServiceFactory, "updateSubscriptionOrgMemberCount">;
  projectUserAdditionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
  userId?: string;
};

export const deleteOrgMembershipFn = async ({
@@ -24,11 +38,17 @@ export const deleteOrgMembershipFn = async ({
  projectUserAdditionalPrivilegeDAL,
  projectKeyDAL,
  userAliasDAL,
  licenseService
  licenseService,
  userId
}: TDeleteOrgMembership) => {
  const deletedMembership = await orgDAL.transaction(async (tx) => {
    const orgMembership = await orgDAL.deleteMembershipById(orgMembershipId, orgId, tx);

    if (userId && orgMembership.userId === userId) {
      // scott: this is temporary, we will add a leave org endpoint with proper handling to ensure org isn't abandoned/broken
      throw new BadRequestError({ message: "You cannot remove yourself from an organization" });
    }

    if (!orgMembership.userId) {
      await licenseService.updateSubscriptionOrgMemberCount(orgId);
      return orgMembership;

@@ -86,3 +106,88 @@ export const deleteOrgMembershipFn = async ({

  return deletedMembership;
};
export const deleteOrgMembershipsFn = async ({
  orgMembershipIds,
  orgId,
  orgDAL,
  projectMembershipDAL,
  projectUserAdditionalPrivilegeDAL,
  projectKeyDAL,
  userAliasDAL,
  licenseService,
  userId
}: TDeleteOrgMemberships) => {
  const deletedMemberships = await orgDAL.transaction(async (tx) => {
    const orgMemberships = await orgDAL.deleteMembershipsById(orgMembershipIds, orgId, tx);

    const membershipUserIds = orgMemberships
      .filter((member) => Boolean(member.userId))
      .map((member) => member.userId) as string[];

    if (userId && membershipUserIds.includes(userId)) {
      // scott: this is temporary, we will add a leave org endpoint with proper handling to ensure org isn't abandoned/broken
      throw new BadRequestError({ message: "You cannot remove yourself from an organization" });
    }

    if (!membershipUserIds.length) {
      await licenseService.updateSubscriptionOrgMemberCount(orgId);
      return orgMemberships;
    }

    await userAliasDAL.delete(
      {
        $in: {
          userId: membershipUserIds
        },
        orgId
      },
      tx
    );

    await projectUserAdditionalPrivilegeDAL.delete(
      {
        $in: {
          userId: membershipUserIds
        }
      },
      tx
    );

    // Get all the project memberships of the users in the organization
    const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserIds(orgId, membershipUserIds);

    // Delete all the project memberships of the users in the organization
    await projectMembershipDAL.delete(
      {
        $in: {
          id: projectMemberships.map((membership) => membership.id)
        }
      },
      tx
    );

    // Get all the project keys of the user in the organization
    const projectKeys = await projectKeyDAL.find({
      $in: {
        projectId: projectMemberships.map((membership) => membership.projectId),
        receiverId: membershipUserIds
      }
    });

    // Delete all the project keys of the user in the organization
    await projectKeyDAL.delete(
      {
        $in: {
          id: projectKeys.map((key) => key.id)
        }
      },
      tx
    );

    await licenseService.updateSubscriptionOrgMemberCount(orgId);
    return orgMemberships;
  });

  return deletedMemberships;
};
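Everything in deleteOrgMembershipsFn runs inside one orgDAL.transaction, so a guard that throws after the delete still rolls the delete back. A hedged sketch of that all-or-nothing shape, with illustrative types standing in for the real DAL interfaces:

// Sketch: any throw inside the transaction callback rolls back the bulk
// delete, so the self-removal guard undoes the delete it follows.
type TOrgMembershipRow = { id: string; userId?: string | null };
type TTx = unknown;

const bulkDeleteAtomically = async (
  deps: {
    transaction: <T>(cb: (tx: TTx) => Promise<T>) => Promise<T>;
    deleteMembershipsById: (ids: string[], orgId: string, tx: TTx) => Promise<TOrgMembershipRow[]>;
  },
  ids: string[],
  orgId: string,
  requesterUserId?: string
) =>
  deps.transaction(async (tx) => {
    const memberships = await deps.deleteMembershipsById(ids, orgId, tx);
    if (requesterUserId && memberships.some((m) => m.userId === requesterUserId)) {
      throw new Error("You cannot remove yourself from an organization"); // rolls everything back
    }
    return memberships;
  });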
@@ -75,10 +75,11 @@ import { TUserDALFactory } from "../user/user-dal";
import { TIncidentContactsDALFactory } from "./incident-contacts-dal";
import { TOrgBotDALFactory } from "./org-bot-dal";
import { TOrgDALFactory } from "./org-dal";
import { deleteOrgMembershipFn } from "./org-fns";
import { deleteOrgMembershipFn, deleteOrgMembershipsFn } from "./org-fns";
import { TOrgRoleDALFactory } from "./org-role-dal";
import {
  TDeleteOrgMembershipDTO,
  TDeleteOrgMembershipsDTO,
  TFindAllWorkspacesDTO,
  TFindOrgMembersByEmailDTO,
  TGetOrgGroupsDTO,

@@ -106,7 +107,13 @@ type TOrgServiceFactoryDep = {
  identityMetadataDAL: Pick<TIdentityMetadataDALFactory, "delete" | "insertMany" | "transaction">;
  projectMembershipDAL: Pick<
    TProjectMembershipDALFactory,
    "findProjectMembershipsByUserId" | "delete" | "create" | "find" | "insertMany" | "transaction"
    | "findProjectMembershipsByUserId"
    | "delete"
    | "create"
    | "find"
    | "insertMany"
    | "transaction"
    | "findProjectMembershipsByUserIds"
  >;
  projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
  orgMembershipDAL: Pick<
@@ -1369,12 +1376,42 @@ export const orgServiceFactory = ({
      projectUserAdditionalPrivilegeDAL,
      projectKeyDAL,
      userAliasDAL,
      licenseService
      licenseService,
      userId
    });

    return deletedMembership;
  };

  const bulkDeleteOrgMemberships = async ({
    orgId,
    userId,
    membershipIds,
    actorAuthMethod,
    actorOrgId
  }: TDeleteOrgMembershipsDTO) => {
    const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Member);

    if (membershipIds.includes(userId)) {
      throw new BadRequestError({ message: "You cannot delete your own organization membership" });
    }

    const deletedMemberships = await deleteOrgMembershipsFn({
      orgMembershipIds: membershipIds,
      orgId,
      orgDAL,
      projectMembershipDAL,
      projectUserAdditionalPrivilegeDAL,
      projectKeyDAL,
      userAliasDAL,
      licenseService,
      userId
    });

    return deletedMemberships;
  };

  const listProjectMembershipsByOrgMembershipId = async ({
    orgMembershipId,
    orgId,

@@ -1528,6 +1565,7 @@ export const orgServiceFactory = ({
    findOrgBySlug,
    resendOrgMemberInvitation,
    upgradePrivilegeSystem,
    notifyInvitedUsers
    notifyInvitedUsers,
    bulkDeleteOrgMemberships
  };
};
@@ -25,6 +25,14 @@ export type TDeleteOrgMembershipDTO = {
  actorAuthMethod: ActorAuthMethod;
};

export type TDeleteOrgMembershipsDTO = {
  userId: string;
  orgId: string;
  membershipIds: string[];
  actorOrgId: string | undefined;
  actorAuthMethod: ActorAuthMethod;
};

export type TInviteUserToOrgDTO = {
  inviteeEmails: string[];
  organizationRoleSlug: string;
@@ -314,11 +314,122 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
    }
  };

  const findProjectMembershipsByUserIds = async (orgId: string, userIds: string[]) => {
    try {
      const docs = await db
        .replicaNode()(TableName.ProjectMembership)
        .join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
        .join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
        .whereIn(`${TableName.Users}.id`, userIds)
        .where(`${TableName.Project}.orgId`, orgId)
        .join<TUserEncryptionKeys>(
          TableName.UserEncryptionKey,
          `${TableName.UserEncryptionKey}.userId`,
          `${TableName.Users}.id`
        )
        .join(
          TableName.ProjectUserMembershipRole,
          `${TableName.ProjectUserMembershipRole}.projectMembershipId`,
          `${TableName.ProjectMembership}.id`
        )
        .leftJoin(
          TableName.ProjectRoles,
          `${TableName.ProjectUserMembershipRole}.customRoleId`,
          `${TableName.ProjectRoles}.id`
        )
        .select(
          db.ref("id").withSchema(TableName.ProjectMembership),
          db.ref("isGhost").withSchema(TableName.Users),
          db.ref("username").withSchema(TableName.Users),
          db.ref("email").withSchema(TableName.Users),
          db.ref("publicKey").withSchema(TableName.UserEncryptionKey),
          db.ref("firstName").withSchema(TableName.Users),
          db.ref("lastName").withSchema(TableName.Users),
          db.ref("id").withSchema(TableName.Users).as("userId"),
          db.ref("role").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("id").withSchema(TableName.ProjectUserMembershipRole).as("membershipRoleId"),
          db.ref("customRoleId").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("name").withSchema(TableName.ProjectRoles).as("customRoleName"),
          db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
          db.ref("temporaryMode").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("isTemporary").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("temporaryRange").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("temporaryAccessStartTime").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
          db.ref("name").as("projectName").withSchema(TableName.Project),
          db.ref("id").as("projectId").withSchema(TableName.Project),
          db.ref("type").as("projectType").withSchema(TableName.Project)
        )
        .where({ isGhost: false });

      const members = sqlNestRelationships({
        data: docs,
        parentMapper: ({
          email,
          firstName,
          username,
          lastName,
          publicKey,
          isGhost,
          id,
          projectId,
          projectName,
          projectType,
          userId
        }) => ({
          id,
          userId,
          projectId,
          user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
          project: {
            id: projectId,
            name: projectName,
            type: projectType
          }
        }),
        key: "id",
        childrenMapper: [
          {
            label: "roles" as const,
            key: "membershipRoleId",
            mapper: ({
              role,
              customRoleId,
              customRoleName,
              customRoleSlug,
              membershipRoleId,
              temporaryRange,
              temporaryMode,
              temporaryAccessEndTime,
              temporaryAccessStartTime,
              isTemporary
            }) => ({
              id: membershipRoleId,
              role,
              customRoleId,
              customRoleName,
              customRoleSlug,
              temporaryRange,
              temporaryMode,
              temporaryAccessEndTime,
              temporaryAccessStartTime,
              isTemporary
            })
          }
        ]
      });
      return members;
    } catch (error) {
      throw new DatabaseError({ error, name: "Find project memberships by user ids" });
    }
  };

  return {
    ...projectMemberOrm,
    findAllProjectMembers,
    findProjectGhostUser,
    findMembershipsByUsername,
    findProjectMembershipsByUserId
    findProjectMembershipsByUserId,
    findProjectMembershipsByUserIds
  };
};
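The joined query returns one flat row per membership role, and sqlNestRelationships folds those rows into one member carrying a roles array. A simplified plain-TypeScript sketch of that fold (not the actual helper, which also dedupes children by key; row fields are trimmed for illustration):

// Fold flat join rows (one per role) into parents keyed by membership id,
// each carrying a roles[] child array. Mirrors the shape of the fold above.
type FlatRow = { id: string; userId: string; membershipRoleId: string; role: string };

const nestRows = (rows: FlatRow[]) => {
  const byId = new Map<string, { id: string; userId: string; roles: { id: string; role: string }[] }>();
  for (const row of rows) {
    const parent = byId.get(row.id) ?? { id: row.id, userId: row.userId, roles: [] };
    parent.roles.push({ id: row.membershipRoleId, role: row.role });
    byId.set(row.id, parent);
  }
  return [...byId.values()];
};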
@@ -645,7 +645,7 @@ export const projectServiceFactory = ({
  const updateProject = async ({ actor, actorId, actorOrgId, actorAuthMethod, update, filter }: TUpdateProjectDTO) => {
    const project = await projectDAL.findProjectByFilter(filter);

    const { permission } = await permissionService.getProjectPermission({
    const { permission, hasRole } = await permissionService.getProjectPermission({
      actor,
      actorId,
      projectId: project.id,

@@ -667,6 +667,12 @@ export const projectServiceFactory = ({
      }
    }

    if (update.secretDetectionIgnoreValues && !hasRole(ProjectMembershipRole.Admin)) {
      throw new ForbiddenRequestError({
        message: "Only admins can update secret detection ignore values"
      });
    }

    const updatedProject = await projectDAL.updateById(project.id, {
      name: update.name,
      description: update.description,

@@ -676,7 +682,8 @@ export const projectServiceFactory = ({
      slug: update.slug,
      secretSharing: update.secretSharing,
      defaultProduct: update.defaultProduct,
      showSnapshotsLegacy: update.showSnapshotsLegacy
      showSnapshotsLegacy: update.showSnapshotsLegacy,
      secretDetectionIgnoreValues: update.secretDetectionIgnoreValues
    });

    return updatedProject;
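updateProject now destructures hasRole alongside permission, so one permission lookup serves both the coarse CASL check and the finer admin-only gate on the new field. A compact sketch of that two-level check, with illustrative names rather than the real service types:

// Two-level authorization sketch: a coarse permission check plus a
// field-level role gate for a sensitive setting.
type Role = "admin" | "member";

const assertCanUpdate = (
  perms: { can: (action: string, subject: string) => boolean; hasRole: (r: Role) => boolean },
  update: { name?: string; secretDetectionIgnoreValues?: string[] }
) => {
  if (!perms.can("edit", "Project")) throw new Error("Forbidden");
  // Sensitive field: require the admin role specifically.
  if (update.secretDetectionIgnoreValues && !perms.hasRole("admin")) {
    throw new Error("Only admins can update secret detection ignore values");
  }
};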
@@ -96,6 +96,7 @@ export type TUpdateProjectDTO = {
    slug?: string;
    secretSharing?: boolean;
    showSnapshotsLegacy?: boolean;
    secretDetectionIgnoreValues?: string[];
  };
} & Omit<TProjectPermission, "projectId">;
@@ -13,7 +13,7 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { TAzureAppConfigurationSyncWithCredentials } from "./azure-app-configuration-sync-types";

type TAzureAppConfigurationSyncFactoryDeps = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

@@ -12,7 +12,7 @@ import { SecretSyncError } from "../secret-sync-errors";
import { GetAzureKeyVaultSecret, TAzureKeyVaultSyncWithCredentials } from "./azure-key-vault-sync-types";

type TAzureKeyVaultSyncFactoryDeps = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
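Both Azure hunks fix the same dependency-shape mismatch: the sync factories now request updateById, matching what the DAL actually exposes. Pick-narrowed dependencies turn such mismatches into compile-time errors; a tiny illustrative sketch (types are assumptions, not the real factories):

// Pick-narrowed dependencies: the factory declares only the DAL methods it
// uses, so renaming update -> updateById in the DAL fails to compile here.
type TAppConnectionDAL = {
  findById: (id: string) => Promise<{ id: string }>;
  updateById: (id: string, patch: object) => Promise<{ id: string }>;
};

type TSyncFactoryDeps = {
  appConnectionDAL: Pick<TAppConnectionDAL, "findById" | "updateById">;
};

const makeSyncFactory = ({ appConnectionDAL }: TSyncFactoryDeps) => ({
  touch: (id: string) => appConnectionDAL.updateById(id, { lastUsedAt: new Date() })
});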
@@ -1,7 +1,12 @@
import { Octokit } from "@octokit/rest";
import sodium from "libsodium-wrappers";

import { getGitHubClient } from "@app/services/app-connection/github";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import {
  getGitHubAppAuthToken,
  GitHubConnectionMethod,
  makePaginatedGitHubRequest,
  requestWithGitHubGateway
} from "@app/services/app-connection/github";
import { GitHubSyncScope, GitHubSyncVisibility } from "@app/services/secret-sync/github/github-sync-enums";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";

@@ -12,155 +17,165 @@ import { TGitHubPublicKey, TGitHubSecret, TGitHubSecretPayload, TGitHubSyncWithC

// TODO: rate limit handling

const getEncryptedSecrets = async (client: Octokit, secretSync: TGitHubSyncWithCredentials) => {
  let encryptedSecrets: TGitHubSecret[];

  const { destinationConfig } = secretSync;
const getEncryptedSecrets = async (
  secretSync: TGitHubSyncWithCredentials,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const { destinationConfig, connection } = secretSync;

  let path: string;
  switch (destinationConfig.scope) {
    case GitHubSyncScope.Organization: {
      encryptedSecrets = await client.paginate("GET /orgs/{org}/actions/secrets", {
        org: destinationConfig.org
      });
      path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets`;
      break;
    }
    case GitHubSyncScope.Repository: {
      encryptedSecrets = await client.paginate("GET /repos/{owner}/{repo}/actions/secrets", {
        owner: destinationConfig.owner,
        repo: destinationConfig.repo
      });

      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets`;
      break;
    }
    case GitHubSyncScope.RepositoryEnvironment:
    default: {
      encryptedSecrets = await client.paginate("GET /repos/{owner}/{repo}/environments/{environment_name}/secrets", {
        owner: destinationConfig.owner,
        repo: destinationConfig.repo,
        environment_name: destinationConfig.env
      });
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets`;
      break;
    }
  }

  return encryptedSecrets;
  return makePaginatedGitHubRequest<TGitHubSecret, { secrets: TGitHubSecret[] }>(
    connection,
    gatewayService,
    path,
    (data) => data.secrets
  );
};

const getPublicKey = async (client: Octokit, secretSync: TGitHubSyncWithCredentials) => {
  let publicKey: TGitHubPublicKey;

  const { destinationConfig } = secretSync;
const getPublicKey = async (
  secretSync: TGitHubSyncWithCredentials,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  token: string
) => {
  const { destinationConfig, connection } = secretSync;

  let path: string;
  switch (destinationConfig.scope) {
    case GitHubSyncScope.Organization: {
      publicKey = (
        await client.request("GET /orgs/{org}/actions/secrets/public-key", {
          org: destinationConfig.org
        })
      ).data;
      path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets/public-key`;
      break;
    }
    case GitHubSyncScope.Repository: {
      publicKey = (
        await client.request("GET /repos/{owner}/{repo}/actions/secrets/public-key", {
          owner: destinationConfig.owner,
          repo: destinationConfig.repo
        })
      ).data;
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets/public-key`;
      break;
    }
    case GitHubSyncScope.RepositoryEnvironment:
    default: {
      publicKey = (
        await client.request("GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key", {
          owner: destinationConfig.owner,
          repo: destinationConfig.repo,
          environment_name: destinationConfig.env
        })
      ).data;
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets/public-key`;
      break;
    }
  }

  return publicKey;
  const response = await requestWithGitHubGateway<TGitHubPublicKey>(connection, gatewayService, {
    url: `https://api.${connection.credentials.host || "github.com"}${path}`,
    method: "GET",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "X-GitHub-Api-Version": "2022-11-28"
    }
  });

  return response.data;
};

const deleteSecret = async (
  client: Octokit,
  secretSync: TGitHubSyncWithCredentials,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  token: string,
  encryptedSecret: TGitHubSecret
) => {
  const { destinationConfig } = secretSync;
  const { destinationConfig, connection } = secretSync;

  let path: string;
  switch (destinationConfig.scope) {
    case GitHubSyncScope.Organization: {
      await client.request(`DELETE /orgs/{org}/actions/secrets/{secret_name}`, {
        org: destinationConfig.org,
        secret_name: encryptedSecret.name
      });
      path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets/${encodeURIComponent(encryptedSecret.name)}`;
      break;
    }
    case GitHubSyncScope.Repository: {
      await client.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
        owner: destinationConfig.owner,
        repo: destinationConfig.repo,
        secret_name: encryptedSecret.name
      });
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets/${encodeURIComponent(encryptedSecret.name)}`;
      break;
    }
    case GitHubSyncScope.RepositoryEnvironment:
    default: {
      await client.request("DELETE /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}", {
        owner: destinationConfig.owner,
        repo: destinationConfig.repo,
        environment_name: destinationConfig.env,
        secret_name: encryptedSecret.name
      });
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets/${encodeURIComponent(encryptedSecret.name)}`;
      break;
    }
  }

  await requestWithGitHubGateway(connection, gatewayService, {
    url: `https://api.${connection.credentials.host || "github.com"}${path}`,
    method: "DELETE",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "X-GitHub-Api-Version": "2022-11-28"
    }
  });
};

const putSecret = async (client: Octokit, secretSync: TGitHubSyncWithCredentials, payload: TGitHubSecretPayload) => {
  const { destinationConfig } = secretSync;
const putSecret = async (
  secretSync: TGitHubSyncWithCredentials,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
  token: string,
  payload: TGitHubSecretPayload
) => {
  const { destinationConfig, connection } = secretSync;

  let path: string;
  let body: Record<string, string | number[]> = payload;

  switch (destinationConfig.scope) {
    case GitHubSyncScope.Organization: {
      const { visibility, selectedRepositoryIds } = destinationConfig;

      await client.request(`PUT /orgs/{org}/actions/secrets/{secret_name}`, {
        org: destinationConfig.org,
      path = `/orgs/${encodeURIComponent(destinationConfig.org)}/actions/secrets/${encodeURIComponent(payload.secret_name)}`;
      body = {
        ...payload,
        visibility,
        ...(visibility === GitHubSyncVisibility.Selected && {
          selected_repository_ids: selectedRepositoryIds
        })
      });
      };
      break;
    }
    case GitHubSyncScope.Repository: {
      await client.request("PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
        owner: destinationConfig.owner,
        repo: destinationConfig.repo,
        ...payload
      });
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/actions/secrets/${encodeURIComponent(payload.secret_name)}`;
      break;
    }
    case GitHubSyncScope.RepositoryEnvironment:
    default: {
      await client.request("PUT /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}", {
        owner: destinationConfig.owner,
        repo: destinationConfig.repo,
        environment_name: destinationConfig.env,
        ...payload
      });
      path = `/repos/${encodeURIComponent(destinationConfig.owner)}/${encodeURIComponent(destinationConfig.repo)}/environments/${encodeURIComponent(destinationConfig.env)}/secrets/${encodeURIComponent(payload.secret_name)}`;
      break;
    }
  }

  await requestWithGitHubGateway(connection, gatewayService, {
    url: `https://api.${connection.credentials.host || "github.com"}${path}`,
    method: "PUT",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "X-GitHub-Api-Version": "2022-11-28"
    },
    data: body
  });
};

export const GithubSyncFns = {
  syncSecrets: async (secretSync: TGitHubSyncWithCredentials, secretMap: TSecretMap) => {
  syncSecrets: async (
    secretSync: TGitHubSyncWithCredentials,
    ogSecretMap: TSecretMap,
    gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
  ) => {
    const secretMap = Object.fromEntries(Object.entries(ogSecretMap).map(([i, v]) => [i.toUpperCase(), v]));

    switch (secretSync.destinationConfig.scope) {
      case GitHubSyncScope.Organization:
        if (Object.values(secretMap).length > 1000) {

@@ -187,38 +202,40 @@ export const GithubSyncFns = {
      );
    }

    const client = getGitHubClient(secretSync.connection);
    const { connection } = secretSync;
    const token =
      connection.method === GitHubConnectionMethod.OAuth
        ? connection.credentials.accessToken
        : await getGitHubAppAuthToken(connection);

    const encryptedSecrets = await getEncryptedSecrets(client, secretSync);
    const encryptedSecrets = await getEncryptedSecrets(secretSync, gatewayService);
    const publicKey = await getPublicKey(secretSync, gatewayService, token);

    const publicKey = await getPublicKey(client, secretSync);
    await sodium.ready;
    for await (const key of Object.keys(secretMap)) {
      // convert secret & base64 key to Uint8Array.
      const binaryKey = sodium.from_base64(publicKey.key, sodium.base64_variants.ORIGINAL);
      const binarySecretValue = sodium.from_string(secretMap[key].value);

    await sodium.ready.then(async () => {
      for await (const key of Object.keys(secretMap)) {
        // convert secret & base64 key to Uint8Array.
        const binaryKey = sodium.from_base64(publicKey.key, sodium.base64_variants.ORIGINAL);
        const binarySecretValue = sodium.from_string(secretMap[key].value);
      // encrypt secret using libsodium
      const encryptedBytes = sodium.crypto_box_seal(binarySecretValue, binaryKey);

        // encrypt secret using libsodium
        const encryptedBytes = sodium.crypto_box_seal(binarySecretValue, binaryKey);
      // convert encrypted Uint8Array to base64
      const encryptedSecretValue = sodium.to_base64(encryptedBytes, sodium.base64_variants.ORIGINAL);

        // convert encrypted Uint8Array to base64
        const encryptedSecretValue = sodium.to_base64(encryptedBytes, sodium.base64_variants.ORIGINAL);

        try {
          await putSecret(client, secretSync, {
            secret_name: key,
            encrypted_value: encryptedSecretValue,
            key_id: publicKey.key_id
          });
        } catch (error) {
          throw new SecretSyncError({
            error,
            secretKey: key
          });
        }
      try {
        await putSecret(secretSync, gatewayService, token, {
          secret_name: key,
          encrypted_value: encryptedSecretValue,
          key_id: publicKey.key_id
        });
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: key
        });
      }
    });
    }

    if (secretSync.syncOptions.disableSecretDeletion) return;

@@ -228,21 +245,31 @@ export const GithubSyncFns = {
        continue;

      if (!(encryptedSecret.name in secretMap)) {
        await deleteSecret(client, secretSync, encryptedSecret);
        await deleteSecret(secretSync, gatewayService, token, encryptedSecret);
      }
    }
  },
  getSecrets: async (secretSync: TGitHubSyncWithCredentials) => {
    throw new Error(`${SECRET_SYNC_NAME_MAP[secretSync.destination]} does not support importing secrets.`);
  },
  removeSecrets: async (secretSync: TGitHubSyncWithCredentials, secretMap: TSecretMap) => {
    const client = getGitHubClient(secretSync.connection);
  removeSecrets: async (
    secretSync: TGitHubSyncWithCredentials,
    ogSecretMap: TSecretMap,
    gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
  ) => {
    const secretMap = Object.fromEntries(Object.entries(ogSecretMap).map(([i, v]) => [i.toUpperCase(), v]));

    const encryptedSecrets = await getEncryptedSecrets(client, secretSync);
    const { connection } = secretSync;
    const token =
      connection.method === GitHubConnectionMethod.OAuth
        ? connection.credentials.accessToken
        : await getGitHubAppAuthToken(connection);

    const encryptedSecrets = await getEncryptedSecrets(secretSync, gatewayService);

    for await (const encryptedSecret of encryptedSecrets) {
      if (encryptedSecret.name in secretMap) {
        await deleteSecret(client, secretSync, encryptedSecret);
        await deleteSecret(secretSync, gatewayService, token, encryptedSecret);
      }
    }
  }
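The sync loop above seals each value to the repository's public key with libsodium before upload, which is what the GitHub Actions secrets API requires. A standalone sketch of just that encryption step, using the same libsodium-wrappers calls as the diff (the HTTP upload itself is omitted):

import sodium from "libsodium-wrappers";

// Seal a secret value to a GitHub Actions public key (base64-encoded, as
// returned by the .../secrets/public-key endpoints used above).
const sealForGitHub = async (publicKeyB64: string, secretValue: string): Promise<string> => {
  await sodium.ready; // libsodium must finish initializing before use
  const binaryKey = sodium.from_base64(publicKeyB64, sodium.base64_variants.ORIGINAL);
  const binaryValue = sodium.from_string(secretValue);
  const sealed = sodium.crypto_box_seal(binaryValue, binaryKey); // anonymous sealed box
  return sodium.to_base64(sealed, sodium.base64_variants.ORIGINAL);
};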
@@ -1,6 +1,7 @@
import { AxiosError } from "axios";
import handlebars from "handlebars";

import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OCI_VAULT_SYNC_LIST_OPTION, OCIVaultSyncFns } from "@app/ee/services/secret-sync/oci-vault";
import { BadRequestError } from "@app/lib/errors";

@@ -97,6 +98,7 @@ export const listSecretSyncOptions = () => {
type TSyncSecretDeps = {
  appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};

// Add schema to secret keys

@@ -191,7 +193,7 @@ export const SecretSyncFns = {
  syncSecrets: (
    secretSync: TSecretSyncWithCredentials,
    secretMap: TSecretMap,
    { kmsService, appConnectionDAL }: TSyncSecretDeps
    { kmsService, appConnectionDAL, gatewayService }: TSyncSecretDeps
  ): Promise<void> => {
    const schemaSecretMap = addSchema(secretMap, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema);

@@ -201,7 +203,7 @@ export const SecretSyncFns = {
      case SecretSync.AWSSecretsManager:
        return AwsSecretsManagerSyncFns.syncSecrets(secretSync, schemaSecretMap);
      case SecretSync.GitHub:
        return GithubSyncFns.syncSecrets(secretSync, schemaSecretMap);
        return GithubSyncFns.syncSecrets(secretSync, schemaSecretMap, gatewayService);
      case SecretSync.GCPSecretManager:
        return GcpSyncFns.syncSecrets(secretSync, schemaSecretMap);
      case SecretSync.AzureKeyVault:

@@ -395,7 +397,7 @@ export const SecretSyncFns = {
  removeSecrets: (
    secretSync: TSecretSyncWithCredentials,
    secretMap: TSecretMap,
    { kmsService, appConnectionDAL }: TSyncSecretDeps
    { kmsService, appConnectionDAL, gatewayService }: TSyncSecretDeps
  ): Promise<void> => {
    const schemaSecretMap = addSchema(secretMap, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema);

@@ -405,7 +407,7 @@ export const SecretSyncFns = {
      case SecretSync.AWSSecretsManager:
        return AwsSecretsManagerSyncFns.removeSecrets(secretSync, schemaSecretMap);
      case SecretSync.GitHub:
        return GithubSyncFns.removeSecrets(secretSync, schemaSecretMap);
        return GithubSyncFns.removeSecrets(secretSync, schemaSecretMap, gatewayService);
      case SecretSync.GCPSecretManager:
        return GcpSyncFns.removeSecrets(secretSync, schemaSecretMap);
      case SecretSync.AzureKeyVault:
@@ -4,6 +4,7 @@ import { Job } from "bullmq";

import { ProjectMembershipRole, SecretType } from "@app/db/schemas";
import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";

@@ -96,6 +97,7 @@ type TSecretSyncQueueFactoryDep = {
  resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
  folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};

type SecretSyncActionJob = Job<

@@ -138,7 +140,8 @@ export const secretSyncQueueFactory = ({
  secretVersionTagV2BridgeDAL,
  resourceMetadataDAL,
  folderCommitService,
  licenseService
  licenseService,
  gatewayService
}: TSecretSyncQueueFactoryDep) => {
  const appCfg = getConfig();

@@ -353,7 +356,8 @@ export const secretSyncQueueFactory = ({

    const importedSecrets = await SecretSyncFns.getSecrets(secretSync, {
      appConnectionDAL,
      kmsService
      kmsService,
      gatewayService
    });

    if (!Object.keys(importedSecrets).length) return {};

@@ -481,7 +485,8 @@ export const secretSyncQueueFactory = ({

      await SecretSyncFns.syncSecrets(secretSyncWithCredentials, secretMap, {
        appConnectionDAL,
        kmsService
        kmsService,
        gatewayService
      });

      isSynced = true;

@@ -730,7 +735,8 @@ export const secretSyncQueueFactory = ({
        secretMap,
        {
          appConnectionDAL,
          kmsService
          kmsService,
          gatewayService
        }
      );
@@ -25,6 +25,7 @@ import {
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { DatabaseErrorCode } from "@app/lib/error-codes";

@@ -38,6 +39,7 @@ import { ActorType } from "../auth/auth-type";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TReminderServiceFactory } from "../reminder/reminder-types";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";

@@ -88,6 +90,7 @@ import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal";

type TSecretV2BridgeServiceFactoryDep = {
  secretDAL: TSecretV2BridgeDALFactory;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  secretVersionDAL: TSecretVersionV2DALFactory;
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
  secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;

@@ -126,6 +129,7 @@ export type TSecretV2BridgeServiceFactory = ReturnType<typeof secretV2BridgeServ
 */
export const secretV2BridgeServiceFactory = ({
  secretDAL,
  projectDAL,
  projectEnvDAL,
  secretTagDAL,
  secretVersionDAL,

@@ -295,6 +299,19 @@ export const secretV2BridgeServiceFactory = ({
      })
    );

    const project = await projectDAL.findById(projectId);
    await scanSecretPolicyViolations(
      projectId,
      secretPath,
      [
        {
          secretKey: inputSecret.secretName,
          secretValue: inputSecret.secretValue
        }
      ],
      project.secretDetectionIgnoreValues || []
    );

    const { nestedReferences, localReferences } = getAllSecretReferences(inputSecret.secretValue);
    const allSecretReferences = nestedReferences.concat(
      localReferences.map((el) => ({ secretKey: el, secretPath, environment }))

@@ -506,6 +523,21 @@ export const secretV2BridgeServiceFactory = ({

    const { secretName, secretValue } = inputSecret;

    if (secretValue) {
      const project = await projectDAL.findById(projectId);
      await scanSecretPolicyViolations(
        projectId,
        secretPath,
        [
          {
            secretKey: inputSecret.newSecretName || secretName,
            secretValue
          }
        ],
        project.secretDetectionIgnoreValues || []
      );
    }

    const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId

@@ -1585,6 +1617,9 @@ export const secretV2BridgeServiceFactory = ({
    if (secrets.length)
      throw new BadRequestError({ message: `Secret already exist: ${secrets.map((el) => el.key).join(",")}` });

    const project = await projectDAL.findById(projectId);
    await scanSecretPolicyViolations(projectId, secretPath, inputSecrets, project.secretDetectionIgnoreValues || []);

    // get all tags
    const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
    const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];

@@ -1925,6 +1960,19 @@ export const secretV2BridgeServiceFactory = ({
    });
    await $validateSecretReferences(projectId, permission, secretReferences, tx);

    const project = await projectDAL.findById(projectId);
    await scanSecretPolicyViolations(
      projectId,
      secretPath,
      secretsToUpdate
        .filter((el) => el.secretValue)
        .map((el) => ({
          secretKey: el.newSecretName || el.secretKey,
          secretValue: el.secretValue as string
        })),
      project.secretDetectionIgnoreValues || []
    );

    const bulkUpdatedSecrets = await fnSecretBulkUpdate({
      folderId,
      orgId: actorOrgId,
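Each write path above follows the same recipe: load the project, map the incoming secrets to { secretKey, secretValue } pairs, and pass the project's ignore list to the scanner. A hedged sketch of that shared shape, with scanFn standing in for scanSecretPolicyViolations and illustrative types:

// Shared pre-write scan shape used by the create/update paths above.
type SecretInput = { secretKey: string; secretValue?: string };

const scanBeforeWrite = async (
  scanFn: (
    projectId: string,
    path: string,
    secrets: { secretKey: string; secretValue: string }[],
    ignoreValues: string[]
  ) => Promise<void>,
  project: { id: string; secretDetectionIgnoreValues?: string[] | null },
  secretPath: string,
  inputs: SecretInput[]
) =>
  scanFn(
    project.id,
    secretPath,
    inputs
      .filter((s) => s.secretValue) // only scan secrets that carry a value
      .map((s) => ({ secretKey: s.secretKey, secretValue: s.secretValue as string })),
    project.secretDetectionIgnoreValues || []
  );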
4 cli/.gitignore vendored
@@ -1,4 +0,0 @@
.infisical.json
dist/
agent-config.test.yaml
.test.env
@@ -1,3 +0,0 @@
bea0ff6e05a4de73a5db625d4ae181a015b50855:frontend/components/utilities/attemptLogin.js:stripe-access-token:147
bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5
1961b92340e5d2613acae528b886c842427ce5d0:frontend/components/utilities/attemptLogin.js:stripe-access-token:148
@@ -1,37 +0,0 @@
infisical:
  address: "https://app.infisical.com/"
auth:
  type: "universal-auth"
  config:
    client-id: "./client-id"
    client-secret: "./client-secret"
    remove_client_secret_on_read: false
sinks:
  - type: "file"
    config:
      path: "access-token"
templates:
  - template-content: |
      {{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
      {{- range . }}
      {{ .Key }}={{ .Value }}
      {{- end }}
      {{- end }}
    destination-path: my-dot-env-0.env
    config:
      polling-interval: 60s
      execute:
        command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d

  - base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
    destination-path: my-dot-env.env
    config:
      polling-interval: 60s
      execute:
        command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d

  - source-path: my-dot-ev-secret-template1
    destination-path: my-dot-env-1.env
    config:
      exec:
        command: mkdir hello-world1
@@ -1,103 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "encoding/json"
    "fmt"
    "os"
    "path/filepath"

    "github.com/Infisical/infisical-merge/detect/report"
)

func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
    // Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
    // the code requires maintenance if/when the Finding struct changes
    for _, b := range baseline {
        if finding.RuleID == b.RuleID &&
            finding.Description == b.Description &&
            finding.StartLine == b.StartLine &&
            finding.EndLine == b.EndLine &&
            finding.StartColumn == b.StartColumn &&
            finding.EndColumn == b.EndColumn &&
            (redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
            finding.File == b.File &&
            finding.Commit == b.Commit &&
            finding.Author == b.Author &&
            finding.Email == b.Email &&
            finding.Date == b.Date &&
            finding.Message == b.Message &&
            // Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
            finding.Entropy == b.Entropy {
            return false
        }
    }
    return true
}

func LoadBaseline(baselinePath string) ([]report.Finding, error) {
    bytes, err := os.ReadFile(baselinePath)
    if err != nil {
        return nil, fmt.Errorf("could not open %s", baselinePath)
    }

    var previousFindings []report.Finding
    err = json.Unmarshal(bytes, &previousFindings)
    if err != nil {
        return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
    }

    return previousFindings, nil
}

func (d *Detector) AddBaseline(baselinePath string, source string) error {
    if baselinePath != "" {
        absoluteSource, err := filepath.Abs(source)
        if err != nil {
            return err
        }

        absoluteBaseline, err := filepath.Abs(baselinePath)
        if err != nil {
            return err
        }

        relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
        if err != nil {
            return err
        }

        baseline, err := LoadBaseline(baselinePath)
        if err != nil {
            return err
        }

        d.baseline = baseline
        baselinePath = relativeBaseline

    }

    d.baselinePath = baselinePath
    return nil
}
@@ -1,70 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package scm

import (
    "fmt"
    "strings"
)

type Platform int

const (
    UnknownPlatform Platform = iota
    NoPlatform // Explicitly disable the feature
    GitHubPlatform
    GitLabPlatform
    AzureDevOpsPlatform
    BitBucketPlatform
    // TODO: Add others.
)

func (p Platform) String() string {
    return [...]string{
        "unknown",
        "none",
        "github",
        "gitlab",
        "azuredevops",
        "bitbucket",
    }[p]
}

func PlatformFromString(s string) (Platform, error) {
    switch strings.ToLower(s) {
    case "", "unknown":
        return UnknownPlatform, nil
    case "none":
        return NoPlatform, nil
    case "github":
        return GitHubPlatform, nil
    case "gitlab":
        return GitLabPlatform, nil
    case "azuredevops":
        return AzureDevOpsPlatform, nil
    case "bitbucket":
        return BitBucketPlatform, nil
    default:
        return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
    }
}
@@ -1,159 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
    "fmt"
    "strings"

    "golang.org/x/exp/maps"

    "github.com/Infisical/infisical-merge/detect/regexp"
)

type AllowlistMatchCondition int

const (
    AllowlistMatchOr AllowlistMatchCondition = iota
    AllowlistMatchAnd
)

func (a AllowlistMatchCondition) String() string {
    return [...]string{
        "OR",
        "AND",
    }[a]
}

// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
    // Short human readable description of the allowlist.
    Description string

    // MatchCondition determines whether all criteria must match.
    MatchCondition AllowlistMatchCondition

    // Commits is a slice of commit SHAs that are allowed to be ignored. Defaults to "OR".
    Commits []string

    // Paths is a slice of path regular expressions that are allowed to be ignored.
    Paths []*regexp.Regexp

    // Can be `match` or `line`.
    //
    // If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
    //
    // If `line` the _Regexes_ will be tested against the entire line.
    //
    // If RegexTarget is empty, it will be tested against the found secret.
    RegexTarget string

    // Regexes is slice of content regular expressions that are allowed to be ignored.
    Regexes []*regexp.Regexp

    // StopWords is a slice of stop words that are allowed to be ignored.
    // This targets the _secret_, not the content of the regex match like the
    // Regexes slice.
    StopWords []string

    // validated is an internal flag to track whether `Validate()` has been called.
    validated bool
}

func (a *Allowlist) Validate() error {
    if a.validated {
        return nil
    }

    // Disallow empty allowlists.
    if len(a.Commits) == 0 &&
        len(a.Paths) == 0 &&
        len(a.Regexes) == 0 &&
        len(a.StopWords) == 0 {
        return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
    }

    // Deduplicate commits and stopwords.
    if len(a.Commits) > 0 {
        uniqueCommits := make(map[string]struct{})
        for _, commit := range a.Commits {
            uniqueCommits[commit] = struct{}{}
        }
        a.Commits = maps.Keys(uniqueCommits)
    }
    if len(a.StopWords) > 0 {
        uniqueStopwords := make(map[string]struct{})
        for _, stopWord := range a.StopWords {
            uniqueStopwords[stopWord] = struct{}{}
        }
        a.StopWords = maps.Keys(uniqueStopwords)
    }

    a.validated = true
    return nil
}

// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
    if a == nil || c == "" {
        return false, ""
    }

    for _, commit := range a.Commits {
        if commit == c {
            return true, c
        }
    }
    return false, ""
}

// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
    if a == nil || path == "" {
        return false
    }
    return anyRegexMatch(path, a.Paths)
}

// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(secret string) bool {
    if a == nil || secret == "" {
        return false
    }
    return anyRegexMatch(secret, a.Regexes)
}

func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
    if a == nil || s == "" {
        return false, ""
    }

    s = strings.ToLower(s)
    for _, stopWord := range a.StopWords {
        if strings.Contains(s, strings.ToLower(stopWord)) {
            return true, stopWord
        }
    }
    return false, ""
}
@@ -1,426 -0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
    _ "embed"
    "errors"
    "fmt"
    "sort"
    "strings"

    "github.com/spf13/viper"

    "github.com/Infisical/infisical-merge/detect/logging"
    "github.com/Infisical/infisical-merge/detect/regexp"
)

const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"

var (
    //go:embed gitleaks.toml
    DefaultConfig string

    // use to keep track of how many configs we can extend
    // yea I know, globals bad
    extendDepth int
)

const maxExtendDepth = 2

// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
    Title       string
    Description string
    Extend      Extend
    Rules []struct {
        ID          string
        Description string
        Path        string
        Regex       string
        SecretGroup int
        Entropy     float64
        Keywords    []string
        Tags        []string

        // Deprecated: this is a shim for backwards-compatibility.
        // TODO: Remove this in 9.x.
        AllowList  *viperRuleAllowlist
        Allowlists []*viperRuleAllowlist
    }
    // Deprecated: this is a shim for backwards-compatibility.
    // TODO: Remove this in 9.x.
    AllowList  *viperGlobalAllowlist
    Allowlists []*viperGlobalAllowlist
}

type viperRuleAllowlist struct {
    Description string
    Condition   string
    Commits     []string
    Paths       []string
    RegexTarget string
    Regexes     []string
    StopWords   []string
}

type viperGlobalAllowlist struct {
    TargetRules        []string
    viperRuleAllowlist `mapstructure:",squash"`
}

// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
    Title       string
    Extend      Extend
    Path        string
    Description string
    Rules       map[string]Rule
    Keywords    map[string]struct{}
    // used to keep sarif results consistent
    OrderedRules []string
    Allowlists   []*Allowlist
}

// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
    Path          string
    URL           string
    UseDefault    bool
    DisabledRules []string
}

func (vc *ViperConfig) Translate() (Config, error) {
    var (
        keywords       = make(map[string]struct{})
        orderedRules   []string
        rulesMap       = make(map[string]Rule)
        ruleAllowlists = make(map[string][]*Allowlist)
    )

    // Validate individual rules.
    for _, vr := range vc.Rules {
        var (
            pathPat  *regexp.Regexp
            regexPat *regexp.Regexp
        )
        if vr.Path != "" {
            pathPat = regexp.MustCompile(vr.Path)
        }
        if vr.Regex != "" {
            regexPat = regexp.MustCompile(vr.Regex)
        }
        if vr.Keywords == nil {
            vr.Keywords = []string{}
        } else {
            for i, k := range vr.Keywords {
                keyword := strings.ToLower(k)
                keywords[keyword] = struct{}{}
                vr.Keywords[i] = keyword
            }
        }
        if vr.Tags == nil {
            vr.Tags = []string{}
        }
        cr := Rule{
            RuleID:      vr.ID,
            Description: vr.Description,
            Regex:       regexPat,
            SecretGroup: vr.SecretGroup,
            Entropy:     vr.Entropy,
            Path:        pathPat,
            Keywords:    vr.Keywords,
            Tags:        vr.Tags,
        }

        // Parse the rule allowlists, including the older format for backwards compatibility.
        if vr.AllowList != nil {
            // TODO: Remove this in v9.
            if len(vr.Allowlists) > 0 {
                return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
            }
            vr.Allowlists = append(vr.Allowlists, vr.AllowList)
        }
        for _, a := range vr.Allowlists {
            allowlist, err := parseAllowlist(a)
            if err != nil {
                return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
            }
            cr.Allowlists = append(cr.Allowlists, allowlist)
        }
        orderedRules = append(orderedRules, cr.RuleID)
        rulesMap[cr.RuleID] = cr
    }

    // Assemble the config.
    c := Config{
        Title:        vc.Title,
        Description:  vc.Description,
        Extend:       vc.Extend,
        Rules:        rulesMap,
        Keywords:     keywords,
        OrderedRules: orderedRules,
    }
    // Parse the config allowlists, including the older format for backwards compatibility.
    if vc.AllowList != nil {
        // TODO: Remove this in v9.
        if len(vc.Allowlists) > 0 {
            return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
        }
        vc.Allowlists = append(vc.Allowlists, vc.AllowList)
    }
    for _, a := range vc.Allowlists {
        allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
        if err != nil {
            return Config{}, fmt.Errorf("[[allowlists]] %w", err)
        }
        // Allowlists with |targetRules| aren't added to the global list.
        if len(a.TargetRules) > 0 {
            for _, ruleID := range a.TargetRules {
                // It's not possible to validate |ruleID| until after extend.
                ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
            }
        } else {
            c.Allowlists = append(c.Allowlists, allowlist)
        }
    }
|
||||
if maxExtendDepth != extendDepth {
|
||||
// disallow both usedefault and path from being set
|
||||
if c.Extend.Path != "" && c.Extend.UseDefault {
|
||||
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
|
||||
}
|
||||
if c.Extend.UseDefault {
|
||||
if err := c.extendDefault(); err != nil {
|
||||
return Config{}, err
|
||||
}
|
||||
} else if c.Extend.Path != "" {
|
||||
if err := c.extendPath(); err != nil {
|
||||
return Config{}, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate the rules after everything has been assembled (including extended configs).
|
||||
if extendDepth == 0 {
|
||||
for _, rule := range c.Rules {
|
||||
if err := rule.Validate(); err != nil {
|
||||
return Config{}, err
|
||||
}
|
||||
}
|
||||
|
||||
// Populate targeted configs.
|
||||
for ruleID, allowlists := range ruleAllowlists {
|
||||
rule, ok := c.Rules[ruleID]
|
||||
if !ok {
|
||||
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
|
||||
}
|
||||
rule.Allowlists = append(rule.Allowlists, allowlists...)
|
||||
c.Rules[ruleID] = rule
|
||||
}
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
|
||||
var matchCondition AllowlistMatchCondition
|
||||
switch strings.ToUpper(a.Condition) {
|
||||
case "AND", "&&":
|
||||
matchCondition = AllowlistMatchAnd
|
||||
case "", "OR", "||":
|
||||
matchCondition = AllowlistMatchOr
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
|
||||
}
|
||||
|
||||
// Validate the target.
|
||||
regexTarget := a.RegexTarget
|
||||
if regexTarget != "" {
|
||||
switch regexTarget {
|
||||
case "secret":
|
||||
regexTarget = ""
|
||||
case "match", "line":
|
||||
// do nothing
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
|
||||
}
|
||||
}
|
||||
var allowlistRegexes []*regexp.Regexp
|
||||
for _, a := range a.Regexes {
|
||||
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
|
||||
}
|
||||
var allowlistPaths []*regexp.Regexp
|
||||
for _, a := range a.Paths {
|
||||
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
|
||||
}
|
||||
|
||||
allowlist := &Allowlist{
|
||||
Description: a.Description,
|
||||
MatchCondition: matchCondition,
|
||||
Commits: a.Commits,
|
||||
Paths: allowlistPaths,
|
||||
RegexTarget: regexTarget,
|
||||
Regexes: allowlistRegexes,
|
||||
StopWords: a.StopWords,
|
||||
}
|
||||
if err := allowlist.Validate(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return allowlist, nil
|
||||
}
|
||||
|
||||
func (c *Config) GetOrderedRules() []Rule {
|
||||
var orderedRules []Rule
|
||||
for _, id := range c.OrderedRules {
|
||||
if _, ok := c.Rules[id]; ok {
|
||||
orderedRules = append(orderedRules, c.Rules[id])
|
||||
}
|
||||
}
|
||||
return orderedRules
|
||||
}
|
||||
|
||||
func (c *Config) extendDefault() error {
|
||||
extendDepth++
|
||||
viper.SetConfigType("toml")
|
||||
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
|
||||
return fmt.Errorf("failed to load extended default config, err: %w", err)
|
||||
}
|
||||
defaultViperConfig := ViperConfig{}
|
||||
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
|
||||
return fmt.Errorf("failed to load extended default config, err: %w", err)
|
||||
}
|
||||
cfg, err := defaultViperConfig.Translate()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to load extended default config, err: %w", err)
|
||||
|
||||
}
|
||||
logging.Debug().Msg("extending config with default config")
|
||||
c.extend(cfg)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Config) extendPath() error {
|
||||
extendDepth++
|
||||
viper.SetConfigFile(c.Extend.Path)
|
||||
if err := viper.ReadInConfig(); err != nil {
|
||||
return fmt.Errorf("failed to load extended config, err: %w", err)
|
||||
}
|
||||
extensionViperConfig := ViperConfig{}
|
||||
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
|
||||
return fmt.Errorf("failed to load extended config, err: %w", err)
|
||||
}
|
||||
cfg, err := extensionViperConfig.Translate()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to load extended config, err: %w", err)
|
||||
}
|
||||
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
|
||||
c.extend(cfg)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Config) extendURL() {
|
||||
// TODO
|
||||
}
|
||||
|
||||
func (c *Config) extend(extensionConfig Config) {
|
||||
// Get config name for helpful log messages.
|
||||
var configName string
|
||||
if c.Extend.Path != "" {
|
||||
configName = c.Extend.Path
|
||||
} else {
|
||||
configName = "default"
|
||||
}
|
||||
// Convert |Config.DisabledRules| into a map for ease of access.
|
||||
disabledRuleIDs := map[string]struct{}{}
|
||||
for _, id := range c.Extend.DisabledRules {
|
||||
if _, ok := extensionConfig.Rules[id]; !ok {
|
||||
logging.Warn().
|
||||
Str("rule-id", id).
|
||||
Str("config", configName).
|
||||
Msg("Disabled rule doesn't exist in extended config.")
|
||||
}
|
||||
disabledRuleIDs[id] = struct{}{}
|
||||
}
|
||||
|
||||
for ruleID, baseRule := range extensionConfig.Rules {
|
||||
// Skip the rule.
|
||||
if _, ok := disabledRuleIDs[ruleID]; ok {
|
||||
logging.Debug().
|
||||
Str("rule-id", ruleID).
|
||||
Str("config", configName).
|
||||
Msg("Ignoring rule from extended config.")
|
||||
continue
|
||||
}
|
||||
|
||||
currentRule, ok := c.Rules[ruleID]
|
||||
if !ok {
|
||||
// Rule doesn't exist, add it to the config.
|
||||
c.Rules[ruleID] = baseRule
|
||||
for _, k := range baseRule.Keywords {
|
||||
c.Keywords[k] = struct{}{}
|
||||
}
|
||||
c.OrderedRules = append(c.OrderedRules, ruleID)
|
||||
} else {
|
||||
// Rule exists, merge our changes into the base.
|
||||
if currentRule.Description != "" {
|
||||
baseRule.Description = currentRule.Description
|
||||
}
|
||||
if currentRule.Entropy != 0 {
|
||||
baseRule.Entropy = currentRule.Entropy
|
||||
}
|
||||
if currentRule.SecretGroup != 0 {
|
||||
baseRule.SecretGroup = currentRule.SecretGroup
|
||||
}
|
||||
if currentRule.Regex != nil {
|
||||
baseRule.Regex = currentRule.Regex
|
||||
}
|
||||
if currentRule.Path != nil {
|
||||
baseRule.Path = currentRule.Path
|
||||
}
|
||||
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
|
||||
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
|
||||
for _, a := range currentRule.Allowlists {
|
||||
baseRule.Allowlists = append(baseRule.Allowlists, a)
|
||||
}
|
||||
// The keywords from the base rule and the extended rule must be merged into the global keywords list
|
||||
for _, k := range baseRule.Keywords {
|
||||
c.Keywords[k] = struct{}{}
|
||||
}
|
||||
c.Rules[ruleID] = baseRule
|
||||
}
|
||||
}
|
||||
|
||||
// append allowlists, not attempting to merge
|
||||
for _, a := range extensionConfig.Allowlists {
|
||||
c.Allowlists = append(c.Allowlists, a)
|
||||
}
|
||||
|
||||
// sort to keep extended rules in order
|
||||
sort.Strings(c.OrderedRules)
|
||||
}
|
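For orientation, here is a minimal sketch of how the `Translate` flow above is typically driven from a caller: read a TOML scan config with Viper, unmarshal it into a `ViperConfig`, then translate it into the runtime `Config`. The import path and file name mirror the constants in this file; the `main` wrapper and the assumption that the package is importable from the caller's module are illustrative only.

    package main

    import (
        "fmt"
        "log"

        "github.com/Infisical/infisical-merge/detect/config"
        "github.com/spf13/viper"
    )

    func main() {
        // Read the scan config from the working directory, mirroring extendPath above.
        viper.SetConfigFile(config.DefaultScanConfigFileName)
        if err := viper.ReadInConfig(); err != nil {
            log.Fatal(err)
        }

        // Unmarshal into the intermediary struct, then compile regexes and
        // allowlists via Translate.
        var vc config.ViperConfig
        if err := viper.Unmarshal(&vc); err != nil {
            log.Fatal(err)
        }
        cfg, err := vc.Translate()
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("loaded %d rules\n", len(cfg.Rules))
    }

This is the same Viper read/unmarshal/translate sequence that `extendDefault` and `extendPath` perform internally when a config extends another.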
File diff suppressed because it is too large
@@ -1,114 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
    "fmt"
    "strings"

    "github.com/Infisical/infisical-merge/detect/regexp"
)

// Rule contains information that defines how to detect secrets
type Rule struct {
    // RuleID is a unique identifier for this rule
    RuleID string

    // Description is the description of the rule.
    Description string

    // Entropy is a float representing the minimum shannon
    // entropy a regex group must have to be considered a secret.
    Entropy float64

    // SecretGroup is an int used to extract secret from regex
    // match and used as the group that will have its entropy
    // checked if `entropy` is set.
    SecretGroup int

    // Regex is a golang regular expression used to detect secrets.
    Regex *regexp.Regexp

    // Path is a golang regular expression used to
    // filter secrets by path
    Path *regexp.Regexp

    // Tags is an array of strings used for metadata
    // and reporting purposes.
    Tags []string

    // Keywords are used for pre-regex check filtering. Rules that contain
    // keywords will perform a quick string compare check to make sure the
    // keyword(s) are in the content being scanned.
    Keywords []string

    // Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
    Allowlists []*Allowlist

    // validated is an internal flag to track whether `Validate()` has been called.
    validated bool
}

// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
    if r.validated {
        return nil
    }

    // Ensure |id| is present.
    if strings.TrimSpace(r.RuleID) == "" {
        // Try to provide helpful context, since |id| is empty.
        var context string
        if r.Regex != nil {
            context = ", regex: " + r.Regex.String()
        } else if r.Path != nil {
            context = ", path: " + r.Path.String()
        } else if r.Description != "" {
            context = ", description: " + r.Description
        }
        return fmt.Errorf("rule |id| is missing or empty%s", context)
    }

    // Ensure the rule actually matches something.
    if r.Regex == nil && r.Path == nil {
        return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
    }

    // Ensure |secretGroup| works.
    if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
        return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
    }

    for _, allowlist := range r.Allowlists {
        // This will probably never happen.
        if allowlist == nil {
            continue
        }
        if err := allowlist.Validate(); err != nil {
            return fmt.Errorf("%s: %w", r.RuleID, err)
        }
    }

    r.validated = true
    return nil
}
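A small sketch of what `Validate` enforces: a rule needs a non-empty ID, at least one of `Regex` or `Path`, and a `SecretGroup` no larger than the regex's capture-group count. The rule contents here are hypothetical, and the snippet assumes the fork's `regexp` package exposes `MustCompile` the way `Translate` uses it above.

    package main

    import (
        "log"

        "github.com/Infisical/infisical-merge/detect/config"
        "github.com/Infisical/infisical-merge/detect/regexp"
    )

    func main() {
        rule := config.Rule{
            RuleID:      "generic-api-key",
            Description: "Generic API key",
            // One capture group, so SecretGroup may be at most 1.
            Regex:       regexp.MustCompile(`api[_-]?key\s*[=:]\s*['"]?([0-9a-zA-Z]{16,45})`),
            SecretGroup: 1,
            Keywords:    []string{"api"},
        }
        if err := rule.Validate(); err != nil {
            // e.g. "generic-api-key: invalid regex secret group ..."
            log.Fatal(err)
        }
    }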
@@ -1,46 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package config

import (
    "github.com/Infisical/infisical-merge/detect/regexp"
)

func anyRegexMatch(f string, res []*regexp.Regexp) bool {
    for _, re := range res {
        if regexMatched(f, re) {
            return true
        }
    }
    return false
}

func regexMatched(f string, re *regexp.Regexp) bool {
    if re == nil {
        return false
    }
    if re.FindString(f) != "" {
        return true
    }
    return false
}
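These helpers are unexported, so the following is an in-package sketch only: allowlist path checks above ultimately reduce to "does any compiled pattern match this string?", with a nil pattern treated as no match. The patterns are illustrative.

    paths := []*regexp.Regexp{
        regexp.MustCompile(`(^|/)node_modules/`),
        regexp.MustCompile(`\.lock$`),
    }
    _ = anyRegexMatch("frontend/package-lock.json", paths) // false: neither pattern matches
    _ = anyRegexMatch("yarn.lock", paths)                  // true: matches `\.lock$`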
@@ -1,328 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "bytes"
    "encoding/base64"
    "fmt"
    "regexp"
    "unicode"

    "github.com/Infisical/infisical-merge/detect/logging"
)

var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
    base64.StdEncoding.DecodeString,
    base64.RawURLEncoding.DecodeString,
}

func init() {
    // Basically look for anything that isn't just letters
    for _, c := range `0123456789+/-_` {
        b64LikelyChars[c] = 1
    }
}

// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
    // The parent segment in a segment tree. If nil, it is a root segment
    parent *EncodedSegment

    // Relative start/end are the bounds of the encoded value in the current pass.
    relativeStart int
    relativeEnd   int

    // Absolute start/end refer to the bounds of the root segment in this segment
    // tree
    absoluteStart int
    absoluteEnd   int

    // Decoded start/end refer to the bounds of the decoded value in the current
    // pass. These can differ from relative values because decoding can shrink
    // or grow the size of the segment.
    decodedStart int
    decodedEnd   int

    // This is the actual decoded content in the segment
    decodedValue string

    // This is the type of encoding
    encoding string
}

// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
    return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}

// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
    return start <= s.decodedEnd && end >= s.decodedStart
}

// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
    // The match is within the bounds of the segment so we just return
    // the absolute start and end of the root segment.
    if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
        return []int{
            s.absoluteStart,
            s.absoluteEnd,
        }
    }

    // Since it overlaps one side and/or the other, we're going to have to adjust
    // and climb parents until we're either at the root or we've determined
    // we're fully inside one of the parent segments.
    adjustedMatchIndex := make([]int, 2)

    if matchIndex[0] < s.decodedStart {
        // It starts before the encoded segment so adjust the start to match
        // the location before it was decoded
        matchStartDelta := s.decodedStart - matchIndex[0]
        adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
    } else {
        // It starts within the encoded segment so set the bound to the
        // relative start
        adjustedMatchIndex[0] = s.relativeStart
    }

    if matchIndex[1] > s.decodedEnd {
        // It ends after the encoded segment so adjust the end to match
        // the location before it was decoded
        matchEndDelta := matchIndex[1] - s.decodedEnd
        adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
    } else {
        // It ends within the encoded segment so set the bound to the relative end
        adjustedMatchIndex[1] = s.relativeEnd
    }

    // We're still not at a root segment so we'll need to keep on adjusting
    if s.parent != nil {
        return s.parent.adjustMatchIndex(adjustedMatchIndex)
    }

    return adjustedMatchIndex
}

// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
    depth := 1

    // Climb the tree and increment the depth
    for current := &s; current.parent != nil; current = current.parent {
        depth++
    }

    return depth
}

// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
    return []string{
        fmt.Sprintf("decoded:%s", s.encoding),
        fmt.Sprintf("decode-depth:%d", s.depth()),
    }
}

// Decoder decodes various types of data in place
type Decoder struct {
    decodedMap map[string]string
}

// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
    return &Decoder{
        decodedMap: make(map[string]string),
    }
}

// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
    segments := d.findEncodedSegments(data, parentSegments)

    if len(segments) > 0 {
        result := bytes.NewBuffer(make([]byte, 0, len(data)))

        relativeStart := 0
        for _, segment := range segments {
            result.WriteString(data[relativeStart:segment.relativeStart])
            result.WriteString(segment.decodedValue)
            relativeStart = segment.relativeEnd
        }
        result.WriteString(data[relativeStart:])

        return result.String(), segments
    }

    return data, segments
}

// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
    if len(data) == 0 {
        return []EncodedSegment{}
    }

    matchIndices := b64Regexp.FindAllStringIndex(data, -1)
    if matchIndices == nil {
        return []EncodedSegment{}
    }

    segments := make([]EncodedSegment, 0, len(matchIndices))

    // Keeps up with offsets from the text changing size as things are decoded
    decodedShift := 0

    for _, matchIndex := range matchIndices {
        encodedValue := data[matchIndex[0]:matchIndex[1]]

        if !isLikelyB64(encodedValue) {
            d.decodedMap[encodedValue] = ""
            continue
        }

        decodedValue, alreadyDecoded := d.decodedMap[encodedValue]

        // We haven't decoded this yet, so go ahead and decode it
        if !alreadyDecoded {
            decodedValue = decodeValue(encodedValue)
            d.decodedMap[encodedValue] = decodedValue
        }

        // Skip this segment because there was nothing to check
        if len(decodedValue) == 0 {
            continue
        }

        // Create a segment for the encoded data
        segment := EncodedSegment{
            relativeStart: matchIndex[0],
            relativeEnd:   matchIndex[1],
            absoluteStart: matchIndex[0],
            absoluteEnd:   matchIndex[1],
            decodedStart:  matchIndex[0] + decodedShift,
            decodedEnd:    matchIndex[0] + decodedShift + len(decodedValue),
            decodedValue:  decodedValue,
            encoding:      "base64",
        }

        // Shift decoded start and ends based on size changes
        decodedShift += len(decodedValue) - len(encodedValue)

        // Adjust the absolute position of segments contained in parent segments
        for _, parentSegment := range parentSegments {
            if segment.isChildOf(parentSegment) {
                segment.absoluteStart = parentSegment.absoluteStart
                segment.absoluteEnd = parentSegment.absoluteEnd
                segment.parent = &parentSegment
                break
            }
        }

        logging.Debug().Msgf("segment found: %#v", segment)
        segments = append(segments, segment)
    }

    return segments
}

// decodeValue tries a list of decoders and returns the first successful one
func decodeValue(encodedValue string) string {
    for _, decoder := range decoders {
        decodedValue, err := decoder(encodedValue)

        if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
            return string(decodedValue)
        }
    }

    return ""
}

func isASCII(b []byte) bool {
    for i := 0; i < len(b); i++ {
        if b[i] > unicode.MaxASCII || b[i] < '\t' {
            return false
        }
    }

    return true
}

// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
    for _, c := range s {
        if b64LikelyChars[c] != 0 {
            return true
        }
    }

    return false
}

// Find a segment where the decoded bounds overlaps a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
    for _, segment := range encodedSegments {
        if segment.decodedOverlaps(start, end) {
            return &segment
        }
    }

    return nil
}

func (s EncodedSegment) currentLine(currentRaw string) string {
    start := 0
    end := len(currentRaw)

    // Find the start of the range
    for i := s.decodedStart; i > -1; i-- {
        c := currentRaw[i]
        if c == '\n' {
            start = i
            break
        }
    }

    // Find the end of the range
    for i := s.decodedEnd; i < end; i++ {
        c := currentRaw[i]
        if c == '\n' {
            end = i
            break
        }
    }

    return currentRaw[start:end]
}
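Since `decode` is unexported, this is an in-package sketch of the multi-pass loop (the same shape `Detect` uses below): each pass replaces base64-looking spans with their decoded values and returns the segment tree used to map match indices back to the original text. The sample string is made up; `c2VjcmV0LXZhbHVlLTEyMzQ1Ng==` is the standard base64 of `secret-value-123456`.

    d := NewDecoder()
    raw := `token = "c2VjcmV0LXZhbHVlLTEyMzQ1Ng=="`
    segments := []EncodedSegment{}
    for depth := 0; depth < 3; depth++ {
        raw, segments = d.decode(raw, segments)
        if len(segments) == 0 {
            break // nothing left to decode
        }
    }
    // raw is now `token = "secret-value-123456"`; a second pass finds no
    // decodable ASCII segments, so the loop stops.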
@@ -1,699 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
    "bufio"
    "context"
    "fmt"
    "os"
    "runtime"
    "strings"
    "sync"
    "sync/atomic"
    "time"

    "github.com/Infisical/infisical-merge/detect/config"
    "github.com/Infisical/infisical-merge/detect/logging"
    "github.com/Infisical/infisical-merge/detect/regexp"
    "github.com/Infisical/infisical-merge/detect/report"

    ahocorasick "github.com/BobuSumisu/aho-corasick"
    "github.com/fatih/semgroup"
    "github.com/rs/zerolog"
    "github.com/spf13/viper"
    "golang.org/x/exp/maps"
)

const (
    gitleaksAllowSignature = "gitleaks:allow"
    chunkSize              = 100 * 1_000 // 100kb

    // SlowWarningThreshold is the amount of time to wait before logging that a file is slow.
    // This is useful for identifying problematic files and tuning the allowlist.
    SlowWarningThreshold = 5 * time.Second
)

var (
    newLineRegexp = regexp.MustCompile("\n")
    isWindows     = runtime.GOOS == "windows"
)

// Detector is the main detector struct
type Detector struct {
    // Config is the configuration for the detector
    Config config.Config

    // Redact is a flag to redact findings. This is exported
    // so users using gitleaks as a library can set this flag
    // without calling `detector.Start(cmd *cobra.Command)`
    Redact uint

    // Verbose is a flag to print findings
    Verbose bool

    // MaxDecodeDepth limits how many recursive decoding passes are allowed
    MaxDecodeDepth int

    // files larger than this will be skipped
    MaxTargetMegaBytes int

    // FollowSymlinks is a flag to enable scanning symlink files
    FollowSymlinks bool

    // NoColor is a flag to disable color output
    NoColor bool

    // IgnoreGitleaksAllow is a flag to ignore gitleaks:allow comments.
    IgnoreGitleaksAllow bool

    // commitMap is used to keep track of commits that have been scanned.
    // This is only used for logging purposes and git scans.
    commitMap map[string]bool

    // findingMutex is to prevent concurrent access to the
    // findings slice when adding findings.
    findingMutex *sync.Mutex

    // findings is a slice of report.Findings. This is the result
    // of the detector's scan which can then be used to generate a
    // report.
    findings []report.Finding

    // prefilter is an ahocorasick struct used for doing efficient string
    // matching given a set of words (keywords from the rules in the config)
    prefilter ahocorasick.Trie

    // a list of known findings that should be ignored
    baseline []report.Finding

    // path to baseline
    baselinePath string

    // gitleaksIgnore
    gitleaksIgnore map[string]struct{}

    // Sema (https://github.com/fatih/semgroup) controls the concurrency
    Sema *semgroup.Group

    // report-related settings.
    ReportPath string
    Reporter   report.Reporter

    TotalBytes atomic.Uint64
}

// Fragment contains the data to be scanned
type Fragment struct {
    // Raw is the raw content of the fragment
    Raw string

    Bytes []byte

    // FilePath is the path to the file, if applicable.
    // The path separator MUST be normalized to `/`.
    FilePath    string
    SymlinkFile string
    // WindowsFilePath is the path with the original separator.
    // This provides a backwards-compatible solution to https://github.com/gitleaks/gitleaks/issues/1565.
    WindowsFilePath string `json:"-"` // TODO: remove this in v9.

    // CommitSHA is the SHA of the commit if applicable
    CommitSHA string

    // newlineIndices is a list of indices of newlines in the raw content.
    // This is used to calculate the line location of a finding
    newlineIndices [][]int
}

// NewDetector creates a new detector with the given config
func NewDetector(cfg config.Config) *Detector {
    return &Detector{
        commitMap:      make(map[string]bool),
        gitleaksIgnore: make(map[string]struct{}),
        findingMutex:   &sync.Mutex{},
        findings:       make([]report.Finding, 0),
        Config:         cfg,
        prefilter:      *ahocorasick.NewTrieBuilder().AddStrings(maps.Keys(cfg.Keywords)).Build(),
        Sema:           semgroup.NewGroup(context.Background(), 40),
    }
}

// NewDetectorDefaultConfig creates a new detector with the default config
func NewDetectorDefaultConfig() (*Detector, error) {
    viper.SetConfigType("toml")
    err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
    if err != nil {
        return nil, err
    }
    var vc config.ViperConfig
    err = viper.Unmarshal(&vc)
    if err != nil {
        return nil, err
    }
    cfg, err := vc.Translate()
    if err != nil {
        return nil, err
    }
    return NewDetector(cfg), nil
}

func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
    logging.Debug().Msgf("found .gitleaksignore file: %s", gitleaksIgnorePath)
    file, err := os.Open(gitleaksIgnorePath)
    if err != nil {
        return err
    }
    defer func() {
        // https://github.com/securego/gosec/issues/512
        if err := file.Close(); err != nil {
            logging.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
        }
    }()

    scanner := bufio.NewScanner(file)
    replacer := strings.NewReplacer("\\", "/")
    for scanner.Scan() {
        line := strings.TrimSpace(scanner.Text())
        // Skip blank lines and lines that start with a comment
        if line == "" || strings.HasPrefix(line, "#") {
            continue
        }

        // Normalize the path.
        // TODO: Make this a breaking change in v9.
        s := strings.Split(line, ":")
        switch len(s) {
        case 3:
            // Global fingerprint.
            // `file:rule-id:start-line`
            s[0] = replacer.Replace(s[0])
        case 4:
            // Commit fingerprint.
            // `commit:file:rule-id:start-line`
            s[1] = replacer.Replace(s[1])
        default:
            logging.Warn().Str("fingerprint", line).Msg("Invalid .gitleaksignore entry")
        }
        d.gitleaksIgnore[strings.Join(s, ":")] = struct{}{}
    }
    return nil
}

// DetectBytes scans the given bytes and returns a list of findings
func (d *Detector) DetectBytes(content []byte) []report.Finding {
    return d.DetectString(string(content))
}

// DetectString scans the given string and returns a list of findings
func (d *Detector) DetectString(content string) []report.Finding {
    return d.Detect(Fragment{
        Raw: content,
    })
}

// Detect scans the given fragment and returns a list of findings
func (d *Detector) Detect(fragment Fragment) []report.Finding {
    if fragment.Bytes == nil {
        d.TotalBytes.Add(uint64(len(fragment.Raw)))
    }
    d.TotalBytes.Add(uint64(len(fragment.Bytes)))

    var (
        findings []report.Finding
        logger   = func() zerolog.Logger {
            l := logging.With().Str("path", fragment.FilePath)
            if fragment.CommitSHA != "" {
                l = l.Str("commit", fragment.CommitSHA)
            }
            return l.Logger()
        }()
    )

    // check if filepath is allowed
    if fragment.FilePath != "" {
        // is the path our config or baseline file?
        if fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath) {
            logging.Trace().Msg("skipping file: matches config or baseline path")
            return findings
        }
    }
    // check if commit or filepath is allowed.
    if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, d.Config.Allowlists); isAllowed {
        event.Msg("skipping file: global allowlist")
        return findings
    }

    // add newline indices for location calculation in detectRule
    fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)

    // set up variables to handle different decoding passes
    currentRaw := fragment.Raw
    encodedSegments := []EncodedSegment{}
    currentDecodeDepth := 0
    decoder := NewDecoder()

    for {
        // build keyword map for prefiltering rules
        keywords := make(map[string]bool)
        normalizedRaw := strings.ToLower(currentRaw)
        matches := d.prefilter.MatchString(normalizedRaw)
        for _, m := range matches {
            keywords[normalizedRaw[m.Pos():int(m.Pos())+len(m.Match())]] = true
        }

        for _, rule := range d.Config.Rules {
            if len(rule.Keywords) == 0 {
                // if no keywords are associated with the rule always scan the
                // fragment using the rule
                findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
                continue
            }

            // check if keywords are in the fragment
            for _, k := range rule.Keywords {
                if _, ok := keywords[strings.ToLower(k)]; ok {
                    findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
                    break
                }
            }
        }

        // increment the depth by 1 as we start our decoding pass
        currentDecodeDepth++

        // stop the loop if we've hit our max decoding depth
        if currentDecodeDepth > d.MaxDecodeDepth {
            break
        }

        // decode the currentRaw for the next pass
        currentRaw, encodedSegments = decoder.decode(currentRaw, encodedSegments)

        // stop the loop when there's nothing else to decode
        if len(encodedSegments) == 0 {
            break
        }
    }

    return filter(findings, d.Redact)
}

// detectRule scans the given fragment for the given rule and returns a list of findings
func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rule, encodedSegments []EncodedSegment) []report.Finding {
    var (
        findings []report.Finding
        logger   = func() zerolog.Logger {
            l := logging.With().Str("rule-id", r.RuleID).Str("path", fragment.FilePath)
            if fragment.CommitSHA != "" {
                l = l.Str("commit", fragment.CommitSHA)
            }
            return l.Logger()
        }()
    )

    // check if commit or file is allowed for this rule.
    if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, r.Allowlists); isAllowed {
        event.Msg("skipping file: rule allowlist")
        return findings
    }

    if r.Path != nil {
        if r.Regex == nil && len(encodedSegments) == 0 {
            // Path _only_ rule
            if r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath)) {
                finding := report.Finding{
                    RuleID:      r.RuleID,
                    Description: r.Description,
                    File:        fragment.FilePath,
                    SymlinkFile: fragment.SymlinkFile,
                    Match:       fmt.Sprintf("file detected: %s", fragment.FilePath),
                    Tags:        r.Tags,
                }
                return append(findings, finding)
            }
        } else {
            // if path is set _and_ a regex is set, then we need to check both
            // so if the path does not match, then we should return early and not
            // consider the regex
            if !(r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath))) {
                return findings
            }
        }
    }

    // if this is a path-only rule, skip content checks
    if r.Regex == nil {
        return findings
    }

    // if the size flag is configured and the raw data is bigger than the flag, skip
    if d.MaxTargetMegaBytes > 0 {
        rawLength := len(currentRaw) / 1000000
        if rawLength > d.MaxTargetMegaBytes {
            logger.Debug().
                Int("size", rawLength).
                Int("max-size", d.MaxTargetMegaBytes).
                Msg("skipping fragment: size")
            return findings
        }
    }

    // use currentRaw instead of fragment.Raw since this represents the current
    // decoding pass on the text
    for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
        // Extract secret from match
        secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")

        // For any metadata from decoding
        var metaTags []string
        currentLine := ""

        // Check if the decoded portions of the segment overlap with the match
        // to see if it's potentially a new match
        if len(encodedSegments) > 0 {
            if segment := segmentWithDecodedOverlap(encodedSegments, matchIndex[0], matchIndex[1]); segment != nil {
                matchIndex = segment.adjustMatchIndex(matchIndex)
                metaTags = append(metaTags, segment.tags()...)
                currentLine = segment.currentLine(currentRaw)
            } else {
                // This item has already been added to a finding
                continue
            }
        } else {
            // Fixes: https://github.com/gitleaks/gitleaks/issues/1352
            // trim the following line that regexes ending in '\n' incorrectly capture
            matchIndex[1] = matchIndex[0] + len(secret)
        }

        // determine location of match. Note that the location
        // in the finding will be the line/column numbers of the _match_
        // not the _secret_, which will be different if the secretGroup
        // value is set for this rule
        loc := location(fragment, matchIndex)

        if matchIndex[1] > loc.endLineIndex {
            loc.endLineIndex = matchIndex[1]
        }

        finding := report.Finding{
            RuleID:      r.RuleID,
            Description: r.Description,
            StartLine:   loc.startLine,
            EndLine:     loc.endLine,
            StartColumn: loc.startColumn,
            EndColumn:   loc.endColumn,
            Line:        fragment.Raw[loc.startLineIndex:loc.endLineIndex],
            Match:       secret,
            Secret:      secret,
            File:        fragment.FilePath,
            SymlinkFile: fragment.SymlinkFile,
            Tags:        append(r.Tags, metaTags...),
        }

        if !d.IgnoreGitleaksAllow && strings.Contains(finding.Line, gitleaksAllowSignature) {
            logger.Trace().
                Str("finding", finding.Secret).
                Msg("skipping finding: 'gitleaks:allow' signature")
            continue
        }

        if currentLine == "" {
            currentLine = finding.Line
        }

        // Set the value of |secret|, if the pattern contains at least one capture group.
        // (The first element is the full match, hence we check >= 2.)
        groups := r.Regex.FindStringSubmatch(finding.Secret)
        if len(groups) >= 2 {
            if r.SecretGroup > 0 {
                if len(groups) <= r.SecretGroup {
                    // Config validation should prevent this
                    continue
                }
                finding.Secret = groups[r.SecretGroup]
            } else {
                // If |secretGroup| is not set, we will use the first suitable capture group.
                for _, s := range groups[1:] {
                    if len(s) > 0 {
                        finding.Secret = s
                        break
                    }
                }
            }
        }

        // check entropy
        entropy := shannonEntropy(finding.Secret)
        finding.Entropy = float32(entropy)
        if r.Entropy != 0.0 {
            // entropy is too low, skip this finding
            if entropy <= r.Entropy {
                logger.Trace().
                    Str("finding", finding.Secret).
                    Float32("entropy", finding.Entropy).
                    Msg("skipping finding: low entropy")
                continue
            }
        }

        // check if the result matches any of the global allowlists.
        if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, d.Config.Allowlists); isAllowed {
            event.Msg("skipping finding: global allowlist")
            continue
        }

        // check if the result matches any of the rule allowlists.
        if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, r.Allowlists); isAllowed {
            event.Msg("skipping finding: rule allowlist")
            continue
        }
        findings = append(findings, finding)
    }
    return findings
}

// AddFinding synchronously adds a finding to the findings slice
func (d *Detector) AddFinding(finding report.Finding) {
    globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
    if finding.Commit != "" {
        finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
    } else {
        finding.Fingerprint = globalFingerprint
    }

    // check if we should ignore this finding
    logger := logging.With().Str("finding", finding.Secret).Logger()
    if _, ok := d.gitleaksIgnore[globalFingerprint]; ok {
        logger.Debug().
            Str("fingerprint", globalFingerprint).
            Msg("skipping finding: global fingerprint")
        return
    } else if finding.Commit != "" {
        // Awkward nested if because I'm not sure how to chain these two conditions.
        if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
            logger.Debug().
                Str("fingerprint", finding.Fingerprint).
                Msg("skipping finding: fingerprint")
            return
        }
    }

    if d.baseline != nil && !IsNew(finding, d.Redact, d.baseline) {
        logger.Debug().
            Str("fingerprint", finding.Fingerprint).
            Msg("skipping finding: baseline")
        return
    }

    d.findingMutex.Lock()
    d.findings = append(d.findings, finding)
    if d.Verbose {
        printFinding(finding, d.NoColor)
    }
    d.findingMutex.Unlock()
}

// Findings returns the findings added to the detector
func (d *Detector) Findings() []report.Finding {
    return d.findings
}

// addCommit synchronously adds a commit to the commit map
func (d *Detector) addCommit(commit string) {
    d.commitMap[commit] = true
}

// checkCommitOrPathAllowed evaluates |fragment| against all provided |allowlists|.
//
// If the match condition is "OR", only commit and path are checked.
// Otherwise, if regexes or stopwords are defined this will fail.
func checkCommitOrPathAllowed(
    logger zerolog.Logger,
    fragment Fragment,
    allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
    if fragment.FilePath == "" && fragment.CommitSHA == "" {
        return false, nil
    }

    for _, a := range allowlists {
        var (
            isAllowed        bool
            allowlistChecks  []bool
            commitAllowed, _ = a.CommitAllowed(fragment.CommitSHA)
            pathAllowed      = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
        )
        // If the condition is "AND" we need to check all conditions.
        if a.MatchCondition == config.AllowlistMatchAnd {
            if len(a.Commits) > 0 {
                allowlistChecks = append(allowlistChecks, commitAllowed)
            }
            if len(a.Paths) > 0 {
                allowlistChecks = append(allowlistChecks, pathAllowed)
            }
            // These will be checked later.
            if len(a.Regexes) > 0 {
                continue
            }
            if len(a.StopWords) > 0 {
                continue
            }

            isAllowed = allTrue(allowlistChecks)
        } else {
            isAllowed = commitAllowed || pathAllowed
        }
        if isAllowed {
            event := logger.Trace().Str("condition", a.MatchCondition.String())
            if commitAllowed {
                event.Bool("allowed-commit", commitAllowed)
            }
            if pathAllowed {
                event.Bool("allowed-path", pathAllowed)
            }
            return true, event
        }
    }
    return false, nil
}

// checkFindingAllowed evaluates |finding| against all provided |allowlists|.
//
// If the match condition is "OR", only regex and stopwords are run. (Commit and path should be handled separately).
// Otherwise, all conditions are checked.
//
// TODO: The method signature is awkward. I can't think of a better way to log helpful info.
func checkFindingAllowed(
    logger zerolog.Logger,
    finding report.Finding,
    fragment Fragment,
    currentLine string,
    allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
    for _, a := range allowlists {
        allowlistTarget := finding.Secret
        switch a.RegexTarget {
        case "match":
            allowlistTarget = finding.Match
        case "line":
            allowlistTarget = currentLine
        }

        var (
            checks                 []bool
            isAllowed              bool
            commitAllowed          bool
            commit                 string
            pathAllowed            bool
            regexAllowed           = a.RegexAllowed(allowlistTarget)
            containsStopword, word = a.ContainsStopWord(finding.Secret)
        )
        // If the condition is "AND" we need to check all conditions.
        if a.MatchCondition == config.AllowlistMatchAnd {
            // Determine applicable checks.
            if len(a.Commits) > 0 {
                commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
                checks = append(checks, commitAllowed)
            }
            if len(a.Paths) > 0 {
                pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
                checks = append(checks, pathAllowed)
            }
            if len(a.Regexes) > 0 {
                checks = append(checks, regexAllowed)
            }
            if len(a.StopWords) > 0 {
                checks = append(checks, containsStopword)
            }

            isAllowed = allTrue(checks)
        } else {
            isAllowed = regexAllowed || containsStopword
        }

        if isAllowed {
            event := logger.Trace().
                Str("finding", finding.Secret).
                Str("condition", a.MatchCondition.String())
            if commitAllowed {
                event.Str("allowed-commit", commit)
            }
            if pathAllowed {
                event.Bool("allowed-path", pathAllowed)
            }
            if regexAllowed {
                event.Bool("allowed-regex", regexAllowed)
            }
            if containsStopword {
                event.Str("allowed-stopword", word)
            }
            return true, event
        }
    }
    return false, nil
}

func allTrue(bools []bool) bool {
    for _, check := range bools {
        if !check {
            return false
        }
    }
    return true
}

func fileExists(fileName string) bool {
    // check for a .infisicalignore file
    info, err := os.Stat(fileName)
    if err != nil && !os.IsNotExist(err) {
        return false
    }

    if info != nil && err == nil {
        if !info.IsDir() {
            return true
        }
    }
    return false
}
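Tying the above together, a minimal sketch of library-style use: build a detector from the embedded default config via `NewDetectorDefaultConfig`, then scan an in-memory string with `DetectString`. The field names follow the `report.Finding` usage in this file; the sample value is the well-known AWS documentation example key, and whether it fires depends on the embedded ruleset.

    package main

    import (
        "fmt"
        "log"

        "github.com/Infisical/infisical-merge/detect"
    )

    func main() {
        detector, err := detect.NewDetectorDefaultConfig()
        if err != nil {
            log.Fatal(err)
        }

        findings := detector.DetectString(`aws_secret_access_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"`)
        for _, f := range findings {
            fmt.Printf("%s: %s (line %d)\n", f.RuleID, f.Secret, f.StartLine)
        }
    }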
@@ -1,225 +0,0 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
	"bufio"
	"bytes"
	"io"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/h2non/filetype"

	"github.com/Infisical/infisical-merge/detect/logging"
	"github.com/Infisical/infisical-merge/detect/report"
	"github.com/Infisical/infisical-merge/detect/sources"
)

const maxPeekSize = 25 * 1_000 // 25kb

func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
	for pa := range paths {
		d.Sema.Go(func() error {
			logger := logging.With().Str("path", pa.Path).Logger()
			logger.Trace().Msg("Scanning path")

			f, err := os.Open(pa.Path)
			if err != nil {
				if os.IsPermission(err) {
					logger.Warn().Msg("Skipping file: permission denied")
					return nil
				}
				return err
			}
			defer func() {
				_ = f.Close()
			}()

			// Get file size
			fileInfo, err := f.Stat()
			if err != nil {
				return err
			}
			fileSize := fileInfo.Size()
			if d.MaxTargetMegaBytes > 0 {
				rawLength := fileSize / 1000000
				if rawLength > int64(d.MaxTargetMegaBytes) {
					logger.Debug().
						Int64("size", rawLength).
						Msg("Skipping file: exceeds --max-target-megabytes")
					return nil
				}
			}

			var (
				// Buffer to hold file chunks
				reader     = bufio.NewReaderSize(f, chunkSize)
				buf        = make([]byte, chunkSize)
				totalLines = 0
			)
			for {
				n, err := reader.Read(buf)

				// "Callers should always process the n > 0 bytes returned before considering the error err."
				// https://pkg.go.dev/io#Reader
				if n > 0 {
					// Only check the filetype at the start of file.
					if totalLines == 0 {
						// TODO: could other optimizations be introduced here?
						if mimetype, err := filetype.Match(buf[:n]); err != nil {
							return nil
						} else if mimetype.MIME.Type == "application" {
							return nil // skip binary files
						}
					}

					// Try to split chunks across large areas of whitespace, if possible.
					peekBuf := bytes.NewBuffer(buf[:n])
					if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
						return readErr
					}

					// Count the number of newlines in this chunk
					chunk := peekBuf.String()
					linesInChunk := strings.Count(chunk, "\n")
					totalLines += linesInChunk
					fragment := Fragment{
						Raw:   chunk,
						Bytes: peekBuf.Bytes(),
					}
					if pa.Symlink != "" {
						fragment.SymlinkFile = pa.Symlink
					}

					if isWindows {
						fragment.FilePath = filepath.ToSlash(pa.Path)
						fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
						fragment.WindowsFilePath = pa.Path
					} else {
						fragment.FilePath = pa.Path
					}

					timer := time.AfterFunc(SlowWarningThreshold, func() {
						logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
					})
					for _, finding := range d.Detect(fragment) {
						// need to add 1 since line counting starts at 1
						finding.StartLine += (totalLines - linesInChunk) + 1
						finding.EndLine += (totalLines - linesInChunk) + 1
						d.AddFinding(finding)
					}
					if timer != nil {
						timer.Stop()
						timer = nil
					}
				}

				if err != nil {
					if err == io.EOF {
						return nil
					}
					return err
				}
			}
		})
	}

	if err := d.Sema.Wait(); err != nil {
		return d.findings, err
	}

	return d.findings, nil
}

// readUntilSafeBoundary consumes |f| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
	if peekBuf.Len() == 0 {
		return nil
	}

	// Does the buffer end in consecutive newlines?
	var (
		data         = peekBuf.Bytes()
		lastChar     = data[len(data)-1]
		newlineCount = 0 // Tracks consecutive newlines
	)
	if isWhitespace(lastChar) {
		for i := len(data) - 1; i >= 0; i-- {
			lastChar = data[i]
			if lastChar == '\n' {
				newlineCount++

				// Stop if two consecutive newlines are found
				if newlineCount >= 2 {
					return nil
				}
			} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
				// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
				// (Intentionally do nothing.)
			} else {
				break
			}
		}
	}

	// If not, read ahead until we (hopefully) find some.
	newlineCount = 0
	for {
		data = peekBuf.Bytes()
		// Check if the last character is a newline.
		lastChar = data[len(data)-1]
		if lastChar == '\n' {
			newlineCount++

			// Stop if two consecutive newlines are found
			if newlineCount >= 2 {
				break
			}
		} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
			// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
			// (Intentionally do nothing.)
		} else {
			newlineCount = 0 // Reset if a non-newline character is found
		}

		// Stop growing the buffer if it reaches maxPeekSize
		if (peekBuf.Len() - n) >= maxPeekSize {
			break
		}

		// Read one more byte from the underlying reader.
		b, err := r.ReadByte()
		if err != nil {
			if err == io.EOF {
				break
			}
			return err
		}
		peekBuf.WriteByte(b)
	}
	return nil
}
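A minimal sketch of how DetectFiles is driven (not part of the diff; the detector value and paths are assumptions for illustration): the caller owns the channel, sends one sources.ScanTarget per file, and closes it so the range loop above terminates.

	// Hypothetical caller; `detector` is assumed to be a configured *Detector.
	paths := make(chan sources.ScanTarget)
	go func() {
		defer close(paths)
		paths <- sources.ScanTarget{Path: "/tmp/repo/config.yaml"}
		// Symlink is optional; when set it is recorded on the Fragment above.
		paths <- sources.ScanTarget{Path: "/tmp/repo/link.yaml", Symlink: "/tmp/repo/real.yaml"}
	}()
	findings, err := detector.DetectFiles(paths)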
@@ -1,216 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
	"bytes"
	"errors"
	"fmt"
	"net/url"
	"os/exec"
	"regexp"
	"strings"
	"time"

	"github.com/Infisical/infisical-merge/detect/cmd/scm"
	"github.com/gitleaks/go-gitdiff/gitdiff"

	"github.com/Infisical/infisical-merge/detect/logging"
	"github.com/Infisical/infisical-merge/detect/report"
	"github.com/Infisical/infisical-merge/detect/sources"
)

func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
	defer cmd.Wait()
	var (
		diffFilesCh = cmd.DiffFilesCh()
		errCh       = cmd.ErrCh()
	)

	// loop to range over both DiffFiles (stdout) and ErrCh (stderr)
	for diffFilesCh != nil || errCh != nil {
		select {
		case gitdiffFile, open := <-diffFilesCh:
			if !open {
				diffFilesCh = nil
				break
			}

			// skip binary files
			if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
				continue
			}

			// Check if commit is allowed
			commitSHA := ""
			if gitdiffFile.PatchHeader != nil {
				commitSHA = gitdiffFile.PatchHeader.SHA
				for _, a := range d.Config.Allowlists {
					if ok, c := a.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
						logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
						continue
					}
				}
			}
			d.addCommit(commitSHA)

			d.Sema.Go(func() error {
				for _, textFragment := range gitdiffFile.TextFragments {
					if textFragment == nil {
						return nil
					}

					fragment := Fragment{
						Raw:       textFragment.Raw(gitdiff.OpAdd),
						CommitSHA: commitSHA,
						FilePath:  gitdiffFile.NewName,
					}

					timer := time.AfterFunc(SlowWarningThreshold, func() {
						logging.Debug().
							Str("commit", commitSHA[:7]).
							Str("path", fragment.FilePath).
							Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
					})
					for _, finding := range d.Detect(fragment) {
						d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
					}
					if timer != nil {
						timer.Stop()
						timer = nil
					}
				}
				return nil
			})
		case err, open := <-errCh:
			if !open {
				errCh = nil
				break
			}

			return d.findings, err
		}
	}

	if err := d.Sema.Wait(); err != nil {
		return d.findings, err
	}
	logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
	logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
	return d.findings, nil
}

type RemoteInfo struct {
	Platform scm.Platform
	Url      string
}

func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
	if platform == scm.NoPlatform {
		return &RemoteInfo{Platform: platform}
	}

	remoteUrl, err := getRemoteUrl(source)
	if err != nil {
		if strings.Contains(err.Error(), "No remote configured") {
			logging.Debug().Msg("skipping finding links: repository has no configured remote.")
			platform = scm.NoPlatform
		} else {
			logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
		}
		goto End
	}

	if platform == scm.UnknownPlatform {
		platform = platformFromHost(remoteUrl)
		if platform == scm.UnknownPlatform {
			logging.Info().
				Str("host", remoteUrl.Hostname()).
				Msg("Unknown SCM platform. Use --platform to include links in findings.")
		} else {
			logging.Debug().
				Str("host", remoteUrl.Hostname()).
				Str("platform", platform.String()).
				Msg("SCM platform parsed from host")
		}
	}

End:
	var rUrl string
	if remoteUrl != nil {
		rUrl = remoteUrl.String()
	}
	return &RemoteInfo{
		Platform: platform,
		Url:      rUrl,
	}
}

var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)

func getRemoteUrl(source string) (*url.URL, error) {
	// This will return the first remote, typically "origin".
	cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
	if source != "." {
		cmd.Dir = source
	}

	stdout, err := cmd.Output()
	if err != nil {
		var exitError *exec.ExitError
		if errors.As(err, &exitError) {
			return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
		}
		return nil, err
	}

	remoteUrl := string(bytes.TrimSpace(stdout))
	if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
		remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
	}
	remoteUrl = strings.TrimSuffix(remoteUrl, ".git")

	parsedUrl, err := url.Parse(remoteUrl)
	if err != nil {
		return nil, fmt.Errorf("unable to parse remote URL: %w", err)
	}

	// Remove any user info.
	parsedUrl.User = nil
	return parsedUrl, nil
}

func platformFromHost(u *url.URL) scm.Platform {
	switch strings.ToLower(u.Hostname()) {
	case "github.com":
		return scm.GitHubPlatform
	case "gitlab.com":
		return scm.GitLabPlatform
	case "dev.azure.com", "visualstudio.com":
		return scm.AzureDevOpsPlatform
	case "bitbucket.org":
		return scm.BitBucketPlatform
	default:
		return scm.UnknownPlatform
	}
}
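An illustration of the remote normalization above (the remote value is hypothetical): an SSH-style remote is rewritten to an https URL via sshUrlpat before parsing, and any trailing ".git" is trimmed.

	remote := "git@github.com:Infisical/infisical.git"
	if m := sshUrlpat.FindStringSubmatch(remote); m != nil {
		remote = fmt.Sprintf("https://%s/%s", m[1], m[2]) // "https://github.com/Infisical/infisical"
	}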
@@ -1,102 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

// Location represents a location in a file
type Location struct {
	startLine      int
	endLine        int
	startColumn    int
	endColumn      int
	startLineIndex int
	endLineIndex   int
}

func location(fragment Fragment, matchIndex []int) Location {
	var (
		prevNewLine int
		location    Location
		lineSet     bool
		_lineNum    int
	)

	start := matchIndex[0]
	end := matchIndex[1]

	// default startLineIndex to 0
	location.startLineIndex = 0

	// Fixes: https://github.com/zricethezav/gitleaks/issues/1037
	// When a fragment does NOT have any newlines, a default "newline"
	// will be counted to make the subsequent location calculation logic work
	// for fragments with no newlines.
	if len(fragment.newlineIndices) == 0 {
		fragment.newlineIndices = [][]int{
			{len(fragment.Raw), len(fragment.Raw) + 1},
		}
	}

	for lineNum, pair := range fragment.newlineIndices {
		_lineNum = lineNum
		newLineByteIndex := pair[0]
		if prevNewLine <= start && start < newLineByteIndex {
			lineSet = true
			location.startLine = lineNum
			location.endLine = lineNum
			location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
			location.startLineIndex = prevNewLine
			location.endLineIndex = newLineByteIndex
		}
		if prevNewLine < end && end <= newLineByteIndex {
			location.endLine = lineNum
			location.endColumn = (end - prevNewLine)
			location.endLineIndex = newLineByteIndex
		}

		prevNewLine = pair[0]
	}

	if !lineSet {
		// if lines never get set then that means the secret is most likely
		// on the last line of the diff output and the diff output does not have
		// a newline
		location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
		location.endColumn = (end - prevNewLine)
		location.startLine = _lineNum + 1
		location.endLine = _lineNum + 1

		// search for new line byte index
		i := 0
		for end+i < len(fragment.Raw) {
			if fragment.Raw[end+i] == '\n' {
				break
			}
			if fragment.Raw[end+i] == '\r' {
				break
			}
			i++
		}
		location.endLineIndex = end + i
	}
	return location
}
@@ -1,72 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package logging

import (
	"os"

	"github.com/rs/zerolog"
)

var Logger zerolog.Logger

func init() {
	// send all logs to stderr
	Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
		Level(zerolog.InfoLevel).
		With().Timestamp().Logger()
}

func With() zerolog.Context {
	return Logger.With()
}

func Trace() *zerolog.Event {
	return Logger.Trace()
}

func Debug() *zerolog.Event {
	return Logger.Debug()
}

func Info() *zerolog.Event {
	return Logger.Info()
}

func Warn() *zerolog.Event {
	return Logger.Warn()
}

func Error() *zerolog.Event {
	return Logger.Error()
}

func Err(err error) *zerolog.Event {
	return Logger.Err(err)
}

func Fatal() *zerolog.Event {
	return Logger.Fatal()
}

func Panic() *zerolog.Event {
	return Logger.Panic()
}
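A small usage sketch (an assumption, not shown in the diff): because Logger is a package-level variable, a caller can raise the verbosity before a scan and then use the helpers the way the detect code above does.

	logging.Logger = logging.Logger.Level(zerolog.TraceLevel)
	logger := logging.With().Str("path", "main.go").Logger()
	logger.Trace().Msg("scanning")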
@@ -1,149 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package detect

import (
	"bufio"
	"bytes"
	"errors"
	"io"

	"github.com/Infisical/infisical-merge/detect/report"
)

// DetectReader accepts an io.Reader and a buffer size for the reader in KB
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
	reader := bufio.NewReader(r)
	buf := make([]byte, 1000*bufSize)
	findings := []report.Finding{}

	for {
		n, err := reader.Read(buf)

		// "Callers should always process the n > 0 bytes returned before considering the error err."
		// https://pkg.go.dev/io#Reader
		if n > 0 {
			// Try to split chunks across large areas of whitespace, if possible.
			peekBuf := bytes.NewBuffer(buf[:n])
			if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
				return findings, readErr
			}

			fragment := Fragment{
				Raw: peekBuf.String(),
			}
			for _, finding := range d.Detect(fragment) {
				findings = append(findings, finding)
				if d.Verbose {
					printFinding(finding, d.NoColor)
				}
			}
		}

		if err != nil {
			if err == io.EOF {
				break
			}
			return findings, err
		}
	}

	return findings, nil
}

// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
// - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
// - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
// once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
//	// Assume detector is an instance of *Detector and myReader implements io.Reader.
//	findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
//	// Process findings as they arrive.
//	for finding := range findingsCh {
//		fmt.Printf("Found secret: %+v\n", finding)
//	}
//
//	// After the findings channel is closed, check the final error.
//	if err := <-errCh; err != nil {
//		log.Fatalf("StreamDetectReader encountered an error: %v", err)
//	} else {
//		fmt.Println("Scanning completed successfully.")
//	}
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
	findingsCh := make(chan report.Finding, 1)
	errCh := make(chan error, 1)

	go func() {
		defer close(findingsCh)
		defer close(errCh)

		reader := bufio.NewReader(r)
		buf := make([]byte, 1000*bufSize)

		for {
			n, err := reader.Read(buf)

			if n > 0 {
				peekBuf := bytes.NewBuffer(buf[:n])
				if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
					errCh <- readErr
					return
				}

				fragment := Fragment{Raw: peekBuf.String()}
				for _, finding := range d.Detect(fragment) {
					findingsCh <- finding
					if d.Verbose {
						printFinding(finding, d.NoColor)
					}
				}
			}

			if err != nil {
				if errors.Is(err, io.EOF) {
					errCh <- nil
					return
				}
				errCh <- err
				return
			}
		}
	}()

	return findingsCh, errCh
}
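A hedged usage sketch for DetectReader (the detector and payload values are assumptions): scan an in-memory string with a 10 KB read buffer.

	findings, err := detector.DetectReader(strings.NewReader(payload), 10)
	if err != nil {
		log.Fatalf("scan failed: %v", err)
	}
	fmt.Printf("%d findings\n", len(findings))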
@@ -1,37 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

//go:build !gore2regex

package regexp

import (
	re "regexp"
)

const Version = "stdlib"

type Regexp = re.Regexp

func MustCompile(str string) *re.Regexp {
	return re.MustCompile(str)
}
@@ -1,37 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

//go:build gore2regex

package regexp

import (
	re "github.com/wasilibs/go-re2"
)

const Version = "github.com/wasilibs/go-re2"

type Regexp = re.Regexp

func MustCompile(str string) *re.Regexp {
	return re.MustCompile(str)
}
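The two regexp files above are compile-time alternatives: by default the package wraps Go's standard-library engine, and building with the gore2regex build tag swaps in the go-re2 bindings with no call-site changes. A usage sketch:

	go build ./...                    # Version == "stdlib"
	go build -tags gore2regex ./...   # Version == "github.com/wasilibs/go-re2"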
@@ -1,26 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

const version = "v8.0.0"
const driver = "gitleaks"
@@ -1,100 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
	"encoding/csv"
	"io"
	"strconv"
	"strings"
)

type CsvReporter struct {
}

var _ Reporter = (*CsvReporter)(nil)

func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
	if len(findings) == 0 {
		return nil
	}

	var (
		cw  = csv.NewWriter(w)
		err error
	)
	columns := []string{"RuleID",
		"Commit",
		"File",
		"SymlinkFile",
		"Secret",
		"Match",
		"StartLine",
		"EndLine",
		"StartColumn",
		"EndColumn",
		"Author",
		"Message",
		"Date",
		"Email",
		"Fingerprint",
		"Tags",
	}
	// A miserable attempt at "omitempty" so tests don't yell at me.
	if findings[0].Link != "" {
		columns = append(columns, "Link")
	}

	if err = cw.Write(columns); err != nil {
		return err
	}
	for _, f := range findings {
		row := []string{f.RuleID,
			f.Commit,
			f.File,
			f.SymlinkFile,
			f.Secret,
			f.Match,
			strconv.Itoa(f.StartLine),
			strconv.Itoa(f.EndLine),
			strconv.Itoa(f.StartColumn),
			strconv.Itoa(f.EndColumn),
			f.Author,
			f.Message,
			f.Date,
			f.Email,
			f.Fingerprint,
			strings.Join(f.Tags, " "),
		}
		if findings[0].Link != "" {
			row = append(row, f.Link)
		}

		if err = cw.Write(row); err != nil {
			return err
		}
	}

	cw.Flush()
	return cw.Error()
}
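A usage sketch (findings is assumed to come from one of the Detect* methods above): os.Stdout satisfies io.WriteCloser, so findings can be written straight to the terminal as CSV.

	if err := (&report.CsvReporter{}).Write(os.Stdout, findings); err != nil {
		log.Fatalf("csv write failed: %v", err)
	}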
@@ -1,92 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
	"math"
	"strings"
)

// Finding contains information about strings that
// have been captured by a tree-sitter query.
type Finding struct {
	// Rule is the name of the rule that was matched
	RuleID      string
	Description string

	StartLine   int
	EndLine     int
	StartColumn int
	EndColumn   int

	Line string `json:"-"`

	Match string

	// Secret contains the full content of what is matched in
	// the tree-sitter query.
	Secret string

	// File is the name of the file containing the finding
	File        string
	SymlinkFile string
	Commit      string
	Link        string `json:",omitempty"`

	// Entropy is the Shannon entropy of Value
	Entropy float32

	Author  string
	Email   string
	Date    string
	Message string
	Tags    []string

	// unique identifier
	Fingerprint string
}

// Redact removes sensitive information from a finding.
func (f *Finding) Redact(percent uint) {
	secret := maskSecret(f.Secret, percent)
	if percent >= 100 {
		secret = "REDACTED"
	}
	f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
	f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
	f.Secret = secret
}

func maskSecret(secret string, percent uint) string {
	if percent > 100 {
		percent = 100
	}
	length := float64(len(secret))
	if length <= 0 {
		return secret
	}
	prc := float64(100 - percent)
	lth := int64(math.RoundToEven(length * prc / float64(100)))

	return secret[:lth] + "..."
}
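A worked example of the redaction arithmetic above (the secret value is hypothetical): for a 20-character secret and percent = 90, prc = 10, so RoundToEven(20 * 10 / 100) = 2 leading characters survive the mask.

	f := report.Finding{Secret: "sk_live_abcdefghijkl"} // 20 characters
	f.Redact(90) // f.Secret == "sk...", and Line/Match have the secret replaced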
@@ -1,39 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
	"encoding/json"
	"io"
)

type JsonReporter struct {
}

var _ Reporter = (*JsonReporter)(nil)

func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
	encoder := json.NewEncoder(w)
	encoder.SetIndent("", " ")
	return encoder.Encode(findings)
}
@@ -1,129 +0,0 @@
// MIT License

// Copyright (c) 2019 Zachary Rice

// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package report

import (
	"encoding/json"
	"encoding/xml"
	"fmt"
	"io"
	"strconv"
)

type JunitReporter struct {
}

var _ Reporter = (*JunitReporter)(nil)

func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
	testSuites := TestSuites{
		TestSuites: getTestSuites(findings),
	}

	io.WriteString(w, xml.Header)
	encoder := xml.NewEncoder(w)
	encoder.Indent("", "\t")
	return encoder.Encode(testSuites)
}

func getTestSuites(findings []Finding) []TestSuite {
	return []TestSuite{
		{
			Failures:  strconv.Itoa(len(findings)),
			Name:      "gitleaks",
			Tests:     strconv.Itoa(len(findings)),
			TestCases: getTestCases(findings),
			Time:      "",
		},
	}
}

func getTestCases(findings []Finding) []TestCase {
	testCases := []TestCase{}
	for _, f := range findings {
		testCase := TestCase{
			Classname: f.Description,
			Failure:   getFailure(f),
			File:      f.File,
			Name:      getMessage(f),
			Time:      "",
		}
		testCases = append(testCases, testCase)
	}
	return testCases
}

func getFailure(f Finding) Failure {
	return Failure{
		Data:    getData(f),
		Message: getMessage(f),
		Type:    f.Description,
	}
}

func getData(f Finding) string {
	data, err := json.MarshalIndent(f, "", "\t")
	if err != nil {
		fmt.Println(err)
		return ""
	}
	return string(data)
}

func getMessage(f Finding) string {
	if f.Commit == "" {
		return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
	}

	return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}

type TestSuites struct {
	XMLName    xml.Name `xml:"testsuites"`
	TestSuites []TestSuite
}

type TestSuite struct {
	XMLName   xml.Name   `xml:"testsuite"`
	Failures  string     `xml:"failures,attr"`
	Name      string     `xml:"name,attr"`
	Tests     string     `xml:"tests,attr"`
	TestCases []TestCase `xml:"testcase"`
	Time      string     `xml:"time,attr"`
}

type TestCase struct {
	XMLName   xml.Name `xml:"testcase"`
	Classname string   `xml:"classname,attr"`
	Failure   Failure  `xml:"failure"`
	File      string   `xml:"file,attr"`
	Name      string   `xml:"name,attr"`
	Time      string   `xml:"time,attr"`
}

type Failure struct {
	XMLName xml.Name `xml:"failure"`
	Data    string   `xml:",chardata"`
	Message string   `xml:"message,attr"`
	Type    string   `xml:"type,attr"`
}
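For reference, a hedged sketch of the XML these structs produce, derived from the xml struct tags above (attribute values are illustrative, not actual output):

	<testsuites>
		<testsuite failures="1" name="gitleaks" tests="1" time="">
			<testcase classname="..." file="config.yaml" name="..." time="">
				<failure message="..." type="...">{ JSON-encoded finding }</failure>
			</testcase>
		</testsuite>
	</testsuites>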
Some files were not shown because too many files have changed in this diff.