Compare commits

...

90 Commits

Author SHA1 Message Date
Sheen Capadngan
9374ee3c2e doc: add bootstrap to API reference 2025-07-30 03:57:59 +08:00
carlosmonastyrski
dece214073 Merge pull request #4264 from Infisical/fix/secretHistoryActorLink
Fix secret version history link to user/machine details page
2025-07-29 14:26:58 -03:00
Carlos Monastyrski
992df5c7d0 Fix secret version history link to user/machine details page 2025-07-29 14:22:39 -03:00
Scott Wilson
00e382d774 Merge pull request #4257 from Infisical/secret-scanning-findings-badge
improvement(frontend): add back secret scanning unresolved finding count to sidebar
2025-07-29 08:14:44 -07:00
Sheen
f63c434c0e Merge pull request #4262 from Infisical/misc/removed-cli
misc: removed CLI repository
2025-07-29 22:21:56 +08:00
Sheen Capadngan
9f0250caf2 misc: removed unnecessary CLI files in root 2025-07-29 20:54:55 +08:00
Sheen Capadngan
d47f6f7ec9 misc: removed CLI directory 2025-07-29 20:49:54 +08:00
Maidul Islam
1126c6b0fa Merge pull request #4244 from Infisical/feature/secrets-detection-in-secrets-manager
feat: secrets detection in secret manager
2025-07-28 23:41:50 -04:00
Maidul Islam
7949142ea7 update text for secret params 2025-07-28 23:32:05 -04:00
Scott Wilson
122de99606 improvement: add back secret scanning unresolved finding count to sidebar 2025-07-28 15:29:26 -07:00
Sheen Capadngan
57fcfdaf21 Merge remote-tracking branch 'origin/main' into feature/secrets-detection-in-secrets-manager 2025-07-29 04:57:54 +08:00
Sheen Capadngan
e430abfc9e misc: addressed comments 2025-07-29 04:56:50 +08:00
Scott Wilson
7d1bc86702 Merge pull request #4236 from Infisical/improve-access-denied-banner-design
improvement(frontend): revise access restricted banner and refactor/update relevant locations
2025-07-28 10:31:14 -07:00
Scott Wilson
975b621bc8 fix: remove passthrough on banner guard for kms pages 2025-07-28 10:26:22 -07:00
Daniel Hougaard
ba9da3e6ec Merge pull request #4254 from Infisical/allow-click-outside-close-rotation-modal
improvement(frontend): remove click outside modal close disabling on various modals
2025-07-28 21:06:33 +04:00
carlosmonastyrski
d2274a622a Merge pull request #4251 from Infisical/fix/azureOAuthSeparateEnvVars
Separate Azure OAuth env vars to different env variables for each app connection
2025-07-28 14:06:01 -03:00
Scott Wilson
41ba7edba2 improvement: remove click outside modal close disabling on sync/data source/rotation modals 2025-07-28 09:50:18 -07:00
carlosmonastyrski
7acefbca29 Merge pull request #4220 from Infisical/feat/multipleApprovalEnvs
Allow multiple environments on secret and access policies
2025-07-28 12:22:40 -03:00
Daniel Hougaard
e246f6bbfe Merge pull request #4252 from Infisical/daniel/form-data-cve
Daniel/form data CVE
2025-07-28 19:01:27 +04:00
Carlos Monastyrski
f265fa6d37 Minor improvements to azure multi env variables 2025-07-28 10:14:21 -03:00
Daniel Hougaard
8eebd7228f Update package.json 2025-07-28 16:43:13 +04:00
Daniel Hougaard
2a5593ea30 update axios in oidc sink server 2025-07-28 16:42:21 +04:00
Daniel Hougaard
17af33372c uninstall axios in root 2025-07-28 16:40:58 +04:00
Daniel Hougaard
27da14df9d Fix CVE's 2025-07-28 16:40:20 +04:00
Carlos Monastyrski
cd4b9cd03a Improve azure client secrets env var name 2025-07-28 09:30:37 -03:00
Carlos Monastyrski
0779091d1f Separate Azure OAuth env vars to different env variables for each app connection 2025-07-28 09:14:43 -03:00
Maidul Islam
c421057cf1 Merge pull request #4250 from Infisical/fix/oracle-db-rotation-failing
fix: potential fix for oracle db rotation failing
2025-07-27 14:47:08 -04:00
Akhil Mohan
8df4616265 Update backend/src/ee/services/secret-rotation-v2/shared/sql-credentials/sql-credentials-rotation-fns.ts
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-07-28 00:09:30 +05:30
=
484f34a257 fix: potential fix for oracle db rotation failing 2025-07-28 00:03:01 +05:30
carlosmonastyrski
32851565a7 Merge pull request #4247 from Infisical/fix/azureClientSecretsPermissions
Fix/azure client secrets permissions
2025-07-25 20:52:04 -03:00
Carlos Monastyrski
68401a799e Fix env variables name on doc 2025-07-25 20:48:18 -03:00
Carlos Monastyrski
0adf2c830d Fix azure client secrets OAuth URL to use graph instead of vault 2025-07-25 20:47:17 -03:00
Carlos Monastyrski
3400a8f911 Small UI fix for environments label 2025-07-25 17:24:15 -03:00
Carlos Monastyrski
e6588b5d0e Set correct environmentName on listApprovalRequests 2025-07-25 17:00:11 -03:00
Daniel Hougaard
c68138ac21 Merge pull request #4245 from Infisical/daniel/fips-improvements
fix(fips): increased image size and migrations
2025-07-25 23:40:27 +04:00
Carlos Monastyrski
608979efa7 Merge branch 'main' into feat/multipleApprovalEnvs 2025-07-25 16:29:04 -03:00
Sheen Capadngan
585cb1b30c misc: used promise all 2025-07-26 03:26:24 +08:00
Sheen Capadngan
7fdee073d8 misc: add secret checker in change policy branch 2025-07-26 03:16:39 +08:00
Daniel Hougaard
d4f0301104 Update Dockerfile.fips.standalone-infisical 2025-07-25 23:13:26 +04:00
Daniel Hougaard
253c46f21d fips improvements 2025-07-25 23:09:23 +04:00
Maidul Islam
d8e39aed16 Merge pull request #4243 from Infisical/fix/secretReminderMigration
Add manual migration to secret imports rework
2025-07-25 15:01:04 -04:00
Sheen Capadngan
c368178cb1 feat: secrets detection in secret manager 2025-07-26 03:00:44 +08:00
Carlos Monastyrski
72ee468208 Remove previous queue running the migration 2025-07-25 15:20:23 -03:00
carlosmonastyrski
18238b46a7 Merge pull request #4229 from Infisical/feat/azureClientSecretsNewAuth
Add client secrets authentication on Azure CS app connection
2025-07-25 15:00:49 -03:00
Carlos Monastyrski
d0ffae2c10 Add uuid validation to Azure client secrets 2025-07-25 14:53:46 -03:00
Carlos Monastyrski
7ce11cde95 Add cycle logic to next reminder migration 2025-07-25 14:47:57 -03:00
Carlos Monastyrski
af32948a05 Minor improvements on reminders migration 2025-07-25 13:35:06 -03:00
Daniel Hougaard
25753fc995 Merge pull request #4242 from Infisical/daniel/render-sync-auto-redeploy
feat(secret-sync/render): auto redeploy on sync
2025-07-25 20:31:47 +04:00
Carlos Monastyrski
cd71848800 Avoid migrating existing reminders 2025-07-25 13:10:54 -03:00
Carlos Monastyrski
4afc7a1981 Add manual migration to secret imports rework 2025-07-25 13:06:29 -03:00
Daniel Hougaard
11ca76ccca fix: restructure and requested changes 2025-07-25 20:05:20 +04:00
Daniel Hougaard
418aca8af0 feat(secret-sync/render): auto redeploy on sync 2025-07-25 19:50:28 +04:00
Carlos Monastyrski
99e8bdef58 Minor fixes on policies multi env migration 2025-07-25 01:37:25 -03:00
Carlos Monastyrski
7365f60835 Small code improvements 2025-07-25 01:23:01 -03:00
Scott Wilson
929822514e Merge pull request #4230 from Infisical/secret-dashboard-sing-env-col-resize
improvement(frontend): add col resize to secret dashboard env view
2025-07-24 20:08:18 -07:00
Daniel Hougaard
616ccb97f2 Merge pull request #4238 from Infisical/daniel/docs-fix
Update docs.json
2025-07-25 04:59:32 +04:00
Daniel Hougaard
7917a767e6 Update docs.json 2025-07-25 04:57:15 +04:00
carlosmonastyrski
ccff675e0d Merge pull request #4237 from Infisical/fix/remindersMigrationFix
Fix secret reminders migration job
2025-07-24 21:25:47 -03:00
Carlos Monastyrski
ad905b2ff7 Fix secret reminders migration job 2025-07-24 20:42:39 -03:00
Scott Wilson
4e960445a4 chore: remove unused tw css 2025-07-24 15:56:14 -07:00
Scott Wilson
7af5a4ad8d improvement: revise access restricted banner and refactor/update relevant locations 2025-07-24 15:52:29 -07:00
carlosmonastyrski
2ada753527 Merge pull request #4235 from Infisical/fix/renderRateLimit
Improve render retries and rate limits
2025-07-24 19:07:17 -03:00
Carlos Monastyrski
c031736701 Improve render api usage 2025-07-24 18:51:44 -03:00
Daniel Hougaard
91a1c34637 Merge pull request #4211 from Infisical/daniel/vault-import
feat(external-migrations): vault migrations
2025-07-25 01:16:50 +04:00
Carlos Monastyrski
eadb1a63fa Improve render retries and rate limits 2025-07-24 17:49:28 -03:00
Scott Wilson
f70a1e3db6 Merge pull request #4233 from Infisical/fix-identity-role-invalidation
fix(frontend): correct org identity mutation table invalidation
2025-07-24 12:17:03 -07:00
Scott Wilson
fc6ab94a06 fix: correct org identity mutation table invalidation 2025-07-24 12:08:41 -07:00
Scott Wilson
4feb3314e7 Merge pull request #4232 from Infisical/create-project-modal-dropdown
improvement(frontend): Adjust select dropdown styling in add project modal
2025-07-24 11:57:23 -07:00
Scott Wilson
d9a57d1391 fix: make side prop optional 2025-07-24 11:50:05 -07:00
Scott Wilson
2c99d41592 improvement: adjust select dropdown styling in add project modal 2025-07-24 11:42:04 -07:00
Scott Wilson
2535d1bc4b Merge pull request #4228 from Infisical/project-audit-logs-page
feature(project-audit-logs): add project audit logs pages
2025-07-24 10:49:02 -07:00
x032205
a8a1bc5f4a Merge pull request #4227 from Infisical/ENG-3345
feat(machine-identity): Add AWS attributes for ABAC
2025-07-24 11:59:17 -04:00
Daniel Hougaard
d2a4f265de Update ExternalMigrationsTab.tsx 2025-07-24 19:58:29 +04:00
x032205
3483f185a8 Doc tweaks 2025-07-24 11:44:10 -04:00
Daniel Hougaard
4af872e504 fix: ui state 2025-07-24 19:14:50 +04:00
Daniel Hougaard
716b88fa49 requested changes and docs 2025-07-24 19:09:24 +04:00
Carlos Monastyrski
b05ea8a69a Fix migration 2025-07-24 12:07:01 -03:00
Carlos Monastyrski
0d97bb4c8c Merge branch 'main' into feat/multipleApprovalEnvs 2025-07-24 12:03:07 -03:00
Daniel Hougaard
716f061c01 Merge branch 'heads/main' into daniel/vault-import 2025-07-24 17:29:55 +04:00
Carlos Monastyrski
5af939992c Update docs 2025-07-24 10:04:25 -03:00
Carlos Monastyrski
aec4ee905e Add client secrets authentication on Azure CS app connection 2025-07-24 09:40:54 -03:00
Scott Wilson
d935b28925 feature: add project audit logs 2025-07-23 16:48:54 -07:00
x032205
60620840f2 Tweaks 2025-07-23 16:48:06 -04:00
x032205
e798eb2a4e feat(machine-identity): Add AWS attributes for ABAC 2025-07-23 16:30:55 -04:00
Carlos Monastyrski
60657f0bc6 Addressed PR suggestions 2025-07-23 10:37:23 -03:00
Carlos Monastyrski
05408bc151 Allow multiple environments on secret and access policies 2025-07-23 09:54:41 -03:00
Daniel Hougaard
464e32b0e9 Update VaultPlatformModal.tsx 2025-07-22 13:04:00 +04:00
Daniel Hougaard
bfd8b64871 requested changes 2025-07-22 02:15:21 +04:00
Daniel Hougaard
185cc4efba Update VaultPlatformModal copy.tsx 2025-07-22 01:50:28 +04:00
Daniel Hougaard
7150b9314d feat(external-migrations): vault migrations 2025-07-22 01:35:02 +04:00
449 changed files with 4371 additions and 28080 deletions

View File

@@ -123,8 +123,17 @@ INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
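
The hunk above splits the shared Azure OAuth credentials into a dedicated client ID/secret pair per app connection (App Configuration, Key Vault, Client Secrets, DevOps) while keeping the original shared pair. A minimal TypeScript sketch of how a backend might resolve these, assuming it falls back to the shared variables when a connection-specific pair is unset (the helper name and the fallback behavior are illustrative, not confirmed by this diff):

type AzureConnection = "APP_CONFIGURATION" | "KEY_VAULT" | "CLIENT_SECRETS" | "DEVOPS";

// Prefer the connection-specific pair; fall back to the shared
// INF_APP_CONNECTION_AZURE_* pair (the fallback is an assumption).
const getAzureOAuthCredentials = (connection: AzureConnection) => {
  const prefix = `INF_APP_CONNECTION_AZURE_${connection}`;
  return {
    clientId: process.env[`${prefix}_CLIENT_ID`] ?? process.env.INF_APP_CONNECTION_AZURE_CLIENT_ID,
    clientSecret: process.env[`${prefix}_CLIENT_SECRET`] ?? process.env.INF_APP_CONNECTION_AZURE_CLIENT_SECRET
  };
};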

View File

@@ -1,153 +0,0 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest-8-cores
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
with:
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
- name: Invalidate Cloudfront cache
run: aws cloudfront create-invalidation --distribution-id $CLOUDFRONT_DISTRIBUTION_ID --paths '/deb/dists/stable/*'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}

View File

@@ -1,55 +0,0 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@@ -1,241 +0,0 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# before:
# hooks:
# # You may remove this if you don't use go modules.
# - cd cli && go mod tidy
# # you may remove this if you don't need go generate
# - cd cli && go generate ./...
before:
hooks:
- ./cli/scripts/completions.sh
- ./cli/scripts/manpages.sh
monorepo:
tag_prefix: infisical-cli/
dir: cli
builds:
- id: darwin-build
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
env:
- CGO_ENABLED=1
- CC=/home/runner/work/osxcross/target/bin/o64-clang
- CXX=/home/runner/work/osxcross/target/bin/o64-clang++
goos:
- darwin
ignore:
- goos: darwin
goarch: "386"
dir: ./cli
- id: all-other-builds
env:
- CGO_ENABLED=0
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
goos:
- freebsd
- linux
- netbsd
- openbsd
- windows
goarch:
- "386"
- amd64
- arm
- arm64
goarm:
- "6"
- "7"
ignore:
- goos: windows
goarch: "386"
- goos: freebsd
goarch: "386"
dir: ./cli
archives:
- format_overrides:
- goos: windows
format: zip
files:
- ../README*
- ../LICENSE*
- ../manpages/*
- ../completions/*
release:
replace_existing_draft: true
mode: "replace"
checksum:
name_template: "checksums.txt"
snapshot:
name_template: "{{ .Version }}-devel"
# publishers:
# - name: fury.io
# ids:
# - infisical
# dir: "{{ dir .ArtifactPath }}"
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
brews:
- name: infisical
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
- name: "infisical@{{.Version}}"
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
nfpms:
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The official Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
scoop:
bucket:
owner: Infisical
name: scoop-infisical
commit_author:
name: "Infisical"
email: ai@infisical.com
homepage: "https://infisical.com"
description: "The official Infisical CLI"
license: MIT
winget:
- name: infisical
publisher: infisical
license: MIT
homepage: https://infisical.com
short_description: "The official Infisical CLI"
repository:
owner: infisical
name: winget-pkgs
branch: "infisical-{{.Version}}"
pull_request:
enabled: true
draft: false
base:
owner: microsoft
name: winget-pkgs
branch: master
aurs:
- name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
# license
install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
# completions
mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"

View File

@@ -145,7 +145,11 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips
&& make install_fips \
&& cd / \
&& rm -rf /openssl-build \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
@@ -186,12 +190,11 @@ ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
ENV NODE_OPTIONS="--max-old-space-size=1024"
ENV NODE_OPTIONS="--max-old-space-size=8192 --force-fips"
# FIPS mode of operation:
ENV OPENSSL_CONF=/backend/nodejs.fips.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV NODE_OPTIONS=--force-fips
ENV FIPS_ENABLED=true
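
With --force-fips folded into NODE_OPTIONS and OPENSSL_CONF/OPENSSL_MODULES pointing at the FIPS provider, the runtime's FIPS state can be sanity-checked with Node's standard crypto API (a hedged check, not part of this diff):

import crypto from "node:crypto";

// crypto.getFips() returns 1 when the OpenSSL FIPS provider is active, 0 otherwise.
console.log(crypto.getFips() === 1 ? "FIPS mode active" : "FIPS mode inactive");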

View File

@@ -59,7 +59,11 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips
&& make install_fips \
&& cd / \
&& rm -rf /openssl-build \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# ? App setup

View File

@@ -7,6 +7,7 @@
"": {
"name": "backend",
"version": "1.0.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
@@ -61,7 +62,7 @@
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios": "^1.11.0",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
@@ -13699,14 +13700,16 @@
}
},
"node_modules/@types/request/node_modules/form-data": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.2.tgz",
"integrity": "sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==",
"version": "2.5.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.35",
"safe-buffer": "^5.2.1"
},
"engines": {
@@ -15230,13 +15233,13 @@
}
},
"node_modules/axios": {
"version": "1.7.9",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
@@ -18761,13 +18764,15 @@
}
},
"node_modules/form-data": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {

View File

@@ -181,7 +181,7 @@
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios": "^1.11.0",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",

View File

@@ -126,6 +126,15 @@ declare module "@fastify/request-context" {
namespace: string;
name: string;
};
aws?: {
accountId: string;
arn: string;
userId: string;
partition: string;
service: string;
resourceType: string;
resourceName: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
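
The new aws block carries the fields of an AWS caller identity into the request context for ABAC checks. A hedged sketch of deriving them from a caller ARN — the helper is illustrative; only the ARN layout itself (arn:partition:service:region:account-id:resource-type/resource-name) is standard AWS:

const toAwsRequestContext = (arn: string, accountId: string, userId: string) => {
  // arn:partition:service:region:account-id:resource-type/resource-name
  const [, partition, service, , , resource = ""] = arn.split(":");
  const [resourceType = "", ...rest] = resource.split("/");
  return { accountId, arn, userId, partition, service, resourceType, resourceName: rest.join("/") };
};

// e.g. an assumed-role caller:
// toAwsRequestContext("arn:aws:sts::123456789012:assumed-role/Deploy/ci", "123456789012", "AROAEXAMPLE:ci")
// -> { partition: "aws", service: "sts", resourceType: "assumed-role", resourceName: "Deploy/ci", ... }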

View File

@@ -489,6 +489,11 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import {
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
import {
TIdentityLdapAuths,
TIdentityLdapAuthsInsert,
@@ -510,6 +515,11 @@ import {
TRemindersRecipientsInsert,
TRemindersRecipientsUpdate
} from "@app/db/schemas/reminders-recipients";
import {
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/secret-approval-policies-environments";
import {
TSecretReminderRecipients,
TSecretReminderRecipientsInsert,
@@ -887,6 +897,12 @@ declare module "knex/types/tables" {
TAccessApprovalPoliciesBypassersUpdate
>;
[TableName.AccessApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
>;
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
@@ -935,6 +951,11 @@ declare module "knex/types/tables" {
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
>;
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,

View File

@@ -0,0 +1,96 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment))) {
await knex.schema.createTable(TableName.AccessApprovalPolicyEnvironment, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment);
t.timestamps(true, true, true);
t.unique(["policyId", "envId"]);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.whereNotNull(`${TableName.AccessApprovalPolicy}.envId`);
const accessApprovalPolicies = existingAccessApprovalPolicies.map(async (policy) => {
await knex(TableName.AccessApprovalPolicyEnvironment).insert({
policyId: policy.id,
envId: policy.envId
});
});
await Promise.all(accessApprovalPolicies);
}
if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment))) {
await knex.schema.createTable(TableName.SecretApprovalPolicyEnvironment, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment);
t.timestamps(true, true, true);
t.unique(["policyId", "envId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.whereNotNull(`${TableName.SecretApprovalPolicy}.envId`);
const secretApprovalPolicies = existingSecretApprovalPolicies.map(async (policy) => {
await knex(TableName.SecretApprovalPolicyEnvironment).insert({
policyId: policy.id,
envId: policy.envId
});
});
await Promise.all(secretApprovalPolicies);
}
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment)) {
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyEnvironment);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
}
if (await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment)) {
await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyEnvironment);
await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
}
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
}

View File

@@ -0,0 +1,111 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { TableName } from "../schemas";
import { TReminders, TRemindersInsert } from "../schemas/reminders";
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing secret reminders migration");
const hasReminderTable = await knex.schema.hasTable(TableName.Reminder);
if (hasReminderTable) {
const secretsWithLatestVersions = await knex(TableName.SecretV2)
.whereNotNull(`${TableName.SecretV2}.reminderRepeatDays`)
.whereRaw(`"${TableName.SecretV2}"."reminderRepeatDays" > 0`)
.innerJoin(TableName.SecretVersionV2, (qb) => {
void qb
.on(`${TableName.SecretVersionV2}.secretId`, "=", `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.reminderRepeatDays`, "=", `${TableName.SecretV2}.reminderRepeatDays`);
})
.whereIn([`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretVersionV2}.version`], (qb) => {
void qb
.select(["v2.secretId", knex.raw("MIN(v2.version) as version")])
.from(`${TableName.SecretVersionV2} as v2`)
.innerJoin(`${TableName.SecretV2} as s2`, "v2.secretId", "s2.id")
.whereRaw(`v2."reminderRepeatDays" = s2."reminderRepeatDays"`)
.whereNotNull("v2.reminderRepeatDays")
.whereRaw(`v2."reminderRepeatDays" > 0`)
.groupBy("v2.secretId");
})
// Add LEFT JOIN with Reminder table to check for existing reminders
.leftJoin(TableName.Reminder, `${TableName.Reminder}.secretId`, `${TableName.SecretV2}.id`)
// Only include secrets that don't already have reminders
.whereNull(`${TableName.Reminder}.secretId`)
.select(
knex.ref("id").withSchema(TableName.SecretV2).as("secretId"),
knex.ref("reminderRepeatDays").withSchema(TableName.SecretV2).as("reminderRepeatDays"),
knex.ref("reminderNote").withSchema(TableName.SecretV2).as("reminderNote"),
knex.ref("createdAt").withSchema(TableName.SecretVersionV2).as("createdAt")
);
logger.info(`Found ${secretsWithLatestVersions.length} reminders to migrate`);
const reminderInserts: TRemindersInsert[] = [];
if (secretsWithLatestVersions.length > 0) {
secretsWithLatestVersions.forEach((secret) => {
if (!secret.reminderRepeatDays) return;
const now = new Date();
const createdAt = new Date(secret.createdAt);
let nextReminderDate = new Date(createdAt);
nextReminderDate.setDate(nextReminderDate.getDate() + secret.reminderRepeatDays);
// If the next reminder date is in the past, calculate the proper next occurrence
if (nextReminderDate < now) {
const daysSinceCreation = Math.floor((now.getTime() - createdAt.getTime()) / (1000 * 60 * 60 * 24));
const daysIntoCurrentCycle = daysSinceCreation % secret.reminderRepeatDays;
const daysUntilNextReminder = secret.reminderRepeatDays - daysIntoCurrentCycle;
nextReminderDate = new Date(now);
nextReminderDate.setDate(now.getDate() + daysUntilNextReminder);
}
reminderInserts.push({
secretId: secret.secretId,
message: secret.reminderNote,
repeatDays: secret.reminderRepeatDays,
nextReminderDate
});
});
const commitBatches = chunkArray(reminderInserts, 2000);
for (const commitBatch of commitBatches) {
const insertedReminders = (await knex
.batchInsert(TableName.Reminder, commitBatch)
.returning("*")) as TReminders[];
const insertedReminderSecretIds = insertedReminders.map((reminder) => reminder.secretId).filter(Boolean);
const recipients = await knex(TableName.SecretReminderRecipients)
.whereRaw(`??.?? IN (${insertedReminderSecretIds.map(() => "?").join(",")})`, [
TableName.SecretReminderRecipients,
"secretId",
...insertedReminderSecretIds
])
.select(
knex.ref("userId").withSchema(TableName.SecretReminderRecipients).as("userId"),
knex.ref("secretId").withSchema(TableName.SecretReminderRecipients).as("secretId")
);
const reminderRecipients = recipients.map((recipient) => ({
reminderId: insertedReminders.find((reminder) => reminder.secretId === recipient.secretId)?.id,
userId: recipient.userId
}));
const filteredRecipients = reminderRecipients.filter((recipient) => Boolean(recipient.reminderId));
await knex.batchInsert(TableName.ReminderRecipient, filteredRecipients);
}
logger.info(`Successfully migrated ${reminderInserts.length} secret reminders`);
}
logger.info("Secret reminders migration completed");
} else {
logger.warn("Reminder table does not exist, skipping migration");
}
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret reminders fix migration");
}
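
The catch-up calculation above can be restated on its own for a quick sanity check (a standalone sketch mirroring the migration's logic, not imported from it):

const nextReminderDate = (createdAt: Date, repeatDays: number, now = new Date()): Date => {
  const next = new Date(createdAt);
  next.setDate(next.getDate() + repeatDays);
  if (next >= now) return next;
  // Already past: advance to the next boundary of the repeat cycle.
  const daysSinceCreation = Math.floor((now.getTime() - createdAt.getTime()) / (1000 * 60 * 60 * 24));
  const daysUntilNext = repeatDays - (daysSinceCreation % repeatDays);
  const result = new Date(now);
  result.setDate(now.getDate() + daysUntilNext);
  return result;
};

// e.g. a secret created 100 days ago with repeatDays = 30 is 100 % 30 = 10 days
// into its current cycle, so the next reminder lands 20 days from now.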

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues"))) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.specificType("secretDetectionIgnoreValues", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("secretDetectionIgnoreValues");
});
}
}
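
A hedged sketch of how the new secretDetectionIgnoreValues column could be consulted during scanning — the function and its shape are illustrative, not taken from the service code:

const shouldFlagFinding = (
  matchedValue: string,
  project: { secretDetectionIgnoreValues?: string[] | null }
): boolean => !(project.secretDetectionIgnoreValues ?? []).includes(matchedValue);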

View File

@@ -53,7 +53,7 @@ export const getMigrationEnvConfig = async (superAdminDAL: TSuperAdminDALFactory
let envCfg = Object.freeze(parsedEnv.data);
const fipsEnabled = await crypto.initialize(superAdminDAL);
const fipsEnabled = await crypto.initialize(superAdminDAL, envCfg);
// Fix for 128-bit entropy encryption key expansion issue:
// In FIPS it is not ideal to expand a 128-bit key into 256-bit. We solved this issue in the past by creating the ROOT_ENCRYPTION_KEY.

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesEnvironmentsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPoliciesEnvironments = z.infer<typeof AccessApprovalPoliciesEnvironmentsSchema>;
export type TAccessApprovalPoliciesEnvironmentsInsert = Omit<
z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalPoliciesEnvironmentsUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;

View File

@@ -100,6 +100,7 @@ export enum TableName {
AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
AccessApprovalRequest = "access_approval_requests",
AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
AccessApprovalPolicyEnvironment = "access_approval_policies_environments",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
@@ -107,6 +108,7 @@ export enum TableName {
SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
SecretApprovalRequestSecret = "secret_approval_requests_secrets",
SecretApprovalRequestSecretTag = "secret_approval_request_secret_tags",
SecretApprovalPolicyEnvironment = "secret_approval_policies_environments",
SecretRotation = "secret_rotations",
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",

View File

@@ -30,7 +30,8 @@ export const ProjectsSchema = z.object({
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false),
defaultProduct: z.string().nullable().optional()
defaultProduct: z.string().nullable().optional(),
secretDetectionIgnoreValues: z.string().array().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesEnvironmentsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretApprovalPoliciesEnvironments = z.infer<typeof SecretApprovalPoliciesEnvironmentsSchema>;
export type TSecretApprovalPoliciesEnvironmentsInsert = Omit<
z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesEnvironmentsUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;

View File

@@ -17,52 +17,66 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().min(1, { message: "Secret path cannot be empty" }).transform(removeTrailingSlash),
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
body: z
.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.transform(removeTrailingSlash),
environment: z.string().optional(),
environments: z.string().array().optional(),
approvers: z
.discriminatedUnion("type", [
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
])
.array()
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({
type: z.literal(BypasserType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
])
.array()
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
})
.refine(
(val) => Boolean(val.environment) || Boolean(val.environments),
"Must provide either environment or environments"
),
response: {
200: z.object({
approval: sapPubSchema
@@ -78,7 +92,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
...req.body,
projectSlug: req.body.projectSlug,
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
name:
req.body.name ?? `${req.body.environment || req.body.environments?.join("-").substring(0, 250)}-${nanoid(3)}`,
enforcementLevel: req.body.enforcementLevel
});
return { approval };
@@ -211,6 +226,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true),
environments: z.array(z.string()).optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
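
Under the updated create schema, either environment (single, legacy) or environments (multiple) must be supplied — the trailing refine() enforces exactly that. An illustrative request body (the values, and the "user" literal for ApproverType.User, are assumptions for the example):

const createAccessPolicyBody = {
  projectSlug: "example-project",
  secretPath: "/",
  environments: ["dev", "staging"], // new multi-environment form
  approvers: [{ type: "user", username: "jane@example.com", sequence: 1 }],
  approvals: 1
};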

View File

@@ -17,34 +17,45 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
body: z
.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string().optional(),
environments: z.string().array().optional(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({
type: z.literal(BypasserType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
})
.refine((data) => data.environment || data.environments, "At least one environment should be provided"),
response: {
200: z.object({
approval: sapPubSchema
@@ -60,7 +71,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
projectId: req.body.workspaceId,
...req.body,
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
name: req.body.name ?? `${req.body.environment || req.body.environments?.join(",")}-${nanoid(3)}`,
enforcementLevel: req.body.enforcementLevel
});
return { approval };
@@ -103,7 +114,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.optional()
.transform((val) => (val ? removeTrailingSlash(val) : undefined)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
environments: z.array(z.string()).optional()
}),
response: {
200: z.object({

View File

@@ -26,6 +26,7 @@ export interface TAccessApprovalPolicyDALFactory
>,
customFilter?: {
policyId?: string;
envId?: string;
},
tx?: Knex
) => Promise<
@@ -55,11 +56,6 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
@@ -72,6 +68,11 @@ export interface TAccessApprovalPolicyDALFactory
type: BypasserType.Group;
}
)[];
environments: {
id: string;
name: string;
slug: string;
}[];
}[]
>;
findById: (
@@ -95,11 +96,11 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
environments: {
id: string;
name: string;
slug: string;
};
}[];
projectId: string;
}
| undefined
@@ -143,6 +144,26 @@ export interface TAccessApprovalPolicyDALFactory
}
| undefined
>;
findPolicyByEnvIdAndSecretPath: (
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
tx?: Knex
) => Promise<{
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
}>;
}
export interface TAccessApprovalPolicyServiceFactory {
@@ -367,6 +388,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
envId?: string;
}
) => {
const result = await tx(TableName.AccessApprovalPolicy)
@@ -377,7 +399,17 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId);
}
})
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`
)
.join(TableName.Environment, `${TableName.AccessApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (customFilter?.envId) {
void qb.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
}
})
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
@@ -404,7 +436,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
.select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
.select(tx.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(tx.ref("projectId").withSchema(TableName.Environment))
.select(selectAllTableCols(TableName.AccessApprovalPolicy));
@@ -448,6 +480,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
sequence: approverSequence,
approvalsRequired
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
@@ -470,11 +511,6 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
data: docs,
key: "id",
parentMapper: (data) => ({
environment: {
id: data.envId,
name: data.envName,
slug: data.envSlug
},
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
// secretPath: data.secretPath || undefined,
@@ -517,6 +553,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
id,
type: BypasserType.Group as const
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
@@ -545,14 +590,20 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
envId,
secretPath
},
TableName.AccessApprovalPolicy
)
)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", envId)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.first();
return result;
@@ -561,5 +612,81 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
}
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
const findPolicyByEnvIdAndSecretPath: TAccessApprovalPolicyDALFactory["findPolicyByEnvIdAndSecretPath"] = async (
{ envIds, secretPath },
tx
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.join(
TableName.Environment,
`${TableName.AccessApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
$in: {
envId: envIds
}
},
TableName.AccessApprovalPolicyEnvironment
)
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
secretPath
},
TableName.AccessApprovalPolicy
)
)
.whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(db.ref("projectId").withSchema(TableName.Environment));
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
}),
childrenMapper: [
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
return formattedDocs?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
}
};
return {
...accessApprovalPolicyOrm,
find,
findById,
softDeleteById,
findLastValidPolicy,
findPolicyByEnvIdAndSecretPath
};
};

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TAccessApprovalPolicyEnvironmentDALFactory = ReturnType<typeof accessApprovalPolicyEnvironmentDALFactory>;
export const accessApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
const accessApprovalPolicyEnvironmentOrm = ormify(db, TableName.AccessApprovalPolicyEnvironment);
const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicyEnvironment)
.join(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ envId }, TableName.AccessApprovalPolicyEnvironment))
.whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
.select(selectAllTableCols(TableName.AccessApprovalPolicyEnvironment));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
}
};
return { ...accessApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};
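A brief usage sketch (assumed envId). The helper returns the environment-to-policy mapping rows whose policies are still active, presumably so callers can tell whether an environment is still referenced by an access approval policy:

const mappings = await accessApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId("env_dev_id");
const isReferenced = mappings.length > 0;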

View File

@@ -21,6 +21,7 @@ import {
TAccessApprovalPolicyBypasserDALFactory
} from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import { TAccessApprovalPolicyEnvironmentDALFactory } from "./access-approval-policy-environment-dal";
import {
ApproverType,
BypasserType,
@@ -45,12 +46,14 @@ type TAccessApprovalPolicyServiceFactoryDep = {
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update" | "delete">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
accessApprovalPolicyEnvironmentDAL: TAccessApprovalPolicyEnvironmentDALFactory;
};
export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,
@@ -63,21 +66,22 @@ export const accessApprovalPolicyServiceFactory = ({
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
const $policyExists = async ({
envId,
envIds,
secretPath,
policyId
}: {
envId: string;
envId?: string;
envIds?: string[];
secretPath: string;
policyId?: string;
}) => {
const policy = await accessApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
if (!envId && !envIds) {
throw new BadRequestError({ message: "Must provide either envId or envIds" });
}
const policy = await accessApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
secretPath,
envIds: envId ? [envId] : (envIds as string[])
});
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
@@ -93,6 +97,7 @@ export const accessApprovalPolicyServiceFactory = ({
bypassers,
projectSlug,
environment,
environments,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
@@ -125,13 +130,23 @@ export const accessApprovalPolicyServiceFactory = ({
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
);
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
const mergedEnvs = (environment ? [environment] : environments) || [];
if (mergedEnvs.length === 0) {
throw new BadRequestError({ message: "Must provide either environment or environments" });
}
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId: project.id });
if (!envs.length || envs.length !== mergedEnvs.length) {
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
for (const env of envs) {
// eslint-disable-next-line no-await-in-loop
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
});
}
}
let approverUserIds = userApprovers;
@@ -199,7 +214,7 @@ export const accessApprovalPolicyServiceFactory = ({
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create(
{
envId: env.id,
envId: envs[0].id,
approvals,
secretPath,
name,
@@ -208,6 +223,10 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
await accessApprovalPolicyEnvironmentDAL.insertMany(
envs.map((el) => ({ policyId: doc.id, envId: el.id })),
tx
);
if (approverUserIds.length) {
await accessApprovalPolicyApproverDAL.insertMany(
@@ -260,7 +279,7 @@ export const accessApprovalPolicyServiceFactory = ({
return doc;
});
return { ...accessApproval, environment: env, projectId: project.id };
return { ...accessApproval, environments: envs, projectId: project.id, environment: envs[0] };
};
const getAccessApprovalPolicyByProjectSlug: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyByProjectSlug"] =
@@ -279,7 +298,10 @@ export const accessApprovalPolicyServiceFactory = ({
});
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
return accessApprovalPolicies.map((policy) => ({
...policy,
environment: policy.environments[0]
}));
};
const updateAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["updateAccessApprovalPolicy"] = async ({
@@ -295,7 +317,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
environments
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
@@ -323,16 +346,27 @@ export const accessApprovalPolicyServiceFactory = ({
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
let envs = accessApprovalPolicy.environments;
if (
await $policyExists({
envId: accessApprovalPolicy.envId,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
environments &&
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${accessApprovalPolicy.environment.slug}'`
});
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: accessApprovalPolicy.projectId });
}
for (const env of envs) {
if (
// eslint-disable-next-line no-await-in-loop
await $policyExists({
envId: env.id,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath || accessApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
});
}
}
const { permission } = await permissionService.getProjectPermission({
@@ -488,6 +522,14 @@ export const accessApprovalPolicyServiceFactory = ({
);
}
if (environments) {
await accessApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({ policyId: doc.id, envId: env.id })),
tx
);
}
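// Note: environment links are replaced wholesale (delete then insertMany) inside the
// same transaction, rather than diffed against the previous set.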
await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
@@ -517,7 +559,8 @@ export const accessApprovalPolicyServiceFactory = ({
return {
...updatedPolicy,
environment: accessApprovalPolicy.environment,
environments: accessApprovalPolicy.environments,
environment: accessApprovalPolicy.environments[0],
projectId: accessApprovalPolicy.projectId
};
};
@@ -568,7 +611,10 @@ export const accessApprovalPolicyServiceFactory = ({
}
});
return policy;
return {
...policy,
environment: policy.environments[0]
};
};
const getAccessPolicyCountByEnvSlug: TAccessApprovalPolicyServiceFactory["getAccessPolicyCountByEnvSlug"] = async ({
@@ -598,11 +644,13 @@ export const accessApprovalPolicyServiceFactory = ({
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({
envId: environment.id,
projectId: project.id,
deletedAt: null
});
const policies = await accessApprovalPolicyDAL.find(
{
projectId: project.id,
deletedAt: null
},
{ envId: environment.id }
);
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };
@@ -634,7 +682,10 @@ export const accessApprovalPolicyServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
return policy;
return {
...policy,
environment: policy.environments[0]
};
};
return {

View File

@@ -26,7 +26,8 @@ export enum BypasserType {
export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
environment?: string;
environments?: string[];
approvers: (
| { type: ApproverType.Group; id: string; sequence?: number }
| { type: ApproverType.User; id?: string; username?: string; sequence?: number }
@@ -58,6 +59,7 @@ export type TUpdateAccessApprovalPolicy = {
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
environments?: string[];
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {
@@ -113,6 +115,15 @@ export interface TAccessApprovalPolicyServiceFactory {
slug: string;
position: number;
};
environments: {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
projectId: string;
slug: string;
position: number;
}[];
projectId: string;
name: string;
id: string;
@@ -153,6 +164,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
}>;
updateAccessApprovalPolicy: ({
@@ -168,13 +184,19 @@ export interface TAccessApprovalPolicyServiceFactory {
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
environments
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
name: string;
id: string;
@@ -225,6 +247,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
bypassers: (
| {
@@ -276,6 +303,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
bypassers: (
| {

View File

@@ -65,7 +65,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environments: string[];
requestedByUser: {
userId: string;
email: string | null | undefined;
@@ -515,7 +515,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`accessApprovalReviewerUser.id`
)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.leftJoin(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`
)
.leftJoin(
TableName.Environment,
`${TableName.AccessApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
@@ -683,6 +693,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
lastName,
username
})
},
{
key: "environment",
label: "environments" as const,
mapper: ({ environment }) => environment
}
]
});

View File

@@ -86,6 +86,25 @@ export const accessApprovalRequestServiceFactory = ({
projectMicrosoftTeamsConfigDAL,
projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep): TAccessApprovalRequestServiceFactory => {
const $getEnvironmentFromPermissions = (permissions: unknown): string | null => {
if (!Array.isArray(permissions) || permissions.length === 0) {
return null;
}
const firstPermission = permissions[0] as unknown[];
if (!Array.isArray(firstPermission) || firstPermission.length < 3) {
return null;
}
const metadata = firstPermission[2] as Record<string, unknown>;
if (typeof metadata === "object" && metadata !== null && "environment" in metadata) {
const env = metadata.environment;
return typeof env === "string" ? env : null;
}
return null;
};
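// For reference, a hedged sketch of the permissions shape this helper parses; the
// CASL-style [action, subject, conditions] tuple layout is assumed from the index accesses above:
//   $getEnvironmentFromPermissions([["read", "secrets", { environment: "dev" }]]); // => "dev"
//   $getEnvironmentFromPermissions([]); // => null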
const createAccessApprovalRequest: TAccessApprovalRequestServiceFactory["createAccessApprovalRequest"] = async ({
isTemporary,
temporaryRange,
@@ -308,6 +327,15 @@ export const accessApprovalRequestServiceFactory = ({
requests = requests.filter((request) => request.environment === envSlug);
}
requests = requests.map((request) => {
const permissionEnvironment = $getEnvironmentFromPermissions(request.permissions);
if (permissionEnvironment) {
request.environmentName = permissionEnvironment;
}
return request;
});
return { requests };
};
@@ -325,13 +353,27 @@ export const accessApprovalRequestServiceFactory = ({
throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
}
const { policy, environment } = accessApprovalRequest;
const { policy, environments, permissions } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const permissionEnvironment = $getEnvironmentFromPermissions(permissions);
if (
!permissionEnvironment ||
(!environments.includes(permissionEnvironment) && status === ApprovalStatus.APPROVED)
) {
throw new BadRequestError({
message: `The original policy ${policy.name} is not attached to environment '${permissionEnvironment}'.`
});
}
const environment = await projectEnvDAL.findOne({
projectId: accessApprovalRequest.projectId,
slug: permissionEnvironment
});
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
@@ -553,7 +595,7 @@ export const accessApprovalRequestServiceFactory = ({
requesterEmail: actingUser.email,
bypassReason: bypassReason || "No reason provided",
secretPath: policy.secretPath || "/",
environment,
environment: environment?.name || permissionEnvironment,
approvalUrl: `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`,
requestType: "access"
},

View File

@@ -23,6 +23,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
customFilter?: {
sapId?: string;
envId?: string;
}
) =>
tx(TableName.SecretApprovalPolicy)
@@ -33,7 +34,17 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
void qb.where(`${TableName.SecretApprovalPolicy}.id`, "=", customFilter.sapId);
}
})
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
TableName.SecretApprovalPolicyEnvironment,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.join(TableName.Environment, `${TableName.SecretApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (customFilter?.envId) {
void qb.where(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
}
})
.leftJoin(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
@@ -97,7 +108,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
.select(
tx.ref("name").withSchema(TableName.Environment).as("envName"),
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
tx.ref("id").withSchema(TableName.Environment).as("envId"),
tx.ref("id").withSchema(TableName.Environment).as("environmentId"),
tx.ref("projectId").withSchema(TableName.Environment)
)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
@@ -146,6 +157,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
firstName,
lastName
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId, envName, envSlug }) => ({
id: environmentId,
name: envName,
slug: envSlug
})
}
]
});
@@ -160,6 +180,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
customFilter?: {
sapId?: string;
envId?: string;
},
tx?: Knex
) => {
@@ -221,6 +242,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
mapper: ({ approverGroupUserId: userId }) => ({
userId
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId, envName, envSlug }) => ({
id: environmentId,
name: envName,
slug: envSlug
})
}
]
});
@@ -235,5 +265,74 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
return softDeletedPolicy;
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById };
const findPolicyByEnvIdAndSecretPath = async (
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
tx?: Knex
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicy)
.join(
TableName.SecretApprovalPolicyEnvironment,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.join(
TableName.Environment,
`${TableName.SecretApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
$in: {
envId: envIds
}
},
TableName.SecretApprovalPolicyEnvironment
)
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
secretPath
},
TableName.SecretApprovalPolicy
)
)
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(db.ref("projectId").withSchema(TableName.Environment));
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
childrenMapper: [
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
return formattedDocs?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
}
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById, findPolicyByEnvIdAndSecretPath };
};

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TSecretApprovalPolicyEnvironmentDALFactory = ReturnType<typeof secretApprovalPolicyEnvironmentDALFactory>;
export const secretApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
const secretApprovalPolicyEnvironmentOrm = ormify(db, TableName.SecretApprovalPolicyEnvironment);
const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicyEnvironment)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ envId }, TableName.SecretApprovalPolicyEnvironment))
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
.select(selectAllTableCols(TableName.SecretApprovalPolicyEnvironment));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
}
};
return { ...secretApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};

View File

@@ -19,6 +19,7 @@ import {
TSecretApprovalPolicyBypasserDALFactory
} from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
import { TSecretApprovalPolicyEnvironmentDALFactory } from "./secret-approval-policy-environment-dal";
import {
TCreateSapDTO,
TDeleteSapDTO,
@@ -36,12 +37,13 @@ const getPolicyScore = (policy: { secretPath?: string | null }) =>
type TSecretApprovalPolicyServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "find">;
userDAL: Pick<TUserDALFactory, "find">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
secretApprovalPolicyBypasserDAL: TSecretApprovalPolicyBypasserDALFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "update">;
secretApprovalPolicyEnvironmentDAL: TSecretApprovalPolicyEnvironmentDALFactory;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@@ -51,27 +53,30 @@ export const secretApprovalPolicyServiceFactory = ({
permissionService,
secretApprovalPolicyApproverDAL,
secretApprovalPolicyBypasserDAL,
secretApprovalPolicyEnvironmentDAL,
projectEnvDAL,
userDAL,
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const $policyExists = async ({
envIds,
envId,
secretPath,
policyId
}: {
envId: string;
envIds?: string[];
envId?: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await secretApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
if (!envIds && !envId) {
throw new BadRequestError({ message: "At least one environment should be provided" });
}
const policy = await secretApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
envIds: envId ? [envId] : envIds || [],
secretPath
});
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
@@ -88,6 +93,7 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
environments,
enforcementLevel,
allowedSelfApprovals
}: TCreateSapDTO) => {
@@ -127,17 +133,23 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) {
throw new NotFoundError({
message: `Environment with slug '${environment}' not found in project with ID ${projectId}`
});
const mergedEnvs = (environment ? [environment] : environments) || [];
if (mergedEnvs.length === 0) {
throw new BadRequestError({ message: "Must provide either environment or environments" });
}
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId });
if (!envs.length || envs.length !== mergedEnvs.length) {
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
for (const env of envs) {
// eslint-disable-next-line no-await-in-loop
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
});
}
}
let groupBypassers: string[] = [];
@@ -181,7 +193,7 @@ export const secretApprovalPolicyServiceFactory = ({
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
const doc = await secretApprovalPolicyDAL.create(
{
envId: env.id,
envId: envs[0].id,
approvals,
secretPath,
name,
@@ -190,6 +202,13 @@ export const secretApprovalPolicyServiceFactory = ({
},
tx
);
await secretApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({
envId: env.id,
policyId: doc.id
})),
tx
);
let userApproverIds = userApprovers;
if (userApproverNames.length) {
@@ -253,12 +272,13 @@ export const secretApprovalPolicyServiceFactory = ({
return doc;
});
return { ...secretApproval, environment: env, projectId };
return { ...secretApproval, environments: envs, projectId, environment: envs[0] };
};
const updateSecretApprovalPolicy = async ({
approvers,
bypassers,
environments,
secretPath,
name,
actorId,
@@ -288,17 +308,26 @@ export const secretApprovalPolicyServiceFactory = ({
message: `Secret approval policy with ID '${secretPolicyId}' not found`
});
}
let envs = secretApprovalPolicy.environments;
if (
await $policyExists({
envId: secretApprovalPolicy.envId,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
environments &&
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${secretApprovalPolicy.environment.slug}'`
});
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: secretApprovalPolicy.projectId });
}
for (const env of envs) {
if (
// eslint-disable-next-line no-await-in-loop
await $policyExists({
envId: env.id,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath || secretApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
});
}
}
const { permission } = await permissionService.getProjectPermission({
@@ -415,6 +444,17 @@ export const secretApprovalPolicyServiceFactory = ({
);
}
if (environments) {
await secretApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
await secretApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({
envId: env.id,
policyId: doc.id
})),
tx
);
}
await secretApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
@@ -441,7 +481,8 @@ export const secretApprovalPolicyServiceFactory = ({
});
return {
...updatedSap,
environment: secretApprovalPolicy.environment,
environments: secretApprovalPolicy.environments,
environment: secretApprovalPolicy.environments[0],
projectId: secretApprovalPolicy.projectId
};
};
@@ -487,7 +528,12 @@ export const secretApprovalPolicyServiceFactory = ({
const updatedPolicy = await secretApprovalPolicyDAL.softDeleteById(secretPolicyId, tx);
return updatedPolicy;
});
return { ...deletedPolicy, projectId: sapPolicy.projectId, environment: sapPolicy.environment };
return {
...deletedPolicy,
projectId: sapPolicy.projectId,
environments: sapPolicy.environments,
environment: sapPolicy.environments[0]
};
};
const getSecretApprovalPolicyByProjectId = async ({
@@ -520,7 +566,7 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const policies = await secretApprovalPolicyDAL.find({ envId: env.id, deletedAt: null });
const policies = await secretApprovalPolicyDAL.find({ deletedAt: null }, { envId: env.id });
if (!policies.length) return;
// this will filter policies that are either not scoped to a secret path or whose secret path matches the given one
const policiesFilteredByPath = policies.filter(

View File

@@ -5,7 +5,8 @@ import { ApproverType, BypasserType } from "../access-approval-policy/access-app
export type TCreateSapDTO = {
approvals: number;
secretPath: string;
environment: string;
environment?: string;
environments?: string[];
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
| { type: BypasserType.Group; id: string }
@@ -29,6 +30,7 @@ export type TUpdateSapDTO = {
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
environments?: string[];
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSapDTO = {

View File

@@ -40,6 +40,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.leftJoin(TableName.SecretApprovalPolicyEnvironment, (bd) => {
bd.on(
`${TableName.SecretApprovalPolicy}.id`,
"=",
`${TableName.SecretApprovalPolicyEnvironment}.policyId`
).andOn(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", `${TableName.SecretFolder}.envId`);
})
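// Joining on both policyId and the folder's envId pins each request to the single
// environment it was opened in, even though a policy may now span multiple environments.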
.leftJoin<TUsers>(
db(TableName.Users).as("statusChangedByUser"),
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
@@ -146,7 +153,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("projectId").withSchema(TableName.Environment),
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicyEnvironment).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),

View File

@@ -69,6 +69,7 @@ import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
import { scanSecretPolicyViolations } from "../secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
import { sendApprovalEmailsFn } from "./secret-approval-request-fns";
@@ -537,6 +538,11 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.envId) {
throw new BadRequestError({
message: "The policy associated with this secret approval request is not linked to the environment."
});
}
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,
@@ -1407,6 +1413,20 @@ export const secretApprovalRequestServiceFactory = ({
projectId
});
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
...(data[SecretOperations.Create] || []),
...(data[SecretOperations.Update] || []).filter((el) => el.secretValue)
].map((el) => ({
secretKey: el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
if (createdSecrets && createdSecrets?.length) {

View File

@@ -7,12 +7,13 @@ import {
TRotationFactoryRevokeCredentials,
TRotationFactoryRotateCredentials
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
executeWithPotentialGateway,
SQL_CONNECTION_ALTER_LOGIN_STATEMENT
} from "@app/services/app-connection/shared/sql";
import { generatePassword } from "../utils";
import { DEFAULT_PASSWORD_REQUIREMENTS, generatePassword } from "../utils";
import {
TSqlCredentialsRotationGeneratedCredentials,
TSqlCredentialsRotationWithConnection
@@ -32,6 +33,11 @@ const redactPasswords = (e: unknown, credentials: TSqlCredentialsRotationGenerat
return redactedMessage;
};
const ORACLE_PASSWORD_REQUIREMENTS = {
...DEFAULT_PASSWORD_REQUIREMENTS,
length: 30
};
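// Shorter length for Oracle: older Oracle versions reject passwords longer than
// 30 characters (a version-specific limit; stated here as an assumption).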
export const sqlCredentialsRotationFactory: TRotationFactory<
TSqlCredentialsRotationWithConnection,
TSqlCredentialsRotationGeneratedCredentials
@@ -43,6 +49,9 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
secretsMapping
} = secretRotation;
const passwordRequirement =
connection.app === AppConnection.OracleDB ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
const executeOperation = <T>(
operation: (client: Knex) => Promise<T>,
credentialsOverride?: TSqlCredentialsRotationGeneratedCredentials[number]
@@ -65,7 +74,7 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
const $validateCredentials = async (credentials: TSqlCredentialsRotationGeneratedCredentials[number]) => {
try {
await executeOperation(async (client) => {
await client.raw("SELECT 1");
await client.raw(connection.app === AppConnection.OracleDB ? `SELECT 1 FROM DUAL` : `SELECT 1`);
}, credentials);
} catch (error) {
throw new Error(redactPasswords(error, [credentials]));
@@ -75,11 +84,13 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
const issueCredentials: TRotationFactoryIssueCredentials<TSqlCredentialsRotationGeneratedCredentials> = async (
callback
) => {
// For SQL, since we get existing users, we change both their passwords
// on issue to invalidate their existing passwords
const credentialsSet = [
{ username: username1, password: generatePassword() },
{ username: username2, password: generatePassword() }
{ username: username1, password: generatePassword(passwordRequirement) },
{ username: username2, password: generatePassword(passwordRequirement) }
];
try {
@@ -105,7 +116,10 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
credentialsToRevoke,
callback
) => {
const revokedCredentials = credentialsToRevoke.map(({ username }) => ({ username, password: generatePassword() }));
const revokedCredentials = credentialsToRevoke.map(({ username }) => ({
username,
password: generatePassword(passwordRequirement)
}));
try {
await executeOperation(async (client) => {
@@ -128,7 +142,10 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
callback
) => {
// generate new password for the next active user
const credentials = { username: activeIndex === 0 ? username2 : username1, password: generatePassword() };
const credentials = {
username: activeIndex === 0 ? username2 : username1,
password: generatePassword(passwordRequirement)
};
try {
await executeOperation(async (client) => {

View File

@@ -11,7 +11,7 @@ type TPasswordRequirements = {
allowedSymbols?: string;
};
const DEFAULT_PASSWORD_REQUIREMENTS: TPasswordRequirements = {
export const DEFAULT_PASSWORD_REQUIREMENTS: TPasswordRequirements = {
length: 48,
required: {
lowercase: 1,

View File

@@ -1,11 +1,21 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import { join } from "path";
import picomatch from "picomatch";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
createTempFolder,
deleteTempFolder,
readFindingsFile,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
@@ -46,6 +56,19 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
});
}
export function scanFile(inputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
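// `--exit-code=77` tells the CLI to exit with 77 when it finds a leak, so a 77 error
// below means a finding was detected (reject); any other outcome resolves as clean.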
exec(command, (error) => {
if (error && error.code === 77) {
reject(error);
} else {
resolve();
}
});
});
}
export const scanGitRepositoryAndGetFindings = async (
scanPath: string,
findingsPath: string,
@@ -140,3 +163,47 @@ export const parseScanErrorMessage = (err: unknown): string => {
? errorMessage
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
export const scanSecretPolicyViolations = async (
projectId: string,
secretPath: string,
secrets: { secretKey: string; secretValue: string }[],
ignoreValues: string[]
) => {
const appCfg = getConfig();
if (!appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED) {
return;
}
const match = appCfg.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.find(
(el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
);
if (!match) {
return;
}
const tempFolder = await createTempFolder();
try {
const scanPromises = secrets
.filter((secret) => !ignoreValues.includes(secret.secretValue))
.map(async (secret) => {
const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
try {
await scanFile(secretFilePath);
} catch (error) {
throw new BadRequestError({
message: `Secret value detected in ${secret.secretKey}. Please add this instead to the designated secrets path in the project.`,
name: "SecretPolicyViolation"
});
}
});
await Promise.all(scanPromises);
} finally {
await deleteTempFolder(tempFolder);
}
};
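In effect, a write is rejected as soon as any staged secret value triggers a finding. A hedged caller-side sketch (path and values illustrative):

try {
  await scanSecretPolicyViolations(projectId, "/params/app", [{ secretKey: "API_TOKEN", secretValue: "..." }], []);
} catch (e) {
  // BadRequestError named "SecretPolicyViolation"; e.message names the offending secretKey
}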

View File

@@ -704,7 +704,8 @@ export const PROJECTS = {
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
defaultProduct: "The default product in which the project will open"
defaultProduct: "The default product in which the project will open",
secretDetectionIgnoreValues: "The list of secret values to ignore for secret detection."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -2245,7 +2246,9 @@ export const AppConnections = {
},
AZURE_CLIENT_SECRETS: {
code: "The OAuth code to use to connect with Azure Client Secrets.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets."
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
AZURE_DEVOPS: {
code: "The OAuth code to use to connect with Azure DevOps.",
@@ -2373,6 +2376,10 @@ export const SecretSyncs = {
keyId: "The AWS KMS key ID or alias to use when encrypting parameters synced by Infisical.",
tags: "Optional tags to add to secrets synced by Infisical.",
syncSecretMetadataAsTags: `Whether Infisical secret metadata should be added as tags to secrets synced by Infisical.`
},
RENDER: {
autoRedeployServices:
"Whether Infisical should automatically redeploy the configured Render service upon secret changes."
}
},
DESTINATION_CONFIG: {

View File

@@ -204,6 +204,17 @@ const envSchema = z
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),
// Special Detection Feature
PARAMS_FOLDER_SECRET_DETECTION_PATHS: zpStr(
z
.string()
.optional()
.transform((val) => {
if (!val) return undefined;
return JSON.parse(val) as { secretPath: string; projectId: string }[];
})
),
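// Example value (illustrative projectId; secretPath accepts picomatch globs):
// PARAMS_FOLDER_SECRET_DETECTION_PATHS='[{"projectId":"<project-id>","secretPath":"/params/**"}]'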
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
@@ -261,10 +272,26 @@ const envSchema = z
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
// azure app
// Legacy Single Multi Purpose Azure App Connection
INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure App Configuration App Connection
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure Key Vault App Connection
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure Client Secrets App Connection
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure DevOps App Connection
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET: zpStr(z.string().optional()),
// datadog
SHOULD_USE_DATADOG_TRACER: zodStrBool.default("false"),
DATADOG_PROFILING_ENABLED: zodStrBool.default("false"),
@@ -341,7 +368,24 @@ const envSchema = z
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(","),
PARAMS_FOLDER_SECRET_DETECTION_ENABLED: (data.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.length ?? 0) > 0,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET
}));
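// Back-compat sketch (hypothetical values): the legacy shared pair seeds all four
// per-app Azure connections unless an app-specific variable overrides it, e.g.
//   INF_APP_CONNECTION_AZURE_CLIENT_ID=<shared-client-id>          // fallback for all four apps
//   INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=<devops-client-id>   // overrides DevOps only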
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
@@ -451,15 +495,54 @@ export const overwriteSchema: {
}
]
},
azure: {
name: "Azure",
azureAppConfiguration: {
name: "Azure App Configuration",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureKeyVault: {
name: "Azure Key Vault",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureClientSecrets: {
name: "Azure Client Secrets",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureDevOps: {
name: "Azure DevOps",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]

View File

@@ -14,7 +14,7 @@ import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal
import { ADMIN_CONFIG_DB_UUID } from "@app/services/super-admin/super-admin-service";
import { isBase64 } from "../../base64";
import { getConfig } from "../../config/env";
import { getConfig, TEnvConfig } from "../../config/env";
import { CryptographyError } from "../../errors";
import { logger } from "../../logger";
import { asymmetricFipsValidated } from "./asymmetric-fips";
@@ -106,12 +106,12 @@ const cryptographyFactory = () => {
}
};
const $setFipsModeEnabled = (enabled: boolean) => {
const $setFipsModeEnabled = (enabled: boolean, envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">) => {
// If FIPS is enabled, we need to validate that the ENCRYPTION_KEY is in a base64 format, and is a 256-bit key.
if (enabled) {
crypto.setFips(true);
const appCfg = getConfig();
const appCfg = envCfg || getConfig();
if (appCfg.ENCRYPTION_KEY) {
// we need to validate that the ENCRYPTION_KEY is a base64 encoded 256-bit key
@@ -141,14 +141,14 @@ const cryptographyFactory = () => {
$isInitialized = true;
};
const initialize = async (superAdminDAL: TSuperAdminDALFactory) => {
const initialize = async (superAdminDAL: TSuperAdminDALFactory, envCfg?: Pick<TEnvConfig, "ENCRYPTION_KEY">) => {
if ($isInitialized) {
return isFipsModeEnabled();
}
if (process.env.FIPS_ENABLED !== "true") {
logger.info("Cryptography module initialized in normal operation mode.");
$setFipsModeEnabled(false);
$setFipsModeEnabled(false, envCfg);
return false;
}
@@ -158,11 +158,11 @@ const cryptographyFactory = () => {
if (serverCfg) {
if (serverCfg.fipsEnabled) {
logger.info("[FIPS]: Instance is configured for FIPS mode of operation. Continuing startup with FIPS enabled.");
$setFipsModeEnabled(true);
$setFipsModeEnabled(true, envCfg);
return true;
}
logger.info("[FIPS]: Instance age predates FIPS mode inception date. Continuing without FIPS.");
$setFipsModeEnabled(false);
$setFipsModeEnabled(false, envCfg);
return false;
}
@@ -171,7 +171,7 @@ const cryptographyFactory = () => {
// TODO(daniel): check if it's an enterprise deployment
// if there is no server cfg, and FIPS_MODE is `true`, its a fresh FIPS deployment. We need to set the fipsEnabled to true.
$setFipsModeEnabled(true);
$setFipsModeEnabled(true, envCfg);
return true;
};

View File

@@ -162,6 +162,12 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
kubernetes: token?.identityAuth?.kubernetes
});
}
if (token?.identityAuth?.aws) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
aws: token?.identityAuth?.aws
});
}
break;
}
case AuthMode.SERVICE_TOKEN: {

View File

@@ -11,6 +11,7 @@ import {
accessApprovalPolicyBypasserDALFactory
} from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
import { accessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { accessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { accessApprovalRequestDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-dal";
import { accessApprovalRequestReviewerDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-reviewer-dal";
@@ -76,6 +77,7 @@ import {
secretApprovalPolicyBypasserDALFactory
} from "@app/ee/services/secret-approval-policy/secret-approval-policy-approver-dal";
import { secretApprovalPolicyDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-dal";
import { secretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { secretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
@@ -425,9 +427,11 @@ export const registerRoutes = async (
const accessApprovalPolicyApproverDAL = accessApprovalPolicyApproverDALFactory(db);
const accessApprovalPolicyBypasserDAL = accessApprovalPolicyBypasserDALFactory(db);
const accessApprovalRequestReviewerDAL = accessApprovalRequestReviewerDALFactory(db);
const accessApprovalPolicyEnvironmentDAL = accessApprovalPolicyEnvironmentDALFactory(db);
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const sapBypasserDAL = secretApprovalPolicyBypasserDALFactory(db);
const sapEnvironmentDAL = secretApprovalPolicyEnvironmentDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
@@ -561,6 +565,7 @@ export const registerRoutes = async (
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
secretApprovalPolicyBypasserDAL: sapBypasserDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL,
permissionService,
secretApprovalPolicyDAL,
licenseService,
@@ -1156,7 +1161,9 @@ export const registerRoutes = async (
keyStore,
licenseService,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL
});
const projectRoleService = projectRoleServiceFactory({
@@ -1231,6 +1238,7 @@ export const registerRoutes = async (
const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
projectDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,
@@ -1317,6 +1325,7 @@ export const registerRoutes = async (
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,

View File

@@ -93,6 +93,13 @@ export const sapPubSchema = SecretApprovalPoliciesSchema.merge(
name: z.string(),
slug: z.string()
}),
environments: z.array(
z.object({
id: z.string(),
name: z.string(),
slug: z.string()
})
),
projectId: z.string()
})
);
@@ -264,7 +271,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true,
showSnapshotsLegacy: true
showSnapshotsLegacy: true,
secretDetectionIgnoreValues: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({

View File

@@ -52,7 +52,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
kubernetesAutoFetchServiceAccountToken: z.boolean(),
paramsFolderSecretDetectionEnabled: z.boolean()
})
})
}
@@ -67,7 +68,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
fipsEnabled: crypto.isFipsModeEnabled(),
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
}
};
}
@@ -685,6 +687,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: false,
body: z.object({
email: z.string().email().trim().min(1),
password: z.string().trim().min(1),

View File

@@ -369,7 +369,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.describe(PROJECTS.UPDATE.slug),
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy),
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct)
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct),
secretDetectionIgnoreValues: z
.array(z.string())
.optional()
.describe(PROJECTS.UPDATE.secretDetectionIgnoreValues)
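// e.g. { "secretDetectionIgnoreValues": ["test-placeholder", "dummy-value"] } — illustrative
// values; secrets whose value exactly matches an entry are skipped by secret detection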
}),
response: {
200: z.object({
@@ -392,7 +396,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
hasDeleteProtection: req.body.hasDeleteProtection,
slug: req.body.slug,
secretSharing: req.body.secretSharing,
showSnapshotsLegacy: req.body.showSnapshotsLegacy
showSnapshotsLegacy: req.body.showSnapshotsLegacy,
secretDetectionIgnoreValues: req.body.secretDetectionIgnoreValues
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,

View File

@@ -1,9 +1,11 @@
import fastifyMultipart from "@fastify/multipart";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { VaultMappingType } from "@app/services/external-migration/external-migration-types";
const MB25_IN_BYTES = 26214400;
@@ -15,7 +17,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
bodyLimit: MB25_IN_BYTES,
url: "/env-key",
config: {
rateLimit: readLimit
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
@@ -52,4 +54,30 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
});
}
});
server.route({
method: "POST",
url: "/vault",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
vaultAccessToken: z.string(),
vaultNamespace: z.string().trim().optional(),
vaultUrl: z.string(),
mappingType: z.nativeEnum(VaultMappingType)
})
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
await server.services.migration.importVaultData({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body
});
}
});
};
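A hedged request sketch for the new endpoint (the /api/v3/external-migration base path follows from the v3 route registration below; all values are placeholders):

POST /api/v3/external-migration/vault
{
  "vaultAccessToken": "<vault-token>",
  "vaultUrl": "https://vault.example.com",
  "vaultNamespace": "admin",
  "mappingType": "<VaultMappingType value>"
}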

View File

@@ -11,5 +11,5 @@ export const registerV3Routes = async (server: FastifyZodProvider) => {
await server.register(registerUserRouter, { prefix: "/users" });
await server.register(registerSecretRouter, { prefix: "/secrets" });
await server.register(registerSecretBlindIndexRouter, { prefix: "/workspaces" });
await server.register(registerExternalMigrationRouter, { prefix: "/migrate" });
await server.register(registerExternalMigrationRouter, { prefix: "/external-migration" });
};

View File

@@ -14,13 +14,13 @@ import {
} from "./azure-app-configuration-connection-types";
export const getAzureAppConfigurationConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID } = getConfig();
return {
name: "Azure App Configuration" as const,
app: AppConnection.AzureAppConfiguration as const,
methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
};
};
@@ -29,9 +29,16 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const {
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
SITE_URL
} = getConfig();
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -47,8 +54,8 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -1,3 +1,4 @@
export enum AzureClientSecretsConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
@@ -16,18 +17,22 @@ import { AppConnection } from "../app-connection-enums";
import { AzureClientSecretsConnectionMethod } from "./azure-client-secrets-connection-enums";
import {
ExchangeCodeAzureResponse,
TAzureClientSecretsConnectionClientSecretCredentials,
TAzureClientSecretsConnectionConfig,
TAzureClientSecretsConnectionCredentials
} from "./azure-client-secrets-connection-types";
export const getAzureClientSecretsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID } = getConfig();
return {
name: "Azure Client Secrets" as const,
app: AppConnection.AzureClientSecrets as const,
methods: Object.values(AzureClientSecretsConnectionMethod) as [AzureClientSecretsConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
methods: Object.values(AzureClientSecretsConnectionMethod) as [
AzureClientSecretsConnectionMethod.OAuth,
AzureClientSecretsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID
};
};
@@ -37,12 +42,6 @@ export const getAzureConnectionAccessToken = async (
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}
const appConnection = await appConnectionDAL.findById(connectionId);
if (!appConnection) {
@@ -63,104 +62,195 @@ export const getAzureConnectionAccessToken = async (
const { refreshToken } = credentials;
const currentTime = Date.now();
switch (appConnection.method) {
case AzureClientSecretsConnectionMethod.OAuth:
if (
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure OAuth environment variables have not been configured`
});
}
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
refresh_token: refreshToken
})
);
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
refresh_token: refreshToken
})
);
const updatedCredentials = {
...credentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
const updatedCredentials = {
...credentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId: appConnection.orgId,
kmsService
});
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
return data.access_token;
case AzureClientSecretsConnectionMethod.ClientSecret:
const accessTokenCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureClientSecretsConnectionClientSecretCredentials;
const { accessToken, expiresAt, clientId, clientSecret, tenantId } = accessTokenCredentials;
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return accessToken;
}
return data.access_token;
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://graph.microsoft.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
const updatedClientCredentials = {
...accessTokenCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};
const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
return clientData.access_token;
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${appConnection.method as AzureClientSecretsConnectionMethod}`
});
}
};
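The ClientSecret branch above reuses a cached token until it is within five minutes of expiry, then fetches a fresh one with the client_credentials grant. A self-contained sketch of that pattern follows; the in-memory cache and function name are illustrative, not the service's actual API.

import axios from "axios";

// Illustrative token cache; the real service persists encrypted credentials
// through the app connection DAL rather than module state.
let cached: { accessToken: string; expiresAt: number } | null = null;

export const getClientCredentialsToken = async (tenantId: string, clientId: string, clientSecret: string) => {
  const now = Date.now();
  const FIVE_MINUTES_MS = 5 * 60 * 1000;
  // Reuse the cached token while it has more than five minutes of life left.
  if (cached && cached.expiresAt > now + FIVE_MINUTES_MS) return cached.accessToken;
  const { data } = await axios.post<{ access_token: string; expires_in: number }>(
    `https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`,
    new URLSearchParams({
      grant_type: "client_credentials",
      scope: "https://graph.microsoft.com/.default",
      client_id: clientId,
      client_secret: clientSecret
    })
  );
  cached = { accessToken: data.access_token, expiresAt: now + data.expires_in * 1000 };
  return cached.accessToken;
};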
export const validateAzureClientSecretsConnectionCredentials = async (config: TAzureClientSecretsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
if (!SITE_URL) {
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
const {
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
SITE_URL
} = getConfig();
switch (method) {
case AzureClientSecretsConnectionMethod.OAuth:
if (!SITE_URL) {
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
return {
tenantId: inputCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};
case AzureClientSecretsConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials;
try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://graph.microsoft.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureClientSecretsConnectionMethod}`
});
}
};

View File

@@ -26,6 +26,36 @@ export const AzureClientSecretsConnectionOAuthOutputCredentialsSchema = z.object
expiresAt: z.number()
});
export const AzureClientSecretsConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_CLIENT_SECRETS.clientId),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_CLIENT_SECRETS.clientSecret),
tenantId: z
.string()
.uuid()
.trim()
.min(1, "Tenant ID required")
.describe(AppConnections.CREDENTIALS.AZURE_CLIENT_SECRETS.tenantId)
});
export const AzureClientSecretsConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureClientSecretsConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -34,6 +64,14 @@ export const ValidateAzureClientSecretsConnectionCredentialsSchema = z.discrimin
credentials: AzureClientSecretsConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureClientSecrets).credentials
)
}),
z.object({
method: z
.literal(AzureClientSecretsConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureClientSecrets).method),
credentials: AzureClientSecretsConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureClientSecrets).credentials
)
})
]);
@@ -43,9 +81,13 @@ export const CreateAzureClientSecretsConnectionSchema = ValidateAzureClientSecre
export const UpdateAzureClientSecretsConnectionSchema = z
.object({
credentials: AzureClientSecretsConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureClientSecrets).credentials
)
credentials: z
.union([
AzureClientSecretsConnectionOAuthInputCredentialsSchema,
AzureClientSecretsConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureClientSecrets).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureClientSecrets));
@@ -59,6 +101,10 @@ export const AzureClientSecretsConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureClientSecretsConnectionMethod.OAuth),
credentials: AzureClientSecretsConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureClientSecretsConnectionMethod.ClientSecret),
credentials: AzureClientSecretsConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -69,6 +115,13 @@ export const SanitizedAzureClientSecretsConnectionSchema = z.discriminatedUnion(
credentials: AzureClientSecretsConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureClientSecretsConnectionSchema.extend({
method: z.literal(AzureClientSecretsConnectionMethod.ClientSecret),
credentials: AzureClientSecretsConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);
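To illustrate how the discriminated union above dispatches on `method`, here is a hypothetical payload run through the validation schema; the UUIDs and secret value are made up.

// zod narrows `credentials` based on the "method" discriminator, so this
// payload is validated against the ClientSecret input schema.
const parsed = ValidateAzureClientSecretsConnectionCredentialsSchema.parse({
  method: "client-secret",
  credentials: {
    clientId: "00000000-0000-0000-0000-000000000000",
    clientSecret: "example-secret-value",
    tenantId: "11111111-1111-1111-1111-111111111111"
  }
});
// parsed.method is now typed as AzureClientSecretsConnectionMethod.ClientSecret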

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureClientSecretsConnectionClientSecretOutputCredentialsSchema,
AzureClientSecretsConnectionOAuthOutputCredentialsSchema,
AzureClientSecretsConnectionSchema,
CreateAzureClientSecretsConnectionSchema,
@@ -30,6 +31,10 @@ export type TAzureClientSecretsConnectionCredentials = z.infer<
typeof AzureClientSecretsConnectionOAuthOutputCredentialsSchema
>;
export type TAzureClientSecretsConnectionClientSecretCredentials = z.infer<
typeof AzureClientSecretsConnectionClientSecretOutputCredentialsSchema
>;
export interface ExchangeCodeAzureResponse {
token_type: string;
scope: string;

View File

@@ -23,7 +23,7 @@ import {
} from "./azure-devops-types";
export const getAzureDevopsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID } = getConfig();
return {
name: "Azure DevOps" as const,
@@ -32,7 +32,7 @@ export const getAzureDevopsConnectionListItem = () => {
AzureDevOpsConnectionMethod.OAuth,
AzureDevOpsConnectionMethod.AccessToken
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
};
};
@@ -63,7 +63,7 @@ export const getAzureDevopsConnection = async (
switch (appConnection.method) {
case AzureDevOpsConnectionMethod.OAuth:
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
@@ -81,8 +81,8 @@ export const getAzureDevopsConnection = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
refresh_token: refreshToken
})
);
@@ -119,7 +119,8 @@ export const getAzureDevopsConnection = async (
export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDevOpsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID, INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET, SITE_URL } =
getConfig();
switch (method) {
case AzureDevOpsConnectionMethod.OAuth:
@@ -127,7 +128,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -144,8 +145,8 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -26,7 +26,10 @@ export const getAzureConnectionAccessToken = async (
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
@@ -57,8 +60,8 @@ export const getAzureConnectionAccessToken = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: credentials.refreshToken
})
);
@@ -92,22 +95,23 @@ export const getAzureConnectionAccessToken = async (
};
export const getAzureKeyVaultConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID } = getConfig();
return {
name: "Azure Key Vault" as const,
app: AppConnection.AzureKeyVault as const,
methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
};
};
export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureKeyVaultConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
getConfig();
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -123,8 +127,8 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -1,32 +1,26 @@
import slugify from "@sindresorhus/slugify";
import sjcl from "sjcl";
import tweetnacl from "tweetnacl";
import tweetnaclUtil from "tweetnacl-util";
import { SecretType, TSecretFolders } from "@app/db/schemas";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { chunkArray } from "@app/lib/fn";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { CommitType, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectServiceFactory } from "../project/project-service";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { fnSecretBulkInsert, getAllSecretReferences } from "../secret-v2-bridge/secret-v2-bridge-fns";
import type { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bridge-service";
import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
import { InfisicalImportData, TEnvKeyExportJSON, TImportInfisicalDataCreate } from "./external-migration-types";
import { TFolderCommitServiceFactory } from "../../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../../kms/kms-service";
import { TProjectDALFactory } from "../../project/project-dal";
import { TProjectServiceFactory } from "../../project/project-service";
import { TProjectEnvDALFactory } from "../../project-env/project-env-dal";
import { TProjectEnvServiceFactory } from "../../project-env/project-env-service";
import { TResourceMetadataDALFactory } from "../../resource-metadata/resource-metadata-dal";
import { TSecretFolderDALFactory } from "../../secret-folder/secret-folder-dal";
import { TSecretFolderVersionDALFactory } from "../../secret-folder/secret-folder-version-dal";
import { TSecretTagDALFactory } from "../../secret-tag/secret-tag-dal";
import { TSecretV2BridgeDALFactory } from "../../secret-v2-bridge/secret-v2-bridge-dal";
import type { TSecretV2BridgeServiceFactory } from "../../secret-v2-bridge/secret-v2-bridge-service";
import { TSecretVersionV2DALFactory } from "../../secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "../../secret-v2-bridge/secret-version-tag-dal";
import { InfisicalImportData, TEnvKeyExportJSON, TImportInfisicalDataCreate } from "../external-migration-types";
export type TImportDataIntoInfisicalDTO = {
projectDAL: Pick<TProjectDALFactory, "transaction">;
@@ -499,326 +493,3 @@ export const parseEnvKeyDataFn = async (decryptedJson: string): Promise<Infisica
return infisicalImportData;
};
export const importDataIntoInfisicalFn = async ({
projectService,
projectEnvDAL,
projectDAL,
secretDAL,
kmsService,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL,
resourceMetadataDAL,
folderVersionDAL,
folderCommitService,
input: { data, actor, actorId, actorOrgId, actorAuthMethod }
}: TImportDataIntoInfisicalDTO) => {
// Import data to infisical
if (!data || !data.projects) {
throw new BadRequestError({ message: "No projects found in data" });
}
const originalToNewProjectId = new Map<string, string>();
const originalToNewEnvironmentId = new Map<
string,
{ envId: string; envSlug: string; rootFolderId: string; projectId: string }
>();
const originalToNewFolderId = new Map<
string,
{
folderId: string;
projectId: string;
}
>();
const projectsNotImported: string[] = [];
await projectDAL.transaction(async (tx) => {
for await (const project of data.projects) {
const newProject = await projectService
.createProject({
actor,
actorId,
actorOrgId,
actorAuthMethod,
workspaceName: project.name,
createDefaultEnvs: false,
tx
})
.catch((e) => {
logger.error(e, `Failed to import to project [name:${project.name}]`);
throw new BadRequestError({ message: `Failed to import to project [name:${project.name}]` });
});
originalToNewProjectId.set(project.id, newProject.id);
}
// Import environments
if (data.environments) {
for await (const environment of data.environments) {
const projectId = originalToNewProjectId.get(environment.projectId);
const slug = slugify(`${environment.name}-${alphaNumericNanoId(4)}`);
if (!projectId) {
projectsNotImported.push(environment.projectId);
// eslint-disable-next-line no-continue
continue;
}
const existingEnv = await projectEnvDAL.findOne({ projectId, slug }, tx);
if (existingEnv) {
throw new BadRequestError({
message: `Environment with slug '${slug}' already exists`,
name: "CreateEnvironment"
});
}
const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx);
const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
originalToNewEnvironmentId.set(environment.id, {
envSlug: doc.slug,
envId: doc.id,
rootFolderId: folder.id,
projectId
});
}
}
if (data.folders) {
for await (const folder of data.folders) {
const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string);
if (!parentEnv) {
// eslint-disable-next-line no-continue
continue;
}
const newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentEnv.envId,
parentId: parentEnv.rootFolderId
},
tx
);
const newFolderVersion = await folderVersionDAL.create(
{
name: newFolder.name,
envId: newFolder.envId,
version: newFolder.version,
folderId: newFolder.id
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Changed by external migration",
folderId: parentEnv.rootFolderId,
changes: [
{
type: CommitType.ADD,
folderVersionId: newFolderVersion.id
}
]
},
tx
);
originalToNewFolderId.set(folder.id, {
folderId: newFolder.id,
projectId: parentEnv.projectId
});
}
}
// Useful for debugging:
// console.log("data.secrets", data.secrets);
// console.log("data.folders", data.folders);
// console.log("data.environment", data.environments);
if (data.secrets && data.secrets.length > 0) {
const mappedToEnvironmentId = new Map<
string,
{
secretKey: string;
secretValue: string;
folderId?: string;
isFromBlock?: boolean;
}[]
>();
for (const secret of data.secrets) {
const targetId = secret.folderId || secret.environmentId;
// Skip if we can't find either an environment or folder mapping for this secret
if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) {
logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret");
// eslint-disable-next-line no-continue
continue;
}
if (!mappedToEnvironmentId.has(targetId)) {
mappedToEnvironmentId.set(targetId, []);
}
const alreadyHasSecret = mappedToEnvironmentId
.get(targetId)!
.find((el) => el.secretKey === secret.name && el.folderId === secret.folderId);
if (alreadyHasSecret && alreadyHasSecret.isFromBlock) {
// remove the existing secret if any
mappedToEnvironmentId
.get(targetId)!
.splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1);
}
mappedToEnvironmentId.get(targetId)!.push({
secretKey: secret.name,
secretValue: secret.value || "",
folderId: secret.folderId,
isFromBlock: secret.appBlockOrderIndex !== undefined
});
}
// for each entry in mappedToEnvironmentId
for await (const [targetId, secrets] of mappedToEnvironmentId) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId);
let selectedFolder: TSecretFolders | undefined;
let selectedProjectId: string | undefined;
// Case 1: Secret belongs to a folder / branch / branch of a block
const foundFolder = originalToNewFolderId.get(targetId);
if (foundFolder) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder");
selectedFolder = await folderDAL.findById(foundFolder.folderId, tx);
selectedProjectId = foundFolder.projectId;
} else {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment");
const environment = data.environments.find((env) => env.id === targetId);
if (!environment) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const projectId = originalToNewProjectId.get(environment.projectId)!;
if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
}
const env = originalToNewEnvironmentId.get(targetId);
if (!env) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx);
if (!folder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`,
name: "Create secret"
});
}
selectedFolder = folder;
selectedProjectId = projectId;
}
if (!selectedFolder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
if (!selectedProjectId) {
throw new NotFoundError({
message: `Project not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: selectedProjectId
},
tx
);
const secretBatches = chunkArray(secrets, 2500);
for await (const secretBatch of secretBatches) {
const secretsByKeys = await secretDAL.findBySecretKeys(
selectedFolder.id,
secretBatch.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
})),
tx
);
if (secretsByKeys.length) {
throw new BadRequestError({
message: `Secrets already exist: ${secretsByKeys.map((el) => el.key).join(",")}`
});
}
await fnSecretBulkInsert({
inputSecrets: secretBatch.map((el) => {
const references = getAllSecretReferences(el.secretValue).nestedReferences;
return {
version: 1,
encryptedValue: el.secretValue
? secretManagerEncrypt({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
: undefined,
key: el.secretKey,
references,
type: SecretType.Shared
};
}),
folderId: selectedFolder.id,
orgId: actorOrgId,
resourceMetadataDAL,
secretDAL,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderCommitService,
actor: {
type: actor,
actorId
},
tx
});
}
}
}
});
return { projectsNotImported };
};

View File

@@ -0,0 +1,352 @@
import slugify from "@sindresorhus/slugify";
import { SecretType, TSecretFolders } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { CommitType } from "@app/services/folder-commit/folder-commit-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { fnSecretBulkInsert, getAllSecretReferences } from "@app/services/secret-v2-bridge/secret-v2-bridge-fns";
import { TImportDataIntoInfisicalDTO } from "./envkey";
export const importDataIntoInfisicalFn = async ({
projectService,
projectEnvDAL,
projectDAL,
secretDAL,
kmsService,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderDAL,
resourceMetadataDAL,
folderVersionDAL,
folderCommitService,
input: { data, actor, actorId, actorOrgId, actorAuthMethod }
}: TImportDataIntoInfisicalDTO) => {
// Import data to infisical
if (!data || !data.projects) {
throw new BadRequestError({ message: "No projects found in data" });
}
const originalToNewProjectId = new Map<string, string>();
const originalToNewEnvironmentId = new Map<
string,
{ envId: string; envSlug: string; rootFolderId?: string; projectId: string }
>();
const originalToNewFolderId = new Map<
string,
{
envId: string;
envSlug: string;
folderId: string;
projectId: string;
}
>();
const projectsNotImported: string[] = [];
await projectDAL.transaction(async (tx) => {
for await (const project of data.projects) {
const newProject = await projectService
.createProject({
actor,
actorId,
actorOrgId,
actorAuthMethod,
workspaceName: project.name,
createDefaultEnvs: false,
tx
})
.catch((e) => {
logger.error(e, `Failed to import to project [name:${project.name}]`);
throw new BadRequestError({ message: `Failed to import to project [name:${project.name}]` });
});
originalToNewProjectId.set(project.id, newProject.id);
}
// Import environments
if (data.environments) {
for await (const environment of data.environments) {
const projectId = originalToNewProjectId.get(environment.projectId);
const slug = slugify(`${environment.name}-${alphaNumericNanoId(4)}`);
if (!projectId) {
projectsNotImported.push(environment.projectId);
// eslint-disable-next-line no-continue
continue;
}
const existingEnv = await projectEnvDAL.findOne({ projectId, slug }, tx);
if (existingEnv) {
throw new BadRequestError({
message: `Environment with slug '${slug}' already exists`,
name: "CreateEnvironment"
});
}
const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx);
const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
originalToNewEnvironmentId.set(environment.id, {
envSlug: doc.slug,
envId: doc.id,
rootFolderId: folder.id,
projectId
});
}
}
if (data.folders) {
for await (const folder of data.folders) {
const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string);
const parentFolder = originalToNewFolderId.get(folder.parentFolderId as string);
let newFolder: TSecretFolders;
if (parentEnv?.rootFolderId) {
newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentEnv.envId,
parentId: parentEnv.rootFolderId
},
tx
);
} else if (parentFolder) {
newFolder = await folderDAL.create(
{
name: folder.name,
envId: parentFolder.envId,
parentId: parentFolder.folderId
},
tx
);
} else {
logger.info({ folder }, "No parent environment or folder mapping found for folder");
// eslint-disable-next-line no-continue
continue;
}
const newFolderVersion = await folderVersionDAL.create(
{
name: newFolder.name,
envId: newFolder.envId,
version: newFolder.version,
folderId: newFolder.id
},
tx
);
await folderCommitService.createCommit(
{
actor: {
type: actor,
metadata: {
id: actorId
}
},
message: "Changed by external migration",
folderId: parentEnv?.rootFolderId || parentFolder?.folderId || "",
changes: [
{
type: CommitType.ADD,
folderVersionId: newFolderVersion.id
}
]
},
tx
);
originalToNewFolderId.set(folder.id, {
folderId: newFolder.id,
envId: parentEnv?.envId || parentFolder?.envId || "",
envSlug: parentEnv?.envSlug || parentFolder?.envSlug || "",
projectId: parentEnv?.projectId || parentFolder?.projectId || ""
});
}
}
// Useful for debugging:
// console.log("data.secrets", data.secrets);
// console.log("data.folders", data.folders);
// console.log("data.environment", data.environments);
if (data.secrets && data.secrets.length > 0) {
const mappedToEnvironmentId = new Map<
string,
{
secretKey: string;
secretValue: string;
folderId?: string;
isFromBlock?: boolean;
}[]
>();
for (const secret of data.secrets) {
const targetId = secret.folderId || secret.environmentId;
// Skip if we can't find either an environment or folder mapping for this secret
if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) {
logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret");
// eslint-disable-next-line no-continue
continue;
}
if (!mappedToEnvironmentId.has(targetId)) {
mappedToEnvironmentId.set(targetId, []);
}
const alreadyHasSecret = mappedToEnvironmentId
.get(targetId)!
.find((el) => el.secretKey === secret.name && el.folderId === secret.folderId);
if (alreadyHasSecret && alreadyHasSecret.isFromBlock) {
// remove the existing secret if any
mappedToEnvironmentId
.get(targetId)!
.splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1);
}
mappedToEnvironmentId.get(targetId)!.push({
secretKey: secret.name,
secretValue: secret.value || "",
folderId: secret.folderId,
isFromBlock: secret.appBlockOrderIndex !== undefined
});
}
// for each entry in mappedToEnvironmentId
for await (const [targetId, secrets] of mappedToEnvironmentId) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId);
let selectedFolder: TSecretFolders | undefined;
let selectedProjectId: string | undefined;
// Case 1: Secret belongs to a folder / branch / branch of a block
const foundFolder = originalToNewFolderId.get(targetId);
if (foundFolder) {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder");
selectedFolder = await folderDAL.findById(foundFolder.folderId, tx);
selectedProjectId = foundFolder.projectId;
} else {
logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment");
const environment = data.environments.find((env) => env.id === targetId);
if (!environment) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const projectId = originalToNewProjectId.get(environment.projectId)!;
if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
}
const env = originalToNewEnvironmentId.get(targetId);
if (!env) {
logger.info(
{
targetId
},
"[importDataIntoInfisicalFn]: Could not find environment for secret"
);
// eslint-disable-next-line no-continue
continue;
}
const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx);
if (!folder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`,
name: "Create secret"
});
}
selectedFolder = folder;
selectedProjectId = projectId;
}
if (!selectedFolder) {
throw new NotFoundError({
message: `Folder not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
if (!selectedProjectId) {
throw new NotFoundError({
message: `Project not found for the given environment slug & secret path`,
name: "CreateSecret"
});
}
const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: selectedProjectId
},
tx
);
const secretBatches = chunkArray(secrets, 2500);
for await (const secretBatch of secretBatches) {
const secretsByKeys = await secretDAL.findBySecretKeys(
selectedFolder.id,
secretBatch.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
})),
tx
);
if (secretsByKeys.length) {
throw new BadRequestError({
message: `Secrets already exist: ${secretsByKeys.map((el) => el.key).join(",")}`
});
}
await fnSecretBulkInsert({
inputSecrets: secretBatch.map((el) => {
const references = getAllSecretReferences(el.secretValue).nestedReferences;
return {
version: 1,
encryptedValue: el.secretValue
? secretManagerEncrypt({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob
: undefined,
key: el.secretKey,
references,
type: SecretType.Shared
};
}),
folderId: selectedFolder.id,
orgId: actorOrgId,
resourceMetadataDAL,
secretDAL,
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
folderCommitService,
actor: {
type: actor,
actorId
},
tx
});
}
}
}
});
return { projectsNotImported };
};

View File

@@ -0,0 +1,3 @@
export * from "./envkey";
export * from "./import";
export * from "./vault";

View File

@@ -0,0 +1,341 @@
import axios, { AxiosInstance } from "axios";
import { v4 as uuidv4 } from "uuid";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { InfisicalImportData, VaultMappingType } from "../external-migration-types";
type VaultData = {
namespace: string;
mount: string;
path: string;
secretData: Record<string, string>;
};
const vaultFactory = () => {
const getMounts = async (request: AxiosInstance) => {
const response = await request
.get<
Record<
string,
{
accessor: string;
options: {
version?: string;
} | null;
type: string;
}
>
>("/v1/sys/mounts")
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault mounts");
}
throw err;
});
return response.data;
};
const getPaths = async (
request: AxiosInstance,
{ mountPath, secretPath = "" }: { mountPath: string; secretPath?: string }
) => {
try {
// For KV v2: /v1/{mount}/metadata/{path}?list=true
const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
const response = await request.get<{
data: {
keys: string[];
};
}>(`/v1/${path}?list=true`);
return response.data.data.keys;
} catch (err) {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault paths");
if (err.response?.status === 404) {
return null;
}
}
throw err;
}
};
const getSecrets = async (
request: AxiosInstance,
{ mountPath, secretPath }: { mountPath: string; secretPath: string }
) => {
// For KV v2: /v1/{mount}/data/{path}
const response = await request
.get<{
data: {
data: Record<string, string>; // KV v2 has nested data structure
metadata: {
created_time: string;
deletion_time: string;
destroyed: boolean;
version: number;
};
};
}>(`/v1/${mountPath}/data/${secretPath}`)
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault secret");
}
throw err;
});
return response.data.data.data;
};
// helper function to check if a mount is KV v2 (will be useful if we add support for Vault KV v1)
// const isKvV2Mount = (mountInfo: { type: string; options?: { version?: string } | null }) => {
// return mountInfo.type === "kv" && mountInfo.options?.version === "2";
// };
const recursivelyGetAllPaths = async (
request: AxiosInstance,
mountPath: string,
currentPath: string = ""
): Promise<string[]> => {
const paths = await getPaths(request, { mountPath, secretPath: currentPath });
if (paths === null || paths.length === 0) {
return [];
}
const allSecrets: string[] = [];
for await (const path of paths) {
const cleanPath = path.endsWith("/") ? path.slice(0, -1) : path;
const fullItemPath = currentPath ? `${currentPath}/${cleanPath}` : cleanPath;
if (path.endsWith("/")) {
// it's a folder so we recurse into it
const subSecrets = await recursivelyGetAllPaths(request, mountPath, fullItemPath);
allSecrets.push(...subSecrets);
} else {
// it's a secret so we add it to our results
allSecrets.push(`${mountPath}/${fullItemPath}`);
}
}
return allSecrets;
};
async function collectVaultData({
baseUrl,
namespace,
accessToken
}: {
baseUrl: string;
namespace?: string;
accessToken: string;
}): Promise<VaultData[]> {
const request = axios.create({
baseURL: baseUrl,
headers: {
"X-Vault-Token": accessToken,
...(namespace ? { "X-Vault-Namespace": namespace } : {})
}
});
const allData: VaultData[] = [];
// Get all mounts in this namespace
const mounts = await getMounts(request);
for (const mount of Object.keys(mounts)) {
if (!mount.endsWith("/")) {
delete mounts[mount];
}
}
for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
// skip non-KV mounts
if (!mountInfo.type.startsWith("kv")) {
// eslint-disable-next-line no-continue
continue;
}
// get all paths in this mount
const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`);
const cleanMountPath = mountPath.replace(/\/$/, "");
for await (const secretPath of paths) {
// get the actual secret data
const secretData = await getSecrets(request, {
mountPath: cleanMountPath,
secretPath: secretPath.replace(`${cleanMountPath}/`, "")
});
allData.push({
namespace: namespace || "",
mount: mountPath.replace(/\/$/, ""),
path: secretPath.replace(`${cleanMountPath}/`, ""),
secretData
});
}
}
return allData;
}
return {
collectVaultData,
getMounts,
getPaths,
getSecrets,
recursivelyGetAllPaths
};
};
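A hedged usage sketch of the factory against a local development Vault; the address, token, and namespace are placeholders.

// Collects every KV v2 secret reachable by the token across all kv mounts.
const run = async () => {
  const vault = vaultFactory();
  const vaultData = await vault.collectVaultData({
    baseUrl: "http://127.0.0.1:8200", // placeholder dev server address
    accessToken: "hvs.example-token", // placeholder Vault token
    namespace: "admin" // omit on non-enterprise Vault, which lacks namespaces
  });
  console.log(`Collected secrets from ${vaultData.length} paths`);
};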
export const transformToInfisicalFormatNamespaceToProjects = (
vaultData: VaultData[],
mappingType: VaultMappingType
): InfisicalImportData => {
const projects: Array<{ name: string; id: string }> = [];
const environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }> = [];
const folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }> = [];
const secrets: Array<{ id: string; name: string; environmentId: string; value: string; folderId?: string }> = [];
// track created entities to avoid duplicates
const projectMap = new Map<string, string>(); // namespace -> projectId
const environmentMap = new Map<string, string>(); // namespace:mount -> environmentId
const folderMap = new Map<string, string>(); // namespace:mount:folderPath -> folderId
let environmentId: string = "";
for (const data of vaultData) {
const { namespace, mount, path, secretData } = data;
if (mappingType === VaultMappingType.Namespace) {
// create project (namespace)
if (!projectMap.has(namespace)) {
const projectId = uuidv4();
projectMap.set(namespace, projectId);
projects.push({
name: namespace,
id: projectId
});
}
const projectId = projectMap.get(namespace)!;
// create environment (mount)
const envKey = `${namespace}:${mount}`;
if (!environmentMap.has(envKey)) {
environmentId = uuidv4();
environmentMap.set(envKey, environmentId);
environments.push({
name: mount,
id: environmentId,
projectId
});
}
environmentId = environmentMap.get(envKey)!;
} else if (mappingType === VaultMappingType.KeyVault) {
if (!projectMap.has(mount)) {
const projectId = uuidv4();
projectMap.set(mount, projectId);
projects.push({
name: mount,
id: projectId
});
}
const projectId = projectMap.get(mount)!;
// create a single "Production" environment per project, since there is no reliable way to derive environments from Vault
if (!environmentMap.has(mount)) {
environmentId = uuidv4();
environmentMap.set(mount, environmentId);
environments.push({
name: "Production",
id: environmentId,
projectId
});
}
environmentId = environmentMap.get(mount)!;
}
// create folder structure
let currentFolderId: string | undefined;
let currentPath = "";
if (path.includes("/")) {
const pathParts = path.split("/").filter(Boolean);
const folderParts = pathParts;
// create nested folder structure for the entire path
for (const folderName of folderParts) {
currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
const folderKey = `${namespace}:${mount}:${currentPath}`;
if (!folderMap.has(folderKey)) {
const folderId = uuidv4();
folderMap.set(folderKey, folderId);
folders.push({
id: folderId,
name: folderName,
environmentId,
parentFolderId: currentFolderId || environmentId
});
currentFolderId = folderId;
} else {
currentFolderId = folderMap.get(folderKey)!;
}
}
}
for (const [key, value] of Object.entries(secretData)) {
secrets.push({
id: uuidv4(),
name: key,
environmentId,
value: String(value),
folderId: currentFolderId
});
}
}
return {
projects,
environments,
folders,
secrets
};
};
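A worked example of the mapping in namespace mode, using a made-up Vault entry. Note that every path segment becomes a folder, including the final one, so the KV pairs of a Vault secret land as individual Infisical secrets inside a folder named after that secret.

const sample: VaultData[] = [
  {
    namespace: "admin",
    mount: "kv",
    path: "app/backend/db",
    secretData: { DB_PASSWORD: "hunter2" } // made-up secret data
  }
];
const out = transformToInfisicalFormatNamespaceToProjects(sample, VaultMappingType.Namespace);
// out.projects     -> one project named "admin"
// out.environments -> one environment named "kv" under that project
// out.folders      -> nested folders "app" -> "backend" -> "db"
// out.secrets      -> one secret DB_PASSWORD=hunter2 inside the "db" folder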
export const importVaultDataFn = async ({
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType
}: {
vaultAccessToken: string;
vaultNamespace?: string;
vaultUrl: string;
mappingType: VaultMappingType;
}) => {
await blockLocalAndPrivateIpAddresses(vaultUrl);
if (mappingType === VaultMappingType.Namespace && !vaultNamespace) {
throw new BadRequestError({
message: "Vault namespace is required when project mapping type is set to namespace."
});
}
const vaultApi = vaultFactory();
const vaultData = await vaultApi.collectVaultData({
accessToken: vaultAccessToken,
baseUrl: vaultUrl,
namespace: vaultNamespace
});
const infisicalData = transformToInfisicalFormatNamespaceToProjects(vaultData, mappingType);
return infisicalData;
};

View File

@@ -19,7 +19,7 @@ import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-d
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { importDataIntoInfisicalFn } from "./external-migration-fns";
import { ExternalPlatforms, TImportInfisicalDataCreate } from "./external-migration-types";
import { ExternalPlatforms, ImportType, TImportInfisicalDataCreate } from "./external-migration-types";
export type TExternalMigrationQueueFactoryDep = {
smtpService: TSmtpService;
@@ -67,6 +67,7 @@ export const externalMigrationQueueFactory = ({
const startImport = async (dto: {
actorEmail: string;
data: {
importType: ImportType;
iv: string;
tag: string;
ciphertext: string;

View File

@@ -4,9 +4,9 @@ import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
import { TUserDALFactory } from "../user/user-dal";
import { decryptEnvKeyDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
import { decryptEnvKeyDataFn, importVaultDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
import { TExternalMigrationQueueFactory } from "./external-migration-queue";
import { TImportEnvKeyDataCreate } from "./external-migration-types";
import { ImportType, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
type TExternalMigrationServiceFactoryDep = {
permissionService: TPermissionServiceFactory;
@@ -28,7 +28,7 @@ export const externalMigrationServiceFactory = ({
actorId,
actorOrgId,
actorAuthMethod
}: TImportEnvKeyDataCreate) => {
}: TImportEnvKeyDataDTO) => {
if (crypto.isFipsModeEnabled()) {
throw new BadRequestError({ message: "EnvKey migration is not supported when running in FIPS mode." });
}
@@ -60,11 +60,65 @@ export const externalMigrationServiceFactory = ({
await externalMigrationQueue.startImport({
actorEmail: user.email!,
data: encrypted
data: {
importType: ImportType.EnvKey,
...encrypted
}
});
};
const importVaultData = async ({
vaultAccessToken,
vaultNamespace,
mappingType,
vaultUrl,
actor,
actorId,
actorOrgId,
actorAuthMethod
}: TImportVaultDataDTO) => {
const { membership } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
if (membership.role !== OrgMembershipRole.Admin) {
throw new ForbiddenRequestError({ message: "Only admins can import data" });
}
const user = await userDAL.findById(actorId);
const vaultData = await importVaultDataFn({
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType
});
const stringifiedJson = JSON.stringify({
data: vaultData,
actor,
actorId,
actorOrgId,
actorAuthMethod
});
const encrypted = crypto.encryption().symmetric().encryptWithRootEncryptionKey(stringifiedJson);
await externalMigrationQueue.startImport({
actorEmail: user.email!,
data: {
importType: ImportType.Vault,
...encrypted
}
});
};
return {
importEnvKeyData
importEnvKeyData,
importVaultData
};
};

View File

@@ -1,5 +1,17 @@
import { TOrgPermission } from "@app/lib/types";
import { ActorAuthMethod, ActorType } from "../auth/auth-type";
export enum ImportType {
EnvKey = "envkey",
Vault = "vault"
}
export enum VaultMappingType {
Namespace = "namespace",
KeyVault = "key-vault"
}
export type InfisicalImportData = {
projects: Array<{ name: string; id: string }>;
environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }>;
@@ -14,14 +26,17 @@ export type InfisicalImportData = {
}>;
};
export type TImportEnvKeyDataCreate = {
export type TImportEnvKeyDataDTO = {
decryptionKey: string;
encryptedJson: { nonce: string; data: string };
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
};
} & Omit<TOrgPermission, "orgId">;
export type TImportVaultDataDTO = {
vaultAccessToken: string;
vaultNamespace?: string;
mappingType: VaultMappingType;
vaultUrl: string;
} & Omit<TOrgPermission, "orgId">;
export type TImportInfisicalDataCreate = {
data: InfisicalImportData;

View File

@@ -15,5 +15,16 @@ export type TIdentityAccessTokenJwtPayload = {
namespace: string;
name: string;
};
aws?: {
accountId: string;
arn: string;
userId: string;
// Derived from ARN
partition: string; // "aws", "aws-us-gov", "aws-cn"
service: string; // "iam", "sts"
resourceType: string; // "user" or "role"
resourceName: string;
};
};
};
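Concretely, a token minted via AWS auth (see the identity-aws-auth-service change further down) would carry claims shaped like this; every value below is illustrative and the other payload fields are elided.

const exampleAwsClaims = {
  identityAuth: {
    aws: {
      accountId: "123456789012", // made-up account
      arn: "arn:aws:sts::123456789012:assumed-role/MyRole/my-session",
      userId: "AROAEXAMPLEID:my-session",
      // Derived from the ARN:
      partition: "aws",
      service: "sts",
      resourceType: "role",
      resourceName: "MyRole"
    }
  }
};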

View File

@@ -1,67 +1,91 @@
interface PrincipalArnEntity {
Partition: string;
Service: "iam" | "sts";
AccountNumber: string;
Type: "user" | "role" | "instance-profile";
Path: string;
FriendlyName: string;
SessionInfo: string; // Only populated for assumed-role
}
export const extractPrincipalArnEntity = (arn: string): PrincipalArnEntity => {
// split the ARN into parts using ":" as the delimiter
const fullParts = arn.split(":");
if (fullParts.length !== 6) {
throw new Error(`Unrecognized ARN: "${arn}" contains ${fullParts.length} colon-separated parts, expected 6`);
}
const [prefix, partition, service, , accountNumber, resource] = fullParts;
if (prefix !== "arn") {
throw new Error(`Unrecognized ARN: "${arn}" does not begin with "arn:"`);
}
// validate the service is either 'iam' or 'sts'
if (service !== "iam" && service !== "sts") {
throw new Error(`Unrecognized service: "${service}" in ARN "${arn}", expected "iam" or "sts"`);
}
// parse the last part of the ARN which describes the resource
const parts = resource.split("/");
if (parts.length < 2) {
throw new Error(
`Unrecognized ARN: "${resource}" in ARN "${arn}" contains fewer than 2 slash-separated parts (expected type/name)`
);
}
const [rawType, ...rest] = parts;
let finalType: PrincipalArnEntity["Type"];
let friendlyName: string = parts[parts.length - 1];
let path: string = "";
let sessionInfo: string = "";
// handle different types of resources
switch (rawType) {
case "assumed-role": {
if (rest.length < 2) {
throw new Error(
`Unrecognized ARN: "${resource}" for assumed-role in ARN "${arn}" contains fewer than 3 slash-separated parts (type/roleName/sessionId)`
);
}
// assumed roles use a special format where the friendly name is the role name
const [roleName, sessionId] = rest;
finalType = "role"; // treat assumed role case as role
friendlyName = roleName;
sessionInfo = sessionId;
break;
}
case "user":
case "role":
case "instance-profile":
finalType = rawType;
path = rest.slice(0, -1).join("/");
break;
default:
throw new Error(
`Unrecognized principal type: "${rawType}" in ARN "${arn}". Expected "user", "role", "instance-profile", or "assumed-role".`
);
}
const entity: PrincipalArnEntity = {
Partition: partition,
Service: service,
AccountNumber: accountNumber,
Type: finalType,
Path: path,
FriendlyName: friendlyName,
SessionInfo: sessionInfo
};
return entity;
};
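For example, parsing an STS assumed-role ARN (made-up account and session) collapses it to a role principal while preserving the session id:

const entity = extractPrincipalArnEntity("arn:aws:sts::123456789012:assumed-role/MyRole/my-session");
// entity.Partition     === "aws"
// entity.Service       === "sts"
// entity.AccountNumber === "123456789012"
// entity.Type          === "role" (assumed-role is treated as role)
// entity.FriendlyName  === "MyRole"
// entity.SessionInfo   === "my-session"
// extractPrincipalArn(...) then rebuilds "arn:aws:iam::123456789012:role/MyRole"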
/**
* Extracts the identity ARN from the GetCallerIdentity response to one of the following formats:
* - arn:aws:iam::123456789012:user/MyUserName
* - arn:aws:iam::123456789012:role/MyRoleName
*/
export const extractPrincipalArn = (arn: string) => {
// split the ARN into parts using ":" as the delimiter
const fullParts = arn.split(":");
if (fullParts.length !== 6) {
throw new Error(`Unrecognized ARN: contains ${fullParts.length} colon-separated parts, expected 6`);
}
const [prefix, partition, service, , accountNumber, resource] = fullParts;
if (prefix !== "arn") {
throw new Error('Unrecognized ARN: does not begin with "arn:"');
}
// structure to hold the parsed data
const entity = {
Partition: partition,
Service: service,
AccountNumber: accountNumber,
Type: "",
Path: "",
FriendlyName: "",
SessionInfo: ""
};
// validate the service is either 'iam' or 'sts'
if (entity.Service !== "iam" && entity.Service !== "sts") {
throw new Error(`Unrecognized service: ${entity.Service}, not one of iam or sts`);
}
// parse the last part of the ARN which describes the resource
const parts = resource.split("/");
if (parts.length < 2) {
throw new Error(`Unrecognized ARN: "${resource}" contains fewer than 2 slash-separated parts`);
}
const [type, ...rest] = parts;
entity.Type = type;
entity.FriendlyName = parts[parts.length - 1];
// handle different types of resources
switch (entity.Type) {
case "assumed-role": {
if (rest.length < 2) {
throw new Error(`Unrecognized ARN: "${resource}" contains fewer than 3 slash-separated parts`);
}
// assumed roles use a special format where the friendly name is the role name
const [roleName, sessionId] = rest;
entity.Type = "role"; // treat assumed role case as role
entity.FriendlyName = roleName;
entity.SessionInfo = sessionId;
break;
}
case "user":
case "role":
case "instance-profile":
// standard cases: just join back the path if there's any
entity.Path = rest.slice(0, -1).join("/");
break;
default:
throw new Error(`Unrecognized principal type: "${entity.Type}"`);
}
const entity = extractPrincipalArnEntity(arn);
return `arn:aws:iam::${entity.AccountNumber}:${entity.Type}/${entity.FriendlyName}`;
};
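A minimal usage sketch (ARN value illustrative, not from the source): extractPrincipalArnEntity parses an STS assumed-role ARN into its parts, and extractPrincipalArn normalizes it to the IAM form.
const arn = "arn:aws:sts::123456789012:assumed-role/MyRoleName/MySessionId";
const entity = extractPrincipalArnEntity(arn);
// entity => { Partition: "aws", Service: "sts", AccountNumber: "123456789012",
//             Type: "role", Path: "", FriendlyName: "MyRoleName", SessionInfo: "MySessionId" }
extractPrincipalArn(arn);
// => "arn:aws:iam::123456789012:role/MyRoleName"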

View File

@@ -22,7 +22,7 @@ import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identit
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
import { validateIdentityUpdateForSuperAdminPrivileges } from "../super-admin/super-admin-fns";
import { TIdentityAwsAuthDALFactory } from "./identity-aws-auth-dal";
import { extractPrincipalArn } from "./identity-aws-auth-fns";
import { extractPrincipalArn, extractPrincipalArnEntity } from "./identity-aws-auth-fns";
import {
TAttachAwsAuthDTO,
TAwsGetCallerIdentityHeaders,
@@ -107,7 +107,7 @@ export const identityAwsAuthServiceFactory = ({
const {
data: {
GetCallerIdentityResponse: {
GetCallerIdentityResult: { Account, Arn }
GetCallerIdentityResult: { Account, Arn, UserId }
}
}
}: { data: TGetCallerIdentityResponse } = await axios({
@@ -168,11 +168,25 @@ export const identityAwsAuthServiceFactory = ({
});
const appCfg = getConfig();
const splitArn = extractPrincipalArnEntity(Arn);
const accessToken = crypto.jwt().sign(
{
identityId: identityAwsAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
identityAuth: {
aws: {
accountId: Account,
arn: Arn,
userId: UserId,
// Derived from ARN
partition: splitArn.Partition,
service: splitArn.Service,
resourceType: splitArn.Type,
resourceName: splitArn.FriendlyName
}
}
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiring tokens, do not set the expiry option at all, not even to undefined; jsonwebtoken throws even when the value is undefined
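A sketch of the decoded access-token claims under the new payload (identifier values illustrative; shape follows the diff above):
// {
//   identityId: "…", identityAccessTokenId: "…",
//   authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN,
//   identityAuth: {
//     aws: {
//       accountId: "123456789012",
//       arn: "arn:aws:sts::123456789012:assumed-role/MyRole/MySession",
//       userId: "AROAEXAMPLEID:MySession",
//       partition: "aws", service: "sts", resourceType: "role", resourceName: "MyRole"
//     }
//   }
// }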

View File

@@ -1,9 +1,11 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TAccessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSecretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@@ -20,6 +22,8 @@ type TProjectEnvServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem" | "waitTillReady">;
accessApprovalPolicyEnvironmentDAL: Pick<TAccessApprovalPolicyEnvironmentDALFactory, "findAvailablePoliciesByEnvId">;
secretApprovalPolicyEnvironmentDAL: Pick<TSecretApprovalPolicyEnvironmentDALFactory, "findAvailablePoliciesByEnvId">;
};
export type TProjectEnvServiceFactory = ReturnType<typeof projectEnvServiceFactory>;
@@ -30,7 +34,9 @@ export const projectEnvServiceFactory = ({
licenseService,
keyStore,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL
}: TProjectEnvServiceFactoryDep) => {
const createEnvironment = async ({
projectId,
@@ -220,6 +226,20 @@ export const projectEnvServiceFactory = ({
}
const env = await projectEnvDAL.transaction(async (tx) => {
const secretApprovalPolicies = await secretApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(id, tx);
if (secretApprovalPolicies.length > 0) {
throw new BadRequestError({
message: "Environment is in use by a secret approval policy",
name: "DeleteEnvironment"
});
}
const accessApprovalPolicies = await accessApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(id, tx);
if (accessApprovalPolicies.length > 0) {
throw new BadRequestError({
message: "Environment is in use by an access approval policy",
name: "DeleteEnvironment"
});
}
const [doc] = await projectEnvDAL.delete({ id, projectId }, tx);
if (!doc)
throw new NotFoundError({
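A behavior sketch for the new guards (the service method name is assumed from context): deleting an environment that an approval policy still references now fails before any row is removed.
// await projectEnvService.deleteEnvironment({ id, projectId, ... });
// -> BadRequestError: "Environment is in use by a secret approval policy"
// -> or BadRequestError: "Environment is in use by an access approval policy"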

View File

@@ -645,7 +645,7 @@ export const projectServiceFactory = ({
const updateProject = async ({ actor, actorId, actorOrgId, actorAuthMethod, update, filter }: TUpdateProjectDTO) => {
const project = await projectDAL.findProjectByFilter(filter);
const { permission } = await permissionService.getProjectPermission({
const { permission, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
@@ -667,6 +667,12 @@ export const projectServiceFactory = ({
}
}
if (update.secretDetectionIgnoreValues && !hasRole(ProjectMembershipRole.Admin)) {
throw new ForbiddenRequestError({
message: "Only admins can update secret detection ignore values"
});
}
const updatedProject = await projectDAL.updateById(project.id, {
name: update.name,
description: update.description,
@@ -676,7 +682,8 @@ export const projectServiceFactory = ({
slug: update.slug,
secretSharing: update.secretSharing,
defaultProduct: update.defaultProduct,
showSnapshotsLegacy: update.showSnapshotsLegacy
showSnapshotsLegacy: update.showSnapshotsLegacy,
secretDetectionIgnoreValues: update.secretDetectionIgnoreValues
});
return updatedProject;
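A usage sketch for the new field (caller context elided; the DTO field comes from the diff below): only project admins may change the ignore list, so a member-role caller is rejected.
// await projectService.updateProject({
//   ...callerContext,
//   update: { secretDetectionIgnoreValues: ["PLACEHOLDER_VALUE", "dummy-token"] }
// });
// -> ForbiddenRequestError: "Only admins can update secret detection ignore values" (for non-admins)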

View File

@@ -96,6 +96,7 @@ export type TUpdateProjectDTO = {
slug?: string;
secretSharing?: boolean;
showSnapshotsLegacy?: boolean;
secretDetectionIgnoreValues?: string[];
};
} & Omit<TProjectPermission, "projectId">;

View File

@@ -11,7 +11,7 @@ import { TReminderServiceFactory } from "./reminder-types";
type TDailyReminderQueueServiceFactoryDep = {
reminderService: TReminderServiceFactory;
queueService: TQueueServiceFactory;
secretDAL: Pick<TSecretV2BridgeDALFactory, "transaction" | "findSecretsWithReminderRecipients">;
secretDAL: Pick<TSecretV2BridgeDALFactory, "transaction" | "findSecretsWithReminderRecipientsOld">;
secretReminderRecipientsDAL: Pick<TSecretReminderRecipientsDALFactory, "delete">;
};
@@ -69,7 +69,7 @@ export const dailyReminderQueueServiceFactory = ({
// Find existing secrets with pagination
// eslint-disable-next-line no-await-in-loop
const secrets = await secretDAL.findSecretsWithReminderRecipients(batchIds, REMINDER_PRUNE_BATCH_SIZE);
const secrets = await secretDAL.findSecretsWithReminderRecipientsOld(batchIds, REMINDER_PRUNE_BATCH_SIZE);
const secretsWithReminder = secrets.filter((secret) => secret.reminderRepeatDays);
const foundSecretIds = new Set(secretsWithReminder.map((secret) => secret.id));
@@ -173,12 +173,6 @@ export const dailyReminderQueueServiceFactory = ({
{ pattern: "0 */1 * * *", utc: true },
QueueName.SecretReminderMigration // just a job id
);
await queueService.queue(QueueName.SecretReminderMigration, QueueJobs.SecretReminderMigration, undefined, {
delay: 5000,
jobId: QueueName.SecretReminderMigration,
repeat: { pattern: "0 */1 * * *", utc: true }
});
};
queueService.listen(QueueName.DailyReminders, "failed", (_, err) => {

View File

@@ -308,12 +308,11 @@ export const reminderServiceFactory = ({
);
const newReminders = await reminderDAL.insertMany(
processedReminders.map(({ secretId, message, repeatDays, nextReminderDate, projectId }) => ({
processedReminders.map(({ secretId, message, repeatDays, nextReminderDate }) => ({
secretId,
message,
repeatDays,
nextReminderDate,
projectId
nextReminderDate
})),
tx
);

View File

@@ -8,7 +8,26 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { TRenderSecret, TRenderSyncWithCredentials } from "./render-sync-types";
const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredentials) => {
const MAX_RETRIES = 5;
const retrySleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 60000);
});
const makeRequestWithRetry = async <T>(requestFn: () => Promise<T>, attempt = 0): Promise<T> => {
try {
return await requestFn();
} catch (error) {
if (isAxiosError(error) && error.response?.status === 429 && attempt < MAX_RETRIES) {
await retrySleep();
return await makeRequestWithRetry(requestFn, attempt + 1);
}
throw error;
}
};
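A usage sketch: any Render API call can be wrapped so that HTTP 429 responses are retried with a fixed 60-second sleep, up to MAX_RETRIES attempts, before the error propagates.
// const { data } = await makeRequestWithRetry(() =>
//   request.get(url, { headers: { Authorization: `Bearer ${apiKey}`, Accept: "application/json" } })
// );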
const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredentials): Promise<TRenderSecret[]> => {
const {
destinationConfig,
connection: {
@@ -22,20 +41,23 @@ const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredential
do {
const url = cursor ? `${baseUrl}?cursor=${cursor}` : baseUrl;
const { data } = await request.get<
{
envVar: {
key: string;
value: string;
};
cursor: string;
}[]
>(url, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
});
const { data } = await makeRequestWithRetry(() =>
request.get<
{
envVar: {
key: string;
value: string;
};
cursor: string;
}[]
>(url, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
})
);
const secrets = data.map((item) => ({
key: item.envVar.key,
@@ -44,13 +66,20 @@ const getRenderEnvironmentSecrets = async (secretSync: TRenderSyncWithCredential
allSecrets.push(...secrets);
cursor = data[data.length - 1]?.cursor;
if (data.length > 0 && data[data.length - 1]?.cursor) {
cursor = data[data.length - 1].cursor;
} else {
cursor = undefined;
}
} while (cursor);
return allSecrets;
};
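A pagination sketch (page contents illustrative): each item carries the cursor for the next page, and the loop ends once a page is empty or its last item lacks a cursor.
// page 1: [{ envVar: { key: "A", value: "1" }, cursor: "c1" }]  -> next GET ...?cursor=c1
// page 2: []                                                    -> cursor = undefined, loop exits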
const putEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap, key: string) => {
const batchUpdateEnvironmentSecrets = async (
secretSync: TRenderSyncWithCredentials,
envVars: Array<{ key: string; value: string }>
): Promise<void> => {
const {
destinationConfig,
connection: {
@@ -58,22 +87,17 @@ const putEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secr
}
} = secretSync;
await request.put(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars/${key}`,
{
key,
value: secretMap[key].value
},
{
await makeRequestWithRetry(() =>
request.put(`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars`, envVars, {
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
}
})
);
};
const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secret: Pick<TRenderSecret, "key">) => {
const redeployService = async (secretSync: TRenderSyncWithCredentials) => {
const {
destinationConfig,
connection: {
@@ -81,70 +105,81 @@ const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, s
}
} = secretSync;
try {
await request.delete(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars/${secret.key}`,
await makeRequestWithRetry(() =>
request.post(
`${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/deploys`,
{},
{
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json"
}
}
);
} catch (error) {
if (isAxiosError(error) && error.response?.status === 404) {
// If the secret does not exist, we can ignore this error
return;
}
throw error;
}
)
);
};
const sleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 500);
});
export const RenderSyncFns = {
syncSecrets: async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap) => {
const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
for await (const key of Object.keys(secretMap)) {
// If value is empty skip it as render does not allow empty variables
if (secretMap[key].value === "") {
// eslint-disable-next-line no-continue
continue;
const finalEnvVars: Array<{ key: string; value: string }> = [];
for (const renderSecret of renderSecrets) {
const shouldKeep =
secretMap[renderSecret.key] ||
(secretSync.syncOptions.disableSecretDeletion &&
!matchesSchema(renderSecret.key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema));
if (shouldKeep && !secretMap[renderSecret.key]) {
finalEnvVars.push({
key: renderSecret.key,
value: renderSecret.value
});
}
await putEnvironmentSecret(secretSync, secretMap, key);
await sleep();
}
if (secretSync.syncOptions.disableSecretDeletion) return;
for await (const renderSecret of renderSecrets) {
if (!matchesSchema(renderSecret.key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema))
for (const [key, secret] of Object.entries(secretMap)) {
// Skip empty values as render does not allow empty variables
if (secret.value === "") {
// eslint-disable-next-line no-continue
continue;
if (!secretMap[renderSecret.key]) {
await deleteEnvironmentSecret(secretSync, renderSecret);
await sleep();
}
finalEnvVars.push({
key,
value: secret.value
});
}
await batchUpdateEnvironmentSecrets(secretSync, finalEnvVars);
if (secretSync.syncOptions.autoRedeployServices) {
await redeployService(secretSync);
}
},
getSecrets: async (secretSync: TRenderSyncWithCredentials): Promise<TSecretMap> => {
const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
return Object.fromEntries(renderSecrets.map((secret) => [secret.key, { value: secret.value ?? "" }]));
},
removeSecrets: async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap) => {
const encryptedSecrets = await getRenderEnvironmentSecrets(secretSync);
const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
const finalEnvVars: Array<{ key: string; value: string }> = [];
for await (const encryptedSecret of encryptedSecrets) {
if (encryptedSecret.key in secretMap) {
await deleteEnvironmentSecret(secretSync, encryptedSecret);
await sleep();
for (const renderSecret of renderSecrets) {
if (!(renderSecret.key in secretMap)) {
finalEnvVars.push({
key: renderSecret.key,
value: renderSecret.value
});
}
}
await batchUpdateEnvironmentSecrets(secretSync, finalEnvVars);
if (secretSync.syncOptions.autoRedeployServices) {
await redeployService(secretSync);
}
}
};
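A worked example of how syncSecrets now assembles one batch payload (inputs illustrative, disableSecretDeletion off):
// renderSecrets = [{ key: "STALE", value: "x" }, { key: "APP_KEY", value: "old" }]
// secretMap     = { APP_KEY: { value: "new" }, EMPTY: { value: "" } }
// finalEnvVars  = [{ key: "APP_KEY", value: "new" }]
//   STALE is dropped (absent from secretMap, deletion enabled); EMPTY is skipped (Render
//   rejects empty values). A single PUT then replaces the service's env vars, superseding
//   the per-key requests and 500 ms sleeps, and an optional redeploy follows.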

View File

@@ -20,23 +20,33 @@ const RenderSyncDestinationConfigSchema = z.discriminatedUnion("scope", [
})
]);
const RenderSyncOptionsSchema = z.object({
autoRedeployServices: z.boolean().optional().describe(SecretSyncs.ADDITIONAL_SYNC_OPTIONS.RENDER.autoRedeployServices)
});
const RenderSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
export const RenderSyncSchema = BaseSecretSyncSchema(SecretSync.Render, RenderSyncOptionsConfig).extend({
export const RenderSyncSchema = BaseSecretSyncSchema(
SecretSync.Render,
RenderSyncOptionsConfig,
RenderSyncOptionsSchema
).extend({
destination: z.literal(SecretSync.Render),
destinationConfig: RenderSyncDestinationConfigSchema
});
export const CreateRenderSyncSchema = GenericCreateSecretSyncFieldsSchema(
SecretSync.Render,
RenderSyncOptionsConfig
RenderSyncOptionsConfig,
RenderSyncOptionsSchema
).extend({
destinationConfig: RenderSyncDestinationConfigSchema
});
export const UpdateRenderSyncSchema = GenericUpdateSecretSyncFieldsSchema(
SecretSync.Render,
RenderSyncOptionsConfig
RenderSyncOptionsConfig,
RenderSyncOptionsSchema
).extend({
destinationConfig: RenderSyncDestinationConfigSchema.optional()
});
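A sketch of an options object accepted by the extended schemas:
// RenderSyncOptionsSchema.parse({ autoRedeployServices: true });
// -> triggers redeployService after each sync / removal pass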

View File

@@ -875,6 +875,48 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
}
};
const findSecretsWithReminderRecipientsOld = async (ids: string[], limit: number, tx?: Knex) => {
try {
// Create a subquery to get limited secret IDs
const limitedSecretIds = (tx || db)(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.id`, ids)
.limit(limit)
.select("id");
// Join with all recipients for the limited secrets
const docs = await (tx || db)(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.id`, limitedSecretIds)
.leftJoin(TableName.Reminder, `${TableName.SecretV2}.id`, `${TableName.Reminder}.secretId`)
.leftJoin(
TableName.SecretReminderRecipients,
`${TableName.SecretV2}.id`,
`${TableName.SecretReminderRecipients}.secretId`
)
.select(selectAllTableCols(TableName.SecretV2))
.select(db.ref("userId").withSchema(TableName.SecretReminderRecipients).as("reminderRecipientUserId"));
const data = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (el) => ({
_id: el.id,
...SecretsV2Schema.parse(el)
}),
childrenMapper: [
{
key: "reminderRecipientUserId",
label: "recipients" as const,
mapper: ({ reminderRecipientUserId }) => reminderRecipientUserId
}
]
});
return data;
} catch (error) {
throw new DatabaseError({ error, name: "findSecretsWithReminderRecipientsOld" });
}
};
return {
...secretOrm,
update,
@@ -893,6 +935,7 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
findOne,
find,
invalidateSecretCacheByProjectId,
findSecretsWithReminderRecipients
findSecretsWithReminderRecipients,
findSecretsWithReminderRecipientsOld
};
};
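A sketch of the nested shape returned by findSecretsWithReminderRecipientsOld (row values illustrative): sqlNestRelationships folds the joined recipient rows into one array per secret.
// [{ _id: "secret-1", ...parsed SecretsV2 columns, recipients: ["user-a", "user-b"] }]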

View File

@@ -25,6 +25,7 @@ import {
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { DatabaseErrorCode } from "@app/lib/error-codes";
@@ -38,6 +39,7 @@ import { ActorType } from "../auth/auth-type";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TReminderServiceFactory } from "../reminder/reminder-types";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
@@ -88,6 +90,7 @@ import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal";
type TSecretV2BridgeServiceFactoryDep = {
secretDAL: TSecretV2BridgeDALFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
secretVersionDAL: TSecretVersionV2DALFactory;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@@ -126,6 +129,7 @@ export type TSecretV2BridgeServiceFactory = ReturnType<typeof secretV2BridgeServ
*/
export const secretV2BridgeServiceFactory = ({
secretDAL,
projectDAL,
projectEnvDAL,
secretTagDAL,
secretVersionDAL,
@@ -295,6 +299,19 @@ export const secretV2BridgeServiceFactory = ({
})
);
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.secretName,
secretValue: inputSecret.secretValue
}
],
project.secretDetectionIgnoreValues || []
);
const { nestedReferences, localReferences } = getAllSecretReferences(inputSecret.secretValue);
const allSecretReferences = nestedReferences.concat(
localReferences.map((el) => ({ secretKey: el, secretPath, environment }))
@@ -506,6 +523,21 @@ export const secretV2BridgeServiceFactory = ({
const { secretName, secretValue } = inputSecret;
if (secretValue) {
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.newSecretName || secretName,
secretValue
}
],
project.secretDetectionIgnoreValues || []
);
}
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
@@ -1585,6 +1617,9 @@ export const secretV2BridgeServiceFactory = ({
if (secrets.length)
throw new BadRequestError({ message: `Secret already exist: ${secrets.map((el) => el.key).join(",")}` });
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(projectId, secretPath, inputSecrets, project.secretDetectionIgnoreValues || []);
// get all tags
const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];
@@ -1925,6 +1960,19 @@ export const secretV2BridgeServiceFactory = ({
});
await $validateSecretReferences(projectId, permission, secretReferences, tx);
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
secretsToUpdate
.filter((el) => el.secretValue)
.map((el) => ({
secretKey: el.newSecretName || el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
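A call sketch for the scanner hook (its exact failure mode is an assumption based on the feature: values that trigger a detection rule and are not covered by the ignore list are expected to be rejected):
// await scanSecretPolicyViolations(
//   projectId,
//   secretPath,
//   [{ secretKey: "STRIPE_KEY", secretValue: "sk_live_placeholder" }],
//   project.secretDetectionIgnoreValues || []
// );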
const bulkUpdatedSecrets = await fnSecretBulkUpdate({
folderId,
orgId: actorOrgId,

cli/.gitignore
View File

@@ -1,4 +0,0 @@
.infisical.json
dist/
agent-config.test.yaml
.test.env

View File

@@ -1,3 +0,0 @@
bea0ff6e05a4de73a5db625d4ae181a015b50855:frontend/components/utilities/attemptLogin.js:stripe-access-token:147
bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5
1961b92340e5d2613acae528b886c842427ce5d0:frontend/components/utilities/attemptLogin.js:stripe-access-token:148

View File

@@ -1,37 +0,0 @@
infisical:
address: "https://app.infisical.com/"
auth:
type: "universal-auth"
config:
client-id: "./client-id"
client-secret: "./client-secret"
remove_client_secret_on_read: false
sinks:
- type: "file"
config:
path: "access-token"
templates:
- template-content: |
{{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
{{- range . }}
{{ .Key }}={{ .Value }}
{{- end }}
{{- end }}
destination-path: my-dot-env-0.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
destination-path: my-dot-env.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- source-path: my-dot-ev-secret-template1
destination-path: my-dot-env-1.env
config:
exec:
command: mkdir hello-world1

View File

@@ -1,103 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/Infisical/infisical-merge/detect/report"
)
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
// the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
finding.StartColumn == b.StartColumn &&
finding.EndColumn == b.EndColumn &&
(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
finding.File == b.File &&
finding.Commit == b.Commit &&
finding.Author == b.Author &&
finding.Email == b.Email &&
finding.Date == b.Date &&
finding.Message == b.Message &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Entropy == b.Entropy {
return false
}
}
return true
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
}
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}

View File

@@ -1,70 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
BitBucketPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
"bitbucket",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
case "bitbucket":
return BitBucketPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}

View File

@@ -1,159 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// MatchCondition determines whether all criteria must match.
MatchCondition AllowlistMatchCondition
// Commits is a slice of commit SHAs that are allowed to be ignored. Defaults to "OR".
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true, c
}
}
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true, stopWord
}
}
return false, ""
}

View File

@@ -1,426 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// use to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, a := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
for _, a := range currentRule.Allowlists {
baseRule.Allowlists = append(baseRule.Allowlists, a)
}
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// sort to keep extended rules in order
sort.Strings(c.OrderedRules)
}

File diff suppressed because it is too large

View File

@@ -1,114 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}

View File

@@ -1,46 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"github.com/Infisical/infisical-merge/detect/regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

View File

@@ -1,328 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decoders tries a list of decoders and returns the first successful one
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// Find a segment where the decoded bounds overlaps a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}


@@ -1,699 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"context"
"fmt"
"os"
"runtime"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
"github.com/Infisical/infisical-merge/detect/report"
ahocorasick "github.com/BobuSumisu/aho-corasick"
"github.com/fatih/semgroup"
"github.com/rs/zerolog"
"github.com/spf13/viper"
"golang.org/x/exp/maps"
)
const (
gitleaksAllowSignature = "gitleaks:allow"
chunkSize = 100 * 1_000 // 100kb
// SlowWarningThreshold is the amount of time to wait before logging that a file is slow.
// This is useful for identifying problematic files and tuning the allowlist.
SlowWarningThreshold = 5 * time.Second
)
var (
newLineRegexp = regexp.MustCompile("\n")
isWindows = runtime.GOOS == "windows"
)
// Detector is the main detector struct
type Detector struct {
// Config is the configuration for the detector
Config config.Config
// Redact is a flag to redact findings. This is exported
// so users using gitleaks as a library can set this flag
// without calling `detector.Start(cmd *cobra.Command)`
Redact uint
// Verbose is a flag to print findings
Verbose bool
// MaxDecodeDepth limits how many recursive decoding passes are allowed
MaxDecodeDepth int
// MaxTargetMegaBytes: files larger than this will be skipped
MaxTargetMegaBytes int
// FollowSymlinks is a flag to enable scanning symlink files
FollowSymlinks bool
// NoColor is a flag to disable color output
NoColor bool
// IgnoreGitleaksAllow is a flag to ignore gitleaks:allow comments.
IgnoreGitleaksAllow bool
// commitMap is used to keep track of commits that have been scanned.
// This is only used for logging purposes and git scans.
commitMap map[string]bool
// findingMutex is to prevent concurrent access to the
// findings slice when adding findings.
findingMutex *sync.Mutex
// findings is a slice of report.Findings. This is the result
// of the detector's scan which can then be used to generate a
// report.
findings []report.Finding
// prefilter is an ahocorasick trie used for doing efficient string
// matching given a set of words (keywords from the rules in the config)
prefilter ahocorasick.Trie
// a list of known findings that should be ignored
baseline []report.Finding
// path to baseline
baselinePath string
// gitleaksIgnore holds fingerprints loaded from .gitleaksignore files
gitleaksIgnore map[string]struct{}
// Sema (https://github.com/fatih/semgroup) controls the concurrency
Sema *semgroup.Group
// report-related settings.
ReportPath string
Reporter report.Reporter
TotalBytes atomic.Uint64
}
// Fragment contains the data to be scanned
type Fragment struct {
// Raw is the raw content of the fragment
Raw string
Bytes []byte
// FilePath is the path to the file, if applicable.
// The path separator MUST be normalized to `/`.
FilePath string
SymlinkFile string
// WindowsFilePath is the path with the original separator.
// This provides a backwards-compatible solution to https://github.com/gitleaks/gitleaks/issues/1565.
WindowsFilePath string `json:"-"` // TODO: remove this in v9.
// CommitSHA is the SHA of the commit if applicable
CommitSHA string
// newlineIndices is a list of indices of newlines in the raw content.
// This is used to calculate the line location of a finding
newlineIndices [][]int
}
// NewDetector creates a new detector with the given config
func NewDetector(cfg config.Config) *Detector {
return &Detector{
commitMap: make(map[string]bool),
gitleaksIgnore: make(map[string]struct{}),
findingMutex: &sync.Mutex{},
findings: make([]report.Finding, 0),
Config: cfg,
prefilter: *ahocorasick.NewTrieBuilder().AddStrings(maps.Keys(cfg.Keywords)).Build(),
Sema: semgroup.NewGroup(context.Background(), 40),
}
}
// NewDetectorDefaultConfig creates a new detector with the default config
func NewDetectorDefaultConfig() (*Detector, error) {
viper.SetConfigType("toml")
err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
if err != nil {
return nil, err
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
return nil, err
}
cfg, err := vc.Translate()
if err != nil {
return nil, err
}
return NewDetector(cfg), nil
}
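
A minimal usage sketch for the default-config constructor. The helper name and the secret literal are made up; whether the literal triggers a bundled rule depends on the default config.

    // Hypothetical caller (imports: fmt, log).
    func exampleDefaultDetector() {
        d, err := NewDetectorDefaultConfig()
        if err != nil {
            log.Fatal(err)
        }
        findings := d.DetectString(`export STRIPE_KEY="sk_live_notarealkey12345678"`)
        for _, f := range findings {
            fmt.Printf("%s: %s (entropy %.2f)\n", f.RuleID, f.Secret, f.Entropy)
        }
    }
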
func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
logging.Debug().Msgf("found .gitleaksignore file: %s", gitleaksIgnorePath)
file, err := os.Open(gitleaksIgnorePath)
if err != nil {
return err
}
defer func() {
// https://github.com/securego/gosec/issues/512
if err := file.Close(); err != nil {
logging.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
}
}()
scanner := bufio.NewScanner(file)
replacer := strings.NewReplacer("\\", "/")
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip lines that start with a comment
if line == "" || strings.HasPrefix(line, "#") {
continue
}
// Normalize the path.
// TODO: Make this a breaking change in v9.
s := strings.Split(line, ":")
switch len(s) {
case 3:
// Global fingerprint.
// `file:rule-id:start-line`
s[0] = replacer.Replace(s[0])
case 4:
// Commit fingerprint.
// `commit:file:rule-id:start-line`
s[1] = replacer.Replace(s[1])
default:
logging.Warn().Str("fingerprint", line).Msg("Invalid .gitleaksignore entry")
}
d.gitleaksIgnore[strings.Join(s, ":")] = struct{}{}
}
return nil
}
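
For illustration, the two fingerprint shapes this parser accepts, with made-up values and error handling elided.

    // Hypothetical .gitleaksignore contents (import: os).
    const exampleIgnoreFile = `# comments and blank lines are skipped
    path/to/file.go:generic-api-key:12
    3f8a9c2d:path/to/file.go:generic-api-key:12`

    func exampleAddIgnore(d *Detector) {
        _ = os.WriteFile(".gitleaksignore", []byte(exampleIgnoreFile), 0o600)
        _ = d.AddGitleaksIgnore(".gitleaksignore")
    }
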
// DetectBytes scans the given bytes and returns a list of findings
func (d *Detector) DetectBytes(content []byte) []report.Finding {
return d.DetectString(string(content))
}
// DetectString scans the given string and returns a list of findings
func (d *Detector) DetectString(content string) []report.Finding {
return d.Detect(Fragment{
Raw: content,
})
}
// Detect scans the given fragment and returns a list of findings
func (d *Detector) Detect(fragment Fragment) []report.Finding {
if fragment.Bytes == nil {
d.TotalBytes.Add(uint64(len(fragment.Raw)))
}
d.TotalBytes.Add(uint64(len(fragment.Bytes)))
var (
findings []report.Finding
logger = func() zerolog.Logger {
l := logging.With().Str("path", fragment.FilePath)
if fragment.CommitSHA != "" {
l = l.Str("commit", fragment.CommitSHA)
}
return l.Logger()
}()
)
// check if filepath is allowed
if fragment.FilePath != "" {
// is the path our config or baseline file?
if fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath) {
logging.Trace().Msg("skipping file: matches config or baseline path")
return findings
}
}
// check if commit or filepath is allowed.
if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, d.Config.Allowlists); isAllowed {
event.Msg("skipping file: global allowlist")
return findings
}
// add newline indices for location calculation in detectRule
fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)
// setup variables to handle different decoding passes
currentRaw := fragment.Raw
encodedSegments := []EncodedSegment{}
currentDecodeDepth := 0
decoder := NewDecoder()
for {
// build keyword map for prefiltering rules
keywords := make(map[string]bool)
normalizedRaw := strings.ToLower(currentRaw)
matches := d.prefilter.MatchString(normalizedRaw)
for _, m := range matches {
keywords[normalizedRaw[m.Pos():int(m.Pos())+len(m.Match())]] = true
}
for _, rule := range d.Config.Rules {
if len(rule.Keywords) == 0 {
// if no keywords are associated with the rule always scan the
// fragment using the rule
findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
continue
}
// check if keywords are in the fragment
for _, k := range rule.Keywords {
if _, ok := keywords[strings.ToLower(k)]; ok {
findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
break
}
}
}
// increment the depth by 1 as we start our decoding pass
currentDecodeDepth++
// stop the loop if we've hit our max decoding depth
if currentDecodeDepth > d.MaxDecodeDepth {
break
}
// decode the currentRaw for the next pass
currentRaw, encodedSegments = decoder.decode(currentRaw, encodedSegments)
// stop the loop when there's nothing else to decode
if len(encodedSegments) == 0 {
break
}
}
return filter(findings, d.Redact)
}
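
The loop above only re-decodes while currentDecodeDepth stays within MaxDecodeDepth, so nested encodings need an explicit depth. A sketch with a doubly base64-encoded value as the illustrative input:

    // Hypothetical depth configuration (import: encoding/base64).
    func exampleNestedDecode(d *Detector) []report.Finding {
        inner := base64.StdEncoding.EncodeToString([]byte(`password = "hunter2"`))
        outer := base64.StdEncoding.EncodeToString([]byte(inner))
        d.MaxDecodeDepth = 2 // two passes: outer -> inner -> plaintext
        return d.Detect(Fragment{Raw: outer})
    }
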
// detectRule scans the given fragment for the given rule and returns a list of findings
func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rule, encodedSegments []EncodedSegment) []report.Finding {
var (
findings []report.Finding
logger = func() zerolog.Logger {
l := logging.With().Str("rule-id", r.RuleID).Str("path", fragment.FilePath)
if fragment.CommitSHA != "" {
l = l.Str("commit", fragment.CommitSHA)
}
return l.Logger()
}()
)
// check if commit or file is allowed for this rule.
if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, r.Allowlists); isAllowed {
event.Msg("skipping file: rule allowlist")
return findings
}
if r.Path != nil {
if r.Regex == nil && len(encodedSegments) == 0 {
// Path _only_ rule
if r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath)) {
finding := report.Finding{
RuleID: r.RuleID,
Description: r.Description,
File: fragment.FilePath,
SymlinkFile: fragment.SymlinkFile,
Match: fmt.Sprintf("file detected: %s", fragment.FilePath),
Tags: r.Tags,
}
return append(findings, finding)
}
} else {
// if path is set _and_ a regex is set, then we need to check both
// so if the path does not match, then we should return early and not
// consider the regex
if !(r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath))) {
return findings
}
}
}
// if the rule has no regex (path-only rule), there is no content to check
if r.Regex == nil {
return findings
}
// if the max-size flag is configured and the raw data exceeds it, skip the fragment
if d.MaxTargetMegaBytes > 0 {
rawLength := len(currentRaw) / 1000000
if rawLength > d.MaxTargetMegaBytes {
logger.Debug().
Int("size", rawLength).
Int("max-size", d.MaxTargetMegaBytes).
Msg("skipping fragment: size")
return findings
}
}
// use currentRaw instead of fragment.Raw since this represents the current
// decoding pass on the text
for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
// Extract secret from match
secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")
// Collects any metadata tags produced by decoding passes
var metaTags []string
currentLine := ""
// Check if the decoded portions of the segment overlap with the match
// to see if it's potentially a new match
if len(encodedSegments) > 0 {
if segment := segmentWithDecodedOverlap(encodedSegments, matchIndex[0], matchIndex[1]); segment != nil {
matchIndex = segment.adjustMatchIndex(matchIndex)
metaTags = append(metaTags, segment.tags()...)
currentLine = segment.currentLine(currentRaw)
} else {
// The match does not overlap newly decoded content, so it was already reported in an earlier pass
continue
}
} else {
// Fixes: https://github.com/gitleaks/gitleaks/issues/1352
// trims the trailing line that a regex ending in '\n' would otherwise incorrectly include
matchIndex[1] = matchIndex[0] + len(secret)
}
// determine location of match. Note that the location
// in the finding will be the line/column numbers of the _match_
// not the _secret_, which will be different if the secretGroup
// value is set for this rule
loc := location(fragment, matchIndex)
if matchIndex[1] > loc.endLineIndex {
loc.endLineIndex = matchIndex[1]
}
finding := report.Finding{
RuleID: r.RuleID,
Description: r.Description,
StartLine: loc.startLine,
EndLine: loc.endLine,
StartColumn: loc.startColumn,
EndColumn: loc.endColumn,
Line: fragment.Raw[loc.startLineIndex:loc.endLineIndex],
Match: secret,
Secret: secret,
File: fragment.FilePath,
SymlinkFile: fragment.SymlinkFile,
Tags: append(r.Tags, metaTags...),
}
if !d.IgnoreGitleaksAllow && strings.Contains(finding.Line, gitleaksAllowSignature) {
logger.Trace().
Str("finding", finding.Secret).
Msg("skipping finding: 'gitleaks:allow' signature")
continue
}
if currentLine == "" {
currentLine = finding.Line
}
// Set the value of |secret|, if the pattern contains at least one capture group.
// (The first element is the full match, hence we check >= 2.)
groups := r.Regex.FindStringSubmatch(finding.Secret)
if len(groups) >= 2 {
if r.SecretGroup > 0 {
if len(groups) <= r.SecretGroup {
// Config validation should prevent this
continue
}
finding.Secret = groups[r.SecretGroup]
} else {
// If |secretGroup| is not set, we will use the first suitable capture group.
for _, s := range groups[1:] {
if len(s) > 0 {
finding.Secret = s
break
}
}
}
}
// check entropy
entropy := shannonEntropy(finding.Secret)
finding.Entropy = float32(entropy)
if r.Entropy != 0.0 {
// entropy is too low, skip this finding
if entropy <= r.Entropy {
logger.Trace().
Str("finding", finding.Secret).
Float32("entropy", finding.Entropy).
Msg("skipping finding: low entropy")
continue
}
}
// check if the result matches any of the global allowlists.
if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, d.Config.Allowlists); isAllowed {
event.Msg("skipping finding: global allowlist")
continue
}
// check if the result matches any of the rule allowlists.
if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, r.Allowlists); isAllowed {
event.Msg("skipping finding: rule allowlist")
continue
}
findings = append(findings, finding)
}
return findings
}
// AddFinding synchronously adds a finding to the findings slice
func (d *Detector) AddFinding(finding report.Finding) {
globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
if finding.Commit != "" {
finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
} else {
finding.Fingerprint = globalFingerprint
}
// check if we should ignore this finding
logger := logging.With().Str("finding", finding.Secret).Logger()
if _, ok := d.gitleaksIgnore[globalFingerprint]; ok {
logger.Debug().
Str("fingerprint", globalFingerprint).
Msg("skipping finding: global fingerprint")
return
} else if finding.Commit != "" {
// Awkward nested if because I'm not sure how to chain these two conditions.
if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
logger.Debug().
Str("fingerprint", finding.Fingerprint).
Msgf("skipping finding: fingerprint")
return
}
}
if d.baseline != nil && !IsNew(finding, d.Redact, d.baseline) {
logger.Debug().
Str("fingerprint", finding.Fingerprint).
Msgf("skipping finding: baseline")
return
}
d.findingMutex.Lock()
d.findings = append(d.findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
d.findingMutex.Unlock()
}
// Findings returns the findings added to the detector
func (d *Detector) Findings() []report.Finding {
return d.findings
}
// AddCommit synchronously adds a commit to the commit slice
func (d *Detector) addCommit(commit string) {
d.commitMap[commit] = true
}
// checkCommitOrPathAllowed evaluates |fragment| against all provided |allowlists|.
//
// If the match condition is "OR", only commit and path are checked.
// Otherwise ("AND"), allowlists that also define regexes or stopwords are
// skipped here and deferred to the per-finding checks.
func checkCommitOrPathAllowed(
logger zerolog.Logger,
fragment Fragment,
allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
if fragment.FilePath == "" && fragment.CommitSHA == "" {
return false, nil
}
for _, a := range allowlists {
var (
isAllowed bool
allowlistChecks []bool
commitAllowed, _ = a.CommitAllowed(fragment.CommitSHA)
pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
)
// If the condition is "AND" we need to check all conditions.
if a.MatchCondition == config.AllowlistMatchAnd {
if len(a.Commits) > 0 {
allowlistChecks = append(allowlistChecks, commitAllowed)
}
if len(a.Paths) > 0 {
allowlistChecks = append(allowlistChecks, pathAllowed)
}
// These will be checked later.
if len(a.Regexes) > 0 {
continue
}
if len(a.StopWords) > 0 {
continue
}
isAllowed = allTrue(allowlistChecks)
} else {
isAllowed = commitAllowed || pathAllowed
}
if isAllowed {
event := logger.Trace().Str("condition", a.MatchCondition.String())
if commitAllowed {
event.Bool("allowed-commit", commitAllowed)
}
if pathAllowed {
event.Bool("allowed-path", pathAllowed)
}
return true, event
}
}
return false, nil
}
// checkFindingAllowed evaluates |finding| against all provided |allowlists|.
//
// If the match condition is "OR", only regex and stopwords are run. (Commit and path should be handled separately).
// Otherwise, all conditions are checked.
//
// TODO: The method signature is awkward. I can't think of a better way to log helpful info.
func checkFindingAllowed(
logger zerolog.Logger,
finding report.Finding,
fragment Fragment,
currentLine string,
allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
for _, a := range allowlists {
allowlistTarget := finding.Secret
switch a.RegexTarget {
case "match":
allowlistTarget = finding.Match
case "line":
allowlistTarget = currentLine
}
var (
checks []bool
isAllowed bool
commitAllowed bool
commit string
pathAllowed bool
regexAllowed = a.RegexAllowed(allowlistTarget)
containsStopword, word = a.ContainsStopWord(finding.Secret)
)
// If the condition is "AND" we need to check all conditions.
if a.MatchCondition == config.AllowlistMatchAnd {
// Determine applicable checks.
if len(a.Commits) > 0 {
commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
checks = append(checks, commitAllowed)
}
if len(a.Paths) > 0 {
pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
checks = append(checks, pathAllowed)
}
if len(a.Regexes) > 0 {
checks = append(checks, regexAllowed)
}
if len(a.StopWords) > 0 {
checks = append(checks, containsStopword)
}
isAllowed = allTrue(checks)
} else {
isAllowed = regexAllowed || containsStopword
}
if isAllowed {
event := logger.Trace().
Str("finding", finding.Secret).
Str("condition", a.MatchCondition.String())
if commitAllowed {
event.Str("allowed-commit", commit)
}
if pathAllowed {
event.Bool("allowed-path", pathAllowed)
}
if regexAllowed {
event.Bool("allowed-regex", regexAllowed)
}
if containsStopword {
event.Str("allowed-stopword", word)
}
return true, event
}
}
return false, nil
}
func allTrue(bools []bool) bool {
for _, check := range bools {
if !check {
return false
}
}
return true
}
func fileExists(fileName string) bool {
// check for a .infisicalignore file
info, err := os.Stat(fileName)
if err != nil && !os.IsNotExist(err) {
return false
}
if info != nil && err == nil {
if !info.IsDir() {
return true
}
}
return false
}


@@ -1,225 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/h2non/filetype"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
const maxPeekSize = 25 * 1_000 // 25kb
func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
for pa := range paths {
d.Sema.Go(func() error {
logger := logging.With().Str("path", pa.Path).Logger()
logger.Trace().Msg("Scanning path")
f, err := os.Open(pa.Path)
if err != nil {
if os.IsPermission(err) {
logger.Warn().Msg("Skipping file: permission denied")
return nil
}
return err
}
defer func() {
_ = f.Close()
}()
// Get file size
fileInfo, err := f.Stat()
if err != nil {
return err
}
fileSize := fileInfo.Size()
if d.MaxTargetMegaBytes > 0 {
rawLength := fileSize / 1000000
if rawLength > int64(d.MaxTargetMegaBytes) {
logger.Debug().
Int64("size", rawLength).
Msg("Skipping file: exceeds --max-target-megabytes")
return nil
}
}
var (
// Buffer to hold file chunks
reader = bufio.NewReaderSize(f, chunkSize)
buf = make([]byte, chunkSize)
totalLines = 0
)
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Only check the filetype at the start of file.
if totalLines == 0 {
// TODO: could other optimizations be introduced here?
if mimetype, err := filetype.Match(buf[:n]); err != nil {
return nil
} else if mimetype.MIME.Type == "application" {
return nil // skip binary files
}
}
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return readErr
}
// Count the number of newlines in this chunk
chunk := peekBuf.String()
linesInChunk := strings.Count(chunk, "\n")
totalLines += linesInChunk
fragment := Fragment{
Raw: chunk,
Bytes: peekBuf.Bytes(),
}
if pa.Symlink != "" {
fragment.SymlinkFile = pa.Symlink
}
if isWindows {
fragment.FilePath = filepath.ToSlash(pa.Path)
fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
fragment.WindowsFilePath = pa.Path
} else {
fragment.FilePath = pa.Path
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
// need to add 1 since line counting starts at 1
finding.StartLine += (totalLines - linesInChunk) + 1
finding.EndLine += (totalLines - linesInChunk) + 1
d.AddFinding(finding)
}
if timer != nil {
timer.Stop()
timer = nil
}
}
if err != nil {
if err == io.EOF {
return nil
}
return err
}
}
})
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
return d.findings, nil
}
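
A minimal sketch of feeding DetectFiles through the paths channel, assuming sources.ScanTarget carries the Path and Symlink fields used above; the file names are illustrative.

    // Hypothetical caller.
    func exampleDetectFiles(d *Detector) ([]report.Finding, error) {
        paths := make(chan sources.ScanTarget)
        go func() {
            defer close(paths)
            paths <- sources.ScanTarget{Path: "config/app.env"}
            paths <- sources.ScanTarget{Path: "deploy/secrets.yaml"}
        }()
        return d.DetectFiles(paths)
    }
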
// readUntilSafeBoundary consumes |r| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting secrets across chunk boundaries. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
if peekBuf.Len() == 0 {
return nil
}
// Does the buffer end in consecutive newlines?
var (
data = peekBuf.Bytes()
lastChar = data[len(data)-1]
newlineCount = 0 // Tracks consecutive newlines
)
if isWhitespace(lastChar) {
for i := len(data) - 1; i >= 0; i-- {
lastChar = data[i]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
return nil
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
break
}
}
}
// If not, read ahead until we (hopefully) find some.
newlineCount = 0
for {
data = peekBuf.Bytes()
// Check if the last character is a newline.
lastChar = data[len(data)-1]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
break
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
newlineCount = 0 // Reset if a non-newline character is found
}
// Stop growing the buffer once the peeked portion reaches maxPeekSize
if (peekBuf.Len() - n) >= maxPeekSize {
break
}
// Read additional data into a temporary buffer
b, err := r.ReadByte()
if err != nil {
if err == io.EOF {
break
}
return err
}
peekBuf.WriteByte(b)
}
return nil
}


@@ -1,216 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"errors"
"fmt"
"net/url"
"os/exec"
"regexp"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
defer cmd.Wait()
var (
diffFilesCh = cmd.DiffFilesCh()
errCh = cmd.ErrCh()
)
// loop to range over both DiffFiles (stdout) and ErrCh (stderr)
for diffFilesCh != nil || errCh != nil {
select {
case gitdiffFile, open := <-diffFilesCh:
if !open {
diffFilesCh = nil
break
}
// skip binary files
if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
continue
}
// Check if the commit is on the global allowlist; if so, skip it entirely
commitSHA := ""
if gitdiffFile.PatchHeader != nil {
commitSHA = gitdiffFile.PatchHeader.SHA
commitAllowed := false
for _, a := range d.Config.Allowlists {
if ok, c := a.CommitAllowed(commitSHA); ok {
logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
commitAllowed = true
break
}
}
if commitAllowed {
continue
}
}
d.addCommit(commitSHA)
d.Sema.Go(func() error {
for _, textFragment := range gitdiffFile.TextFragments {
if textFragment == nil {
return nil
}
fragment := Fragment{
Raw: textFragment.Raw(gitdiff.OpAdd),
CommitSHA: commitSHA,
FilePath: gitdiffFile.NewName,
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
// guard the slice: commitSHA may be empty or shorter than 7 characters
shortSHA := commitSHA
if len(shortSHA) > 7 {
shortSHA = shortSHA[:7]
}
logging.Debug().
Str("commit", shortSHA).
Str("path", fragment.FilePath).
Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
}
if timer != nil {
timer.Stop()
timer = nil
}
}
return nil
})
case err, open := <-errCh:
if !open {
errCh = nil
break
}
return d.findings, err
}
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
return d.findings, nil
}
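
A sketch of a git scan. NewGitLogCmd and its arguments follow the upstream gitleaks API and should be treated as an assumption here.

    // Hypothetical caller (imports: detect/cmd/scm, detect/sources).
    func exampleDetectGit(d *Detector, repoPath string) ([]report.Finding, error) {
        gitCmd, err := sources.NewGitLogCmd(repoPath, "") // assumed API; "" scans full history
        if err != nil {
            return nil, err
        }
        remote := NewRemoteInfo(scm.UnknownPlatform, repoPath)
        return d.DetectGit(gitCmd, remote)
    }
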
type RemoteInfo struct {
Platform scm.Platform
Url string
}
func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
if platform == scm.NoPlatform {
return &RemoteInfo{Platform: platform}
}
remoteUrl, err := getRemoteUrl(source)
if err != nil {
if strings.Contains(err.Error(), "No remote configured") {
logging.Debug().Msg("skipping finding links: repository has no configured remote.")
platform = scm.NoPlatform
} else {
logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
}
goto End
}
if platform == scm.UnknownPlatform {
platform = platformFromHost(remoteUrl)
if platform == scm.UnknownPlatform {
logging.Info().
Str("host", remoteUrl.Hostname()).
Msg("Unknown SCM platform. Use --platform to include links in findings.")
} else {
logging.Debug().
Str("host", remoteUrl.Hostname()).
Str("platform", platform.String()).
Msg("SCM platform parsed from host")
}
}
End:
var rUrl string
if remoteUrl != nil {
rUrl = remoteUrl.String()
}
return &RemoteInfo{
Platform: platform,
Url: rUrl,
}
}
var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)
func getRemoteUrl(source string) (*url.URL, error) {
// This will return the first remote — typically, "origin".
cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
if source != "." {
cmd.Dir = source
}
stdout, err := cmd.Output()
if err != nil {
var exitError *exec.ExitError
if errors.As(err, &exitError) {
return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
}
return nil, err
}
remoteUrl := string(bytes.TrimSpace(stdout))
if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
}
remoteUrl = strings.TrimSuffix(remoteUrl, ".git")
parsedUrl, err := url.Parse(remoteUrl)
if err != nil {
return nil, fmt.Errorf("unable to parse remote URL: %w", err)
}
// Remove any user info.
parsedUrl.User = nil
return parsedUrl, nil
}
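
A short trace of the SSH-to-HTTPS rewrite above, using a made-up remote:

    // git@github.com:acme/widgets.git
    //   sshUrlpat captures: host = "github.com", path = "acme/widgets"
    //   rewritten to:       https://github.com/acme/widgets
    // The trailing ".git" and any user info are stripped before parsing.
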
func platformFromHost(u *url.URL) scm.Platform {
switch strings.ToLower(u.Hostname()) {
case "github.com":
return scm.GitHubPlatform
case "gitlab.com":
return scm.GitLabPlatform
case "dev.azure.com", "visualstudio.com":
return scm.AzureDevOpsPlatform
case "bitbucket.org":
return scm.BitBucketPlatform
default:
return scm.UnknownPlatform
}
}


@@ -1,102 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
// Location represents a location in a file
type Location struct {
startLine int
endLine int
startColumn int
endColumn int
startLineIndex int
endLineIndex int
}
func location(fragment Fragment, matchIndex []int) Location {
var (
prevNewLine int
location Location
lineSet bool
_lineNum int
)
start := matchIndex[0]
end := matchIndex[1]
// default startLineIndex to 0
location.startLineIndex = 0
// Fixes: https://github.com/zricethezav/gitleaks/issues/1037
// When a fragment does NOT have any newlines, a default "newline"
// will be counted to make the subsequent location calculation logic work
// for fragments with no newlines.
if len(fragment.newlineIndices) == 0 {
fragment.newlineIndices = [][]int{
{len(fragment.Raw), len(fragment.Raw) + 1},
}
}
for lineNum, pair := range fragment.newlineIndices {
_lineNum = lineNum
newLineByteIndex := pair[0]
if prevNewLine <= start && start < newLineByteIndex {
lineSet = true
location.startLine = lineNum
location.endLine = lineNum
location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
location.startLineIndex = prevNewLine
location.endLineIndex = newLineByteIndex
}
if prevNewLine < end && end <= newLineByteIndex {
location.endLine = lineNum
location.endColumn = (end - prevNewLine)
location.endLineIndex = newLineByteIndex
}
prevNewLine = pair[0]
}
if !lineSet {
// if lines never get set then that means the secret is most likely
// on the last line of the diff output and the diff output does not have
// a newline
location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
location.endColumn = (end - prevNewLine)
location.startLine = _lineNum + 1
location.endLine = _lineNum + 1
// search for new line byte index
i := 0
for end+i < len(fragment.Raw) {
if fragment.Raw[end+i] == '\n' {
break
}
if fragment.Raw[end+i] == '\r' {
break
}
i++
}
location.endLineIndex = end + i
}
return location
}
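
A hand-traced example of the index arithmetic above, with made-up input:

    // fragment.Raw   = "a=1\nkey=XYZ\n"   (newlines at bytes 3 and 11)
    // newlineIndices = [[3 4] [11 12]]
    // matchIndex     = [8, 11]            (the "XYZ" bytes)
    // yields:
    //   startLine = endLine = 1           (zero-based; DetectFiles adds 1)
    //   startColumn = 6, endColumn = 8    (counted from the preceding '\n')
    //   startLineIndex = 3, endLineIndex = 11
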


@@ -1,72 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package logging
import (
"os"
"github.com/rs/zerolog"
)
var Logger zerolog.Logger
func init() {
// send all logs to stderr
Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
Level(zerolog.InfoLevel).
With().Timestamp().Logger()
}
func With() zerolog.Context {
return Logger.With()
}
func Trace() *zerolog.Event {
return Logger.Trace()
}
func Debug() *zerolog.Event {
return Logger.Debug()
}
func Info() *zerolog.Event {
return Logger.Info()
}
func Warn() *zerolog.Event {
return Logger.Warn()
}
func Error() *zerolog.Event {
return Logger.Error()
}
func Err(err error) *zerolog.Event {
return Logger.Err(err)
}
func Fatal() *zerolog.Event {
return Logger.Fatal()
}
func Panic() *zerolog.Event {
return Logger.Panic()
}


@@ -1,149 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"errors"
"io"
"github.com/Infisical/infisical-merge/detect/report"
)
// DetectReader accepts an io.Reader and a buffer size for the reader in KB
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
findings := []report.Finding{}
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return findings, readErr
}
fragment := Fragment{
Raw: peekBuf.String(),
}
for _, finding := range d.Detect(fragment) {
findings = append(findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if err == io.EOF {
break
}
return findings, err
}
}
return findings, nil
}
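
A minimal sketch of scanning an arbitrary stream; the file name and buffer size are illustrative.

    // Hypothetical caller (imports: log, os).
    func exampleDetectReader(d *Detector) {
        f, err := os.Open("app.log")
        if err != nil {
            log.Fatal(err)
        }
        defer f.Close()
        findings, err := d.DetectReader(f, 64) // 64 KB read buffer
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("%d findings", len(findings))
    }
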
// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
// - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
// - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
// once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
// // Assume detector is an instance of *Detector and myReader implements io.Reader.
// findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
// // Process findings as they arrive.
// for finding := range findingsCh {
// fmt.Printf("Found secret: %+v\n", finding)
// }
//
// // After the findings channel is closed, check the final error.
// if err := <-errCh; err != nil {
// log.Fatalf("StreamDetectReader encountered an error: %v", err)
// } else {
// fmt.Println("Scanning completed successfully.")
// }
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
findingsCh := make(chan report.Finding, 1)
errCh := make(chan error, 1)
go func() {
defer close(findingsCh)
defer close(errCh)
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
for {
n, err := reader.Read(buf)
if n > 0 {
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
errCh <- readErr
return
}
fragment := Fragment{Raw: peekBuf.String()}
for _, finding := range d.Detect(fragment) {
findingsCh <- finding
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if errors.Is(err, io.EOF) {
errCh <- nil
return
}
errCh <- err
return
}
}
}()
return findingsCh, errCh
}


@@ -1,37 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build !gore2regex
package regexp
import (
re "regexp"
)
const Version = "stdlib"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}


@@ -1,37 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build gore2regex
package regexp
import (
re "github.com/wasilibs/go-re2"
)
const Version = "github.com/wasilibs/go-re2"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}


@@ -1,26 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
const version = "v8.0.0"
const driver = "gitleaks"


@@ -1,100 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/csv"
"io"
"strconv"
"strings"
)
type CsvReporter struct {
}
var _ Reporter = (*CsvReporter)(nil)
func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
if len(findings) == 0 {
return nil
}
var (
cw = csv.NewWriter(w)
err error
)
columns := []string{"RuleID",
"Commit",
"File",
"SymlinkFile",
"Secret",
"Match",
"StartLine",
"EndLine",
"StartColumn",
"EndColumn",
"Author",
"Message",
"Date",
"Email",
"Fingerprint",
"Tags",
}
// A miserable attempt at "omitempty" so tests don't yell at me.
if findings[0].Link != "" {
columns = append(columns, "Link")
}
if err = cw.Write(columns); err != nil {
return err
}
for _, f := range findings {
row := []string{f.RuleID,
f.Commit,
f.File,
f.SymlinkFile,
f.Secret,
f.Match,
strconv.Itoa(f.StartLine),
strconv.Itoa(f.EndLine),
strconv.Itoa(f.StartColumn),
strconv.Itoa(f.EndColumn),
f.Author,
f.Message,
f.Date,
f.Email,
f.Fingerprint,
strings.Join(f.Tags, " "),
}
if findings[0].Link != "" {
row = append(row, f.Link)
}
if err = cw.Write(row); err != nil {
return err
}
}
cw.Flush()
return cw.Error()
}
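
A sketch of writing findings as CSV; the output path and helper name are illustrative. Note that Write flushes but does not close the writer, so the caller closes it.

    // Hypothetical caller (import: os).
    func exampleCsvReport(findings []Finding) error {
        out, err := os.Create("findings.csv")
        if err != nil {
            return err
        }
        var r CsvReporter
        if err := r.Write(out, findings); err != nil {
            out.Close()
            return err
        }
        return out.Close()
    }
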


@@ -1,92 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"math"
"strings"
)
// Finding contains information about strings that
// have been matched by a rule.
type Finding struct {
// Rule is the name of the rule that was matched
RuleID string
Description string
StartLine int
EndLine int
StartColumn int
EndColumn int
Line string `json:"-"`
Match string
// Secret contains the secret portion of the match; if the rule
// defines a capture group, only that group's content.
Secret string
// File is the name of the file containing the finding
File string
SymlinkFile string
Commit string
Link string `json:",omitempty"`
// Entropy is the shannon entropy of Secret
Entropy float32
Author string
Email string
Date string
Message string
Tags []string
// unique identifier
Fingerprint string
}
// Redact removes sensitive information from a finding.
func (f *Finding) Redact(percent uint) {
secret := maskSecret(f.Secret, percent)
if percent >= 100 {
secret = "REDACTED"
}
f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
f.Secret = secret
}
func maskSecret(secret string, percent uint) string {
if percent > 100 {
percent = 100
}
length := float64(len(secret))
if length <= 0 {
return secret
}
prc := float64(100 - percent)
lth := int64(math.RoundToEven(length * prc / float64(100)))
return secret[:lth] + "..."
}
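
A worked example of the masking arithmetic above:

    // maskSecret("supersecretvalue", 75):
    //   16 chars * (100-75)/100 = 4 kept  ->  "supe..."
    // Redact(100) replaces the secret with the literal "REDACTED" in
    // Secret and masks every occurrence inside Line and Match.
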


@@ -1,39 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"io"
)
type JsonReporter struct {
}
var _ Reporter = (*JsonReporter)(nil)
func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(findings)
}


@@ -1,129 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"encoding/xml"
"fmt"
"io"
"strconv"
)
type JunitReporter struct {
}
var _ Reporter = (*JunitReporter)(nil)
func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
testSuites := TestSuites{
TestSuites: getTestSuites(findings),
}
if _, err := io.WriteString(w, xml.Header); err != nil {
return err
}
encoder := xml.NewEncoder(w)
encoder.Indent("", "\t")
return encoder.Encode(testSuites)
}
func getTestSuites(findings []Finding) []TestSuite {
return []TestSuite{
{
Failures: strconv.Itoa(len(findings)),
Name: "gitleaks",
Tests: strconv.Itoa(len(findings)),
TestCases: getTestCases(findings),
Time: "",
},
}
}
func getTestCases(findings []Finding) []TestCase {
testCases := []TestCase{}
for _, f := range findings {
testCase := TestCase{
Classname: f.Description,
Failure: getFailure(f),
File: f.File,
Name: getMessage(f),
Time: "",
}
testCases = append(testCases, testCase)
}
return testCases
}
func getFailure(f Finding) Failure {
return Failure{
Data: getData(f),
Message: getMessage(f),
Type: f.Description,
}
}
func getData(f Finding) string {
data, err := json.MarshalIndent(f, "", "\t")
if err != nil {
fmt.Println(err)
return ""
}
return string(data)
}
func getMessage(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
}
return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}
type TestSuites struct {
XMLName xml.Name `xml:"testsuites"`
TestSuites []TestSuite
}
type TestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Failures string `xml:"failures,attr"`
Name string `xml:"name,attr"`
Tests string `xml:"tests,attr"`
TestCases []TestCase `xml:"testcase"`
Time string `xml:"time,attr"`
}
type TestCase struct {
XMLName xml.Name `xml:"testcase"`
Classname string `xml:"classname,attr"`
Failure Failure `xml:"failure"`
File string `xml:"file,attr"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"`
}
type Failure struct {
XMLName xml.Name `xml:"failure"`
Data string `xml:",chardata"`
Message string `xml:"message,attr"`
Type string `xml:"type,attr"`
}


@@ -1,38 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"io"
)
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
StdoutReportPath = "-"
)
type Reporter interface {
Write(w io.WriteCloser, findings []Finding) error
}
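
The interface above is the extension point for custom output formats. A minimal hypothetical implementation for illustration:

    // Hypothetical counting reporter (imports: fmt, io).
    type countReporter struct{}

    var _ Reporter = (*countReporter)(nil)

    func (countReporter) Write(w io.WriteCloser, findings []Finding) error {
        _, err := fmt.Fprintf(w, "total findings: %d\n", len(findings))
        return err
    }
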


@@ -1,239 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"fmt"
"io"
"github.com/Infisical/infisical-merge/detect/config"
)
type SarifReporter struct {
OrderedRules []config.Rule
}
var _ Reporter = (*SarifReporter)(nil)
func (r *SarifReporter) Write(w io.WriteCloser, findings []Finding) error {
sarif := Sarif{
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
Version: "2.1.0",
Runs: r.getRuns(findings),
}
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(sarif)
}
func (r *SarifReporter) getRuns(findings []Finding) []Runs {
return []Runs{
{
Tool: r.getTool(),
Results: getResults(findings),
},
}
}
func (r *SarifReporter) getTool() Tool {
tool := Tool{
Driver: Driver{
Name: driver,
SemanticVersion: version,
InformationUri: "https://github.com/gitleaks/gitleaks",
Rules: r.getRules(),
},
}
// if this tool has no rules, ensure that it is represented as [] instead of null/nil
if hasEmptyRules(tool) {
tool.Driver.Rules = make([]Rules, 0)
}
return tool
}
func hasEmptyRules(tool Tool) bool {
return len(tool.Driver.Rules) == 0
}
func (r *SarifReporter) getRules() []Rules {
// TODO for _, rule := range cfg.Rules {
var rules []Rules
for _, rule := range r.OrderedRules {
rules = append(rules, Rules{
ID: rule.RuleID,
Description: ShortDescription{
Text: rule.Description,
},
})
}
return rules
}
func messageText(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected secret for file %s.", f.RuleID, f.File)
}
return fmt.Sprintf("%s has detected secret for file %s at commit %s.", f.RuleID, f.File, f.Commit)
}
func getResults(findings []Finding) []Results {
results := []Results{}
for _, f := range findings {
r := Results{
Message: Message{
Text: messageText(f),
},
RuleId: f.RuleID,
Locations: getLocation(f),
// This information goes in partial fingerprints until revision
// data can be added somewhere else
PartialFingerPrints: PartialFingerPrints{
CommitSha: f.Commit,
Email: f.Email,
CommitMessage: f.Message,
Date: f.Date,
Author: f.Author,
},
Properties: Properties{
Tags: f.Tags,
},
}
results = append(results, r)
}
return results
}
func getLocation(f Finding) []Locations {
uri := f.File
if f.SymlinkFile != "" {
uri = f.SymlinkFile
}
return []Locations{
{
PhysicalLocation: PhysicalLocation{
ArtifactLocation: ArtifactLocation{
URI: uri,
},
Region: Region{
StartLine: f.StartLine,
EndLine: f.EndLine,
StartColumn: f.StartColumn,
EndColumn: f.EndColumn,
Snippet: Snippet{
Text: f.Secret,
},
},
},
},
}
}
type PartialFingerPrints struct {
CommitSha string `json:"commitSha"`
Email string `json:"email"`
Author string `json:"author"`
Date string `json:"date"`
CommitMessage string `json:"commitMessage"`
}
type Sarif struct {
Schema string `json:"$schema"`
Version string `json:"version"`
Runs []Runs `json:"runs"`
}
type ShortDescription struct {
Text string `json:"text"`
}
type FullDescription struct {
Text string `json:"text"`
}
type Rules struct {
ID string `json:"id"`
Description ShortDescription `json:"shortDescription"`
}
type Driver struct {
Name string `json:"name"`
SemanticVersion string `json:"semanticVersion"`
InformationUri string `json:"informationUri"`
Rules []Rules `json:"rules"`
}
type Tool struct {
Driver Driver `json:"driver"`
}
type Message struct {
Text string `json:"text"`
}
type ArtifactLocation struct {
URI string `json:"uri"`
}
type Region struct {
StartLine int `json:"startLine"`
StartColumn int `json:"startColumn"`
EndLine int `json:"endLine"`
EndColumn int `json:"endColumn"`
Snippet Snippet `json:"snippet"`
}
type Snippet struct {
Text string `json:"text"`
}
type PhysicalLocation struct {
ArtifactLocation ArtifactLocation `json:"artifactLocation"`
Region Region `json:"region"`
}
type Locations struct {
PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}
type Properties struct {
Tags []string `json:"tags"`
}
type Results struct {
Message Message `json:"message"`
RuleId string `json:"ruleId"`
Locations []Locations `json:"locations"`
PartialFingerPrints `json:"partialFingerprints"`
Properties Properties `json:"properties"`
}
type Runs struct {
Tool Tool `json:"tool"`
Results []Results `json:"results"`
}
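
These types track the SARIF 2.1.0 schema closely enough that findings can be fed straight to SarifReporter.Write. A usage sketch, assuming Finding and the types above are in scope and that os is imported; the rule, path, and output filename are illustrative:

func writeSarifExample() error {
    reporter := &SarifReporter{
        OrderedRules: []config.Rule{{RuleID: "generic-api-key", Description: "Generic API Key"}},
    }
    findings := []Finding{{
        RuleID:    "generic-api-key",
        File:      "config/app.env", // illustrative path
        StartLine: 3,
        EndLine:   3,
        Secret:    "REDACTED",
    }}
    f, err := os.Create("report.sarif")
    if err != nil {
        return err
    }
    // Write only encodes; the caller owns closing the destination.
    if err := reporter.Write(f, findings); err != nil {
        f.Close()
        return err
    }
    return f.Close()
}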


@@ -1,68 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"fmt"
"io"
"os"
"text/template"
"github.com/Masterminds/sprig/v3"
)
type TemplateReporter struct {
template *template.Template
}
var _ Reporter = (*TemplateReporter)(nil)
func NewTemplateReporter(templatePath string) (*TemplateReporter, error) {
if templatePath == "" {
return nil, fmt.Errorf("template path cannot be empty")
}
file, err := os.ReadFile(templatePath)
if err != nil {
return nil, fmt.Errorf("error reading file: %w", err)
}
templateText := string(file)
// TODO: Add helper functions like escaping for JSON, XML, etc.
t := template.New("custom")
t = t.Funcs(sprig.TxtFuncMap())
t, err = t.Parse(templateText)
if err != nil {
return nil, fmt.Errorf("error parsing file: %w", err)
}
return &TemplateReporter{template: t}, nil
}
// Write renders the findings using the user-provided template.
// https://www.digitalocean.com/community/tutorials/how-to-use-templates-in-go
func (t *TemplateReporter) Write(w io.WriteCloser, findings []Finding) error {
return t.template.Execute(w, findings)
}
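
A brief usage sketch: because sprig's function map is loaded, templates can use helpers such as upper alongside standard text/template constructs. The template path and contents below are illustrative, and os.Stdout serves as the io.WriteCloser (os import assumed):

func writeTemplateExample(findings []Finding) error {
    // findings.tmpl might contain, for example:
    //   {{ range . }}{{ .RuleID | upper }}: {{ .File }}:{{ .StartLine }}
    //   {{ end }}
    reporter, err := NewTemplateReporter("findings.tmpl")
    if err != nil {
        return err
    }
    return reporter.Write(os.Stdout, findings)
}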


@@ -1,127 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"io/fs"
"os"
"path/filepath"
"runtime"
"github.com/fatih/semgroup"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
)
type ScanTarget struct {
Path string
Symlink string
}
var isWindows = runtime.GOOS == "windows"
func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
paths := make(chan ScanTarget)
s.Go(func() error {
defer close(paths)
return filepath.Walk(source,
func(path string, fInfo os.FileInfo, err error) error {
logger := logging.With().Str("path", path).Logger()
if err != nil {
if os.IsPermission(err) {
// This seems to only fail on directories at this stage.
logger.Warn().Msg("Skipping directory: permission denied")
return filepath.SkipDir
}
return err
}
// Empty; nothing to do here.
if fInfo.Size() == 0 {
return nil
}
// Unwrap symlinks, if |followSymlinks| is set.
scanTarget := ScanTarget{
Path: path,
}
if fInfo.Mode().Type() == fs.ModeSymlink {
if !followSymlinks {
logger.Debug().Msg("Skipping symlink")
return nil
}
realPath, err := filepath.EvalSymlinks(path)
if err != nil {
return err
}
realPathFileInfo, err := os.Stat(realPath)
if err != nil {
return err
}
if realPathFileInfo.IsDir() {
logger.Warn().Str("target", realPath).Msg("Skipping symlinked directory")
return nil
}
scanTarget.Path = realPath
scanTarget.Symlink = path
}
// TODO: Also run this check against the resolved symlink?
var skip bool
for _, a := range allowlists {
skip = a.PathAllowed(path) ||
// TODO: Remove this in v9.
// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
(isWindows && a.PathAllowed(filepath.ToSlash(path)))
if skip {
break
}
}
if fInfo.IsDir() {
// Directory
if skip {
logger.Debug().Msg("Skipping directory due to global allowlist")
return filepath.SkipDir
}
if fInfo.Name() == ".git" {
// Don't scan .git directories.
// TODO: Add this to the config allowlist, instead of hard-coding it.
return filepath.SkipDir
}
} else {
// File
if skip {
logger.Debug().Msg("Skipping file due to global allowlist")
return nil
}
paths <- scanTarget
}
return nil
})
})
return paths, nil
}
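
DirectoryTargets runs the walk on a semgroup worker and streams targets through the channel, so a consumer ranges over it while the walk is still in flight and collects walk errors from the group's Wait afterwards. A consumption sketch, assuming context, fmt, and runtime are imported; the scan path is illustrative and no allowlists are applied:

func scanDirectoryExample() error {
    g := semgroup.NewGroup(context.Background(), int64(runtime.NumCPU()))
    targets, err := DirectoryTargets("/path/to/scan", g, false, nil)
    if err != nil {
        return err
    }
    for t := range targets {
        // Each target is a regular, non-empty file that survived the allowlists.
        fmt.Println(t.Path)
    }
    // Wait propagates any error returned by the walk goroutine.
    return g.Wait()
}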


@@ -1,211 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"bufio"
"errors"
"io"
"os/exec"
"path/filepath"
"regexp"
"strings"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
)
var quotedOptPattern = regexp.MustCompile(`^(?:"[^"]+"|'[^']+')$`)
// GitCmd helps to work with Git's output.
type GitCmd struct {
cmd *exec.Cmd
diffFilesCh <-chan *gitdiff.File
errCh <-chan error
}
// NewGitLogCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed, then call
// `func (*GitCmd) Wait() error` in order to release resources.
func NewGitLogCmd(source string, logOpts string) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
// Warn if any user-provided |logOpts| values are wrapped in quotes; git will not unquote them, so they may not behave as expected.
// https://github.com/gitleaks/gitleaks/issues/1153
userArgs := strings.Split(logOpts, " ")
var quotedOpts []string
for _, element := range userArgs {
if quotedOptPattern.MatchString(element) {
quotedOpts = append(quotedOpts, element)
}
}
if len(quotedOpts) > 0 {
logging.Warn().Msgf("the following `--log-opts` values may not work as expected: %v\n\tsee https://github.com/gitleaks/gitleaks/issues/1153 for more information", quotedOpts)
}
args = append(args, userArgs...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// NewGitDiffCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed, then call
// `func (*GitCmd) Wait() error` in order to release resources.
func NewGitDiffCmd(source string, staged bool) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
args := []string{"-C", sourceClean, "diff", "-U0", "--no-ext-diff"}
if staged {
args = append(args, "--staged")
}
args = append(args, ".")
cmd := exec.Command("git", args...)
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// DiffFilesCh returns a channel with *gitdiff.File.
func (c *GitCmd) DiffFilesCh() <-chan *gitdiff.File {
return c.diffFilesCh
}
// ErrCh returns a channel that could produce an error if there is something in stderr.
func (c *GitCmd) ErrCh() <-chan error {
return c.errCh
}
// Wait waits for the command to exit and waits for any copying to
// stdin or copying from stdout or stderr to complete.
//
// Wait also closes underlying stdout and stderr.
func (c *GitCmd) Wait() (err error) {
return c.cmd.Wait()
}
// listenForStdErr listens for stderr output from git, logs it, and, if any
// non-benign output was seen, sends a single error to errCh before closing it.
func listenForStdErr(stderr io.ReadCloser, errCh chan<- error) {
defer close(errCh)
var errEncountered bool
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// Auto packing the repository in background for optimum performance.
// See "git help gc" for manual housekeeping.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid-scan if this error is
// encountered.
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") ||
strings.Contains(scanner.Text(),
"See \"git help gc\" for manual housekeeping") ||
strings.Contains(scanner.Text(),
"Auto packing the repository in background for optimum performance") {
logging.Warn().Msg(scanner.Text())
} else {
logging.Error().Msgf("[git] %s", scanner.Text())
errEncountered = true
}
}
if errEncountered {
errCh <- errors.New("stderr is not empty")
}
}
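
Tying it together: a consumption sketch for GitCmd that follows the contract in the doc comments above, reading both channels to exhaustion before calling Wait. The repository path is illustrative, and fmt is assumed to be imported:

func scanGitLogExample() error {
    gitCmd, err := NewGitLogCmd("/path/to/repo", "")
    if err != nil {
        return err
    }
    for file := range gitCmd.DiffFilesCh() {
        // Each *gitdiff.File carries the parsed hunks for one file in one commit.
        fmt.Println(file.NewName)
    }
    // Drain any stderr-derived error before reaping the process.
    for chErr := range gitCmd.ErrCh() {
        logging.Error().Msgf("[git] %v", chErr)
    }
    return gitCmd.Wait()
}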

Some files were not shown because too many files have changed in this diff.